package com.ibm.dw61;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
public class MaxTempReducer extends
Reducer<Text, IntWritable, Text, IntWritable> {
public void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {
int maxTemp = Integer.MIN_VALUE;
for (IntWritable value: values) {
maxTemp = Math.max(maxTemp, value.get());
}
context.write(key, new IntWritable(maxTemp));
}
}
问题:
1) int maxTemp = Integer.MIN_VALUE; <----- 这一行是 maxTemp 变量的初始化。为什么编码者没有将其初始化为零?Integer.MIN_VALUE 给出 -2147483648,而实际最低温度不可能低到 -100 度。
2) context.write(key, new IntWritable(maxTemp)); <----- 这是最终输出。key 是月份,maxTemp 是该月份的最高温度。为什么 maxTemp 需要用 new 关键字包装成对象,而 key(月份)却不需要?
答案 0 :(得分:0)
1) int maxTemp = Integer.MIN_VALUE
public static final int MIN_VALUE
这是一个常量,表示 int 类型所能取的最小值,即 -2^31(-2147483648)。用它来初始化可以保证循环中遇到的任何真实温度(包括负数)都会取代初始值;如果初始化为 0,那么全为负数的温度序列会得到错误的结果 0。
2) context.write(key, new IntWritable(maxTemp));
maxTemp 是基本类型 int,必须先用 new IntWritable(...) 包装成 Hadoop 可序列化的 Writable 类型才能写出;而 key 本身已经是 Text 类型,无需再包装。参见相关问题:Why does Hadoop need classes like Text or IntWritable instead of String or Integer?
希望这是有帮助的