我是新手并尝试运行我的第一个hadoop计划。当我在hadoop中执行wordcount工作时,我遇到了一些问题。
WordCount.java
package hdp;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
public class WordCount extends Configured implements Tool{
public static void main(String[] args) throws Exception {
System.out.println("application starting ....");
int exitCode = ToolRunner.run(new WordCount(), args);
System.out.println(exitCode);
}
@Override
public int run(String[] args) throws Exception {
if (args.length < 2) {
System.out.println("Plz enter input and output directory properly... ");
return -1;
}
JobConf conf = new JobConf(WordCount.class);
FileInputFormat.setInputPaths(conf, new Path(args[0]));
FileOutputFormat.setOutputPath(conf, new Path(args[1]));
conf.setMapperClass(WordMapper.class);
conf.setReducerClass(WordReducer.class);
conf.setMapOutputKeyClass(Text.class);
conf.setMapOutputKeyClass(IntWritable.class);
conf.setOutputKeyClass(Text.class);
conf.setOutputValueClass(IntWritable.class);
JobClient.runJob(conf);
return 0;
}
@Override
public Configuration getConf() {
return null;
}
@Override
public void setConf(Configuration arg0) {
}
}
WordMapper.java 包hdp;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
public class WordMapper extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable>{
@Override
public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> collect, Reporter reporter) throws IOException {
String str = value.toString();
for (String s : str.split(" ")) {
if (s.length() > 0) {
collect.collect(new Text(s), new IntWritable(1));
}
}
}
}
WordReducer
package hdp;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
public class WordReducer extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable> {
@Override
public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
int count = 0;
while (values.hasNext()) {
IntWritable intWritable = values.next();
count += intWritable.get();
}
output.collect(key, new IntWritable(count));
}
}
当我运行程序时,我收到以下错误消息。
16/12/23 00:22:41 INFO mapreduce.Job:任务ID:attempt_1482432671993_0001_m_000001_1,状态:未通过 错误:java.io.IOException:键入map中的键不匹配:expected org.apache.hadoop.io.IntWritable,收到org.apache.hadoop.io.Text at org.apache.hadoop.mapred.MapTask $ MapOutputBuffer.collect(MapTask.java:1072) at org.apache.hadoop.mapred.MapTask $ OldOutputCollector.collect(MapTask.java:610) at hdp.WordMapper.map(WordMapper.java:19) at hdp.WordMapper.map(WordMapper.java:1) 在org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54) 在org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453) 在org.apache.hadoop.mapred.MapTask.run(MapTask.java:343) 在org.apache.hadoop.mapred.YarnChild $ 2.run(YarnChild.java:164) at java.security.AccessController.doPrivileged(Native Method) 在javax.security.auth.Subject.doAs(Subject.java:422) 在org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698) 在org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158) 16/12/23 00:22:47 INFO mapreduce.Job:任务ID:attempt_1482432671993_0001_m_000000_2,状态:未通过 错误:java.io.IOException:键入map中的键不匹配:expected org.apache.hadoop.io.IntWritable,收到org.apache.hadoop.io.Text at org.apache.hadoop.mapred.MapTask $ MapOutputBuffer.collect(MapTask.java:1072) at org.apache.hadoop.mapred.MapTask $ OldOutputCollector.collect(MapTask.java:610) at hdp.WordMapper.map(WordMapper.java:19) at hdp.WordMapper.map(WordMapper.java:1) 在org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54) 在org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453) 在org.apache.hadoop.mapred.MapTask.run(MapTask.java:343) 在org.apache.hadoop.mapred.YarnChild $ 2.run(YarnChild.java:164) at java.security.AccessController.doPrivileged(Native Method) 在javax.security.auth.Subject.doAs(Subject.java:422) 在org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698) 在org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
请告诉我我错在哪里以及我需要什么样的改变。在 WordCount.java 或 WordReducer 或 WordMapper.java
中答案 0 :(得分:0)
您不小心将输出键类设置了两次:
conf.setMapOutputKeyClass(IntWritable.class);
应该成为
conf.setMapOutputValueClass(IntWritable.class);