How do I fix this Hadoop map-reduce error?

Date: 2019-04-26 00:38:22

Tags: java hadoop

I've written a couple of simple map-reduce programs. The one that searches for a hard-coded word works, but my other map-reduce program, which searches for a word passed in as a command-line argument, does not, and the two are so similar that I'm not sure why. It seems to start up fine but fails almost immediately.

Working program:

// This is a program that finds the Twitter Users that
// are sending secret messages, i.e., do their tweets contain
// "secretMESSAGE" inside of them.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class SecretMessage {

  public static class SecretMapper
       extends Mapper<Object, Text, Text, IntWritable>{

    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    public void map(Object key, Text value, Context context
                    ) throws IOException, InterruptedException {
      String itr = value.toString();
      String message = "secretMESSAGE";
      // Input lines are expected to be "user<TAB>tweet".
      String[] arrOfitr = itr.split("\t");
      if (arrOfitr.length == 2) {
        String user = arrOfitr[0];
        String tweet = arrOfitr[1];
        // Scan the tweet and advance j whenever the current character
        // matches the next character of the message, i.e. test whether
        // the message occurs in the tweet as a subsequence.
        int j = 0;
        for (int i = 0; i < tweet.length(); i++) {
          if (j < message.length() && tweet.charAt(i) == message.charAt(j)) {
            j++;
          }
        }
        // Every character of the message was matched in order.
        if (j == message.length()) {
          word.set(user);
          context.write(word, one);
        }
      }
    }
  }

  public static class SecretReducer
       extends Reducer<Text,IntWritable,Text,IntWritable> {
    private IntWritable result = new IntWritable();

    public void reduce(Text key, Iterable<IntWritable> values,
                       Context context
                       ) throws IOException, InterruptedException {
      // Sum the 1s emitted for this user.
      int sum = 0;
      for (IntWritable val : values) {
        sum += val.get();
      }
      result.set(sum);
      context.write(key, result);
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
      System.err.println("Usage: secretmessage <in> <out>");
      System.exit(2);
    }
    Job job = Job.getInstance(conf, "Secret Message");
    job.setJarByClass(SecretMessage.class);
    job.setMapperClass(SecretMapper.class);
    job.setCombinerClass(SecretReducer.class);
    job.setReducerClass(SecretReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
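
For comparison, the working program takes only the two path arguments, so it runs with something like the following (jar and path names are just examples matching the usage string `secretmessage <in> <out>`):

yarn jar SecretMessage.jar SecretMessage input secretoutput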

Non-working program:

// This is a program that finds the Twitter Users that
// are sending secret messages, i.e., do their tweets contain
// a general message inside of them.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class GeneralMessage {

  public static class GeneralMapper
       extends Mapper<Object, Text, Text, IntWritable>{

    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();
    private String message;

    public void setup(Context context
               ) throws IOException, InterruptedException {
      // Read the search string from the job Configuration.
      Configuration conf = context.getConfiguration();
      message = conf.get("Message");
    }

    public void map(Object key, Text value, Context context
                    ) throws IOException, InterruptedException {
      String itr = value.toString();
      // Input lines are expected to be "user<TAB>tweet".
      String[] arrOfitr = itr.split("\t");
      if (arrOfitr.length == 2) {
        String user = arrOfitr[0];
        String tweet = arrOfitr[1];
        // Same subsequence test as the working program, but against
        // the message read in setup().
        int j = 0;
        for (int i = 0; i < tweet.length(); i++) {
          if (j < message.length() && tweet.charAt(i) == message.charAt(j)) {
            j++;
          }
        }
        if (j == message.length()) {
          word.set(user);
          context.write(word, one);
        }
      }
    }
  }

  public static class GeneralReducer
       extends Reducer<Text,IntWritable,Text,IntWritable> {
    private IntWritable result = new IntWritable();

    public void reduce(Text key, Iterable<IntWritable> values,
                       Context context
                       ) throws IOException, InterruptedException {
      // Sum the 1s emitted for this user.
      int sum = 0;
      for (IntWritable val : values) {
        sum += val.get();
      }
      result.set(sum);
      context.write(key, result);
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 3) {
      System.err.println("Usage: generalmessage SEARCHSTRING <in> <out>");
      System.exit(2);
    }
    Job job = Job.getInstance(conf, args[0]);
    job.setJarByClass(GeneralMessage.class);
    job.setMapperClass(GeneralMapper.class);
    job.setCombinerClass(GeneralReducer.class);
    job.setReducerClass(GeneralReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[1]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[2]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

The error I get when I try to run the non-working program:

jward@pu1:~/CS4000/Homework7/generalmessage$ yarn jar GeneralMessage.jar GeneralMessage ScaryMess input generaloutput
19/04/25 20:23:00 INFO client.RMProxy: Connecting to ResourceManager at pu1.cs.ohio.edu/132.235.1.15:8032
19/04/25 20:23:01 INFO input.FileInputFormat: Total input paths to process : 2
19/04/25 20:23:01 INFO mapreduce.JobSubmitter: number of splits:35
19/04/25 20:23:01 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1556192514620_0026
19/04/25 20:23:01 INFO impl.YarnClientImpl: Submitted application application_1556192514620_0026
19/04/25 20:23:01 INFO mapreduce.Job: The url to track the job: http://pu1.cs.ohio.edu:8088/proxy/application_1556192514620_0026/
19/04/25 20:23:01 INFO mapreduce.Job: Running job: job_1556192514620_0026
19/04/25 20:23:06 INFO mapreduce.Job: Job job_1556192514620_0026 running in uber mode : false
19/04/25 20:23:06 INFO mapreduce.Job:  map 0% reduce 0%
19/04/25 20:23:08 INFO mapreduce.Job: Task Id : attempt_1556192514620_0026_m_000000_0, Status : FAILED
Error: java.lang.NullPointerException
    at GeneralMessage$GeneralMapper.map(GeneralMessage.java:54)
    at GeneralMessage$GeneralMapper.map(GeneralMessage.java:30)
    at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:146)
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1754)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)

19/04/25 20:23:09 INFO mapreduce.Job:  map 3% reduce 0%
19/04/25 20:23:09 INFO mapreduce.Job: Task Id : attempt_1556192514620_0026_m_000006_0, Status : FAILED
Error: java.lang.NullPointerException
    at GeneralMessage$GeneralMapper.map(GeneralMessage.java:54)
    at GeneralMessage$GeneralMapper.map(GeneralMessage.java:30)
    at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:146)
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1754)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)

Container killed by the ApplicationMaster.
Container killed on request. Exit code is 143
Container exited with a non-zero exit code 143
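
Looking at the trace, GeneralMessage.java:54 appears to be the message.length() call in map(). message is assigned in setup() from conf.get("Message"), but nothing in main() ever stores a value under the key "Message"; Job.getInstance(conf, args[0]) only uses args[0] as the job name and does not put the string into the Configuration. So I suspect conf.get("Message") returns null in every mapper, and the first dereference throws the NullPointerException. Would a one-line change in main() like this fix it? (A sketch, untested; the set has to come before Job.getInstance, because the Job copies the Configuration when it is created.)

    conf.set("Message", otherArgs[0]);  // store the search string so setup() can read it
    Job job = Job.getInstance(conf, "General Message");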
