线程" main"中的例外情况java.lang.ClassNotFoundException:org.myorg.MaxTemperatureWithCompression

Time: 2015-12-04 20:18:35

Tags: java mapreduce hdfs bigdata hadoop2

I am trying to run a MapReduce job (using the new API) on Hadoop 2.7.1 from the command line. I followed the steps below. Compiling and creating the jar file produced no errors.

javac -cp `hadoop classpath` MaxTemperatureWithCompression.java -d /Users/gangadharkadam/hadoopdata/build

jar -cvf MaxTemperatureWithCompression.jar /Users/gangadharkadam/hadoopdata/build

hadoop jar MaxTemperatureWithCompression.jar org.myorg.MaxTemperatureWithCompression user/ncdc/input /user/ncdc/output
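
Since RunJar looks the class up by its fully qualified name, it may be worth confirming that the jar actually holds the class at the matching package path. A minimal sketch of that check, assuming the compiled classes sit under /Users/gangadharkadam/hadoopdata/build/org/myorg:

# List the jar entries; the class should appear as org/myorg/MaxTemperatureWithCompression.class,
# not prefixed with the absolute /Users/... build path.
jar -tf MaxTemperatureWithCompression.jar

# If the entries do carry the absolute path, repackaging relative to the build
# directory with -C makes the entries start at org/:
jar -cvf MaxTemperatureWithCompression.jar -C /Users/gangadharkadam/hadoopdata/build .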

错误讯息 - 线程" main"中的例外情况java.lang.ClassNotFoundException:org.myorg.MaxTemperatureWithCompression     在java.net.URLClassLoader $ 1.run(URLClassLoader.java:366)     在java.net.URLClassLoader $ 1.run(URLClassLoader.java:355)     at java.security.AccessController.doPrivileged(Native Method)     在java.net.URLClassLoader.findClass(URLClassLoader.java:354)     at java.lang.ClassLoader.loadClass(ClassLoader.java:425)     at java.lang.ClassLoader.loadClass(ClassLoader.java:358)     at java.lang.Class.forName0(Native Method)     在java.lang.Class.forName(Class.java:274)     在org.apache.hadoop.util.RunJar.run(RunJar.java:214)     在org.apache.hadoop.util.RunJar.main(RunJar.java:136)

Java code -

package org.myorg;

//Standard Java Classes
import java.io.IOException;
import java.util.regex.Pattern;

//extends the class Configured, and implements the Tool utility class
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.GenericOptionsParser;

//send debugging messages from inside the mapper and reducer classes
import org.apache.log4j.Logger;

//Job class in order to create, configure, and run an instance of your MapReduce
import org.apache.hadoop.mapreduce.Job;

//extend the Mapper class with your own Map class and add your own processing instructions
import org.apache.hadoop.mapreduce.Mapper;

//extend it to create and customize your own Reduce class
import org.apache.hadoop.mapreduce.Reducer;

//Path class to access files in HDFS
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;

//pass required paths using the FileInputFormat and FileOutputFormat classes
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

//Writable objects for writing, reading,and comparing values during map and reduce processing
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.GzipCodec;

public class MaxTemperatureWithCompression extends Configured implements Tool {
    private static final Logger LOG = Logger.getLogger(MaxTemperatureWithCompression.class);

    //main method that invokes ToolRunner to create an instance of MaxTemperatureWithCompression
    public static void main(String[] args) throws Exception {

        int res = ToolRunner.run(new MaxTemperatureWithCompression(), args);
        System.exit(res);

    }

    //call the run method to configure the job
    public int run(String[] args) throws Exception {
        if (args.length != 2) {
            System.err.println("Usage: MaxTemperatureWithCompression <input path> <output path>");
            System.exit(-1);
        }

        Job job = Job.getInstance(getConf(), "MaxTemperatureWithCompression");

        //set the jar to use based on the class
        job.setJarByClass(MaxTemperatureWithCompression.class);

        //set the input and output path
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        //set the output key and value
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        //set the compression format
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);

        //set the mapper and reducer class
        job.setMapperClass(Map.class);
        job.setCombinerClass(Reduce.class);
        job.setReducerClass(Reduce.class);

        return job.waitForCompletion(true) ? 0 : 1;

    }

    //mapper
    public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final int MISSING = 9999;

        @Override
        public void map(LongWritable key, Text value, Context context)
        throws IOException,InterruptedException {

            String line = value.toString();
            //year occupies characters 15-18 of the fixed-width NCDC record
            String year = line.substring(15, 19);

            int airTemperature;

            //temperature is signed; skip an explicit '+' before parsing
            if (line.charAt(87) == '+') {
                airTemperature = Integer.parseInt(line.substring(88, 92));
            }
            else {
                airTemperature = Integer.parseInt(line.substring(87, 92));
            }
            //single quality-code character follows the temperature field
            String quality = line.substring(92, 93);

            if (airTemperature != MISSING && quality.matches("[01459]")) {
                context.write(new Text(year), new IntWritable(airTemperature));
            }
        }

    }

    //reducer
    public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {

        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) 
        throws IOException, InterruptedException {
            int maxValue = Integer.MIN_VALUE;
            for (IntWritable value : values) {
                maxValue = Math.max(maxValue, value.get());
            }
            context.write(key, new IntWritable(maxValue));
        }

    }

}
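
As a side note, with GzipCodec set the reducer output should come out as compressed part files. A small usage sketch for inspecting them, assuming the default part-file naming and the /user/ncdc/output path from the run command above:

# -text decompresses recognized codecs while printing (unlike -cat), so the
# gzipped part file (part-r-00000.gz under the default naming) prints as plain text
hadoop fs -ls /user/ncdc/output
hadoop fs -text /user/ncdc/output/part-r-00000.gz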

I have seen a few posts about this same problem, but they did not help me solve it. Any help with resolving this issue is much appreciated. Thanks in advance.

0 Answers:

No answers yet