Error in Hadoop: Exception in thread "main" java.lang.ClassNotFoundException

Posted: 2016-04-25 06:31:51

Tags: hadoop mapreduce

I have downloaded hadoop-2.7.1 (Apache Hadoop) on Ubuntu 14.04.3 LTS (GNU/Linux 3.19.0-25-generic x86_64), running in Oracle VM VirtualBox.

I am using the following commands to compile and run my code:

Compile:

hduser@dt-VirtualBox:~/Desktop/project/try1$ javac -classpath $HADOOP_HOME/share/hadoop/common/hadoop-common-2.7.1.jar:$HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.7.1.jar:$HADOOP_HOME/share/hadoop/common/lib/commons-cli-1.2.jar -d /home/hduser/Desktop/project/try1 *.java
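For reference, an alternative is to let Hadoop supply the classpath instead of listing the jars by hand; a rough sketch, assuming the hadoop command is on the PATH:

# "hadoop classpath" prints the jars used by the local Hadoop installation
javac -classpath $(hadoop classpath) -d /home/hduser/Desktop/project/try1 *.java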

Then I create a jar file from the class files and run it with:
hduser@dt-VirtualBox:~/Desktop/project/try1$ hadoop jar table_one.jar DriverMap /trial/trial/ output_tryy1
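For completeness, the jar above was packaged from the compiled class files; one way to do that looks roughly like this (a sketch, assuming the .class files are in the current directory):

# bundle the compiled classes into table_one.jar
jar cvf table_one.jar *.class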

Please find the error below:

Exception in thread "main" java.lang.ClassNotFoundException: DriverMap
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:278)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:214)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:136)

Here is my DriverMap.java file:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class DriverMap {

    public class MyMapper extends Mapper<Text, Text, Text, Text> {

        String str, token = null;
        List<String> tokens = new ArrayList<String>();
        String productId, userId, score;
        private Text word = new Text();

        public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
            str = value.toString();
            Pattern p = Pattern.compile(".*productId.*$|.*userId.*$|.*score.*$");
            Matcher m = p.matcher(str);

            while (m.find()) {
                token = m.group(0); // group 0 is always the entire match
                tokens.add(token);
            }

            //System.out.println(tokens);
            String[] a = tokens.toString().split(":|\\,|]");
            for (int j = 0; j < a.length; j = j + 6) {
                //System.out.println("a1 for " + j + " is : " + a1[j]);
                productId = a[j+1];
                userId = a[j+3];
                score = a[j+5];
                word.set(productId + "|" + userId);
                context.write(word, new Text(score));

                /*System.out.println("productId is: " + a[j+1]);
                System.out.println("userId is: " + a[j+3]);
                System.out.println("score is: " + a[j+5]);*/
            }
        }
    }




    public static void main(String[] args) throws Exception {
        // TODO Auto-generated method stub
        Configuration conf = new Configuration();

        Job job = new Job(conf, "recommendation");

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        job.setMapperClass(MyMapper.class);
        //job.setReducerClass(Reduce.class);
        job.setNumReduceTasks(0); // This turns off the reducer and our mapper result will be output to the output file.

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        job.setJarByClass(DriverMap.class);
        /* FileInputFormat.addInputPath(job, new Path(args[0]));
           FileOutputFormat.setOutputPath(job, new Path(args[1])); */
        TextInputFormat.setInputPaths(job, new Path(args[0]));
        TextOutputFormat.setOutputPath(job, new Path(args[1]));

        job.waitForCompletion(true);
    }

}

1 Answer:

Answer 0 (score: 0):

You have to provide the package name for DriverMap. Use something like "PackageName.DriverMap" when running the jar file.
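For example, a minimal sketch: assuming DriverMap.java actually begins with a hypothetical declaration such as "package mypackage;" and the jar therefore contains mypackage/DriverMap.class, the job would be launched with:

# "mypackage" is a placeholder for whatever package DriverMap is really declared in
hadoop jar table_one.jar mypackage.DriverMap /trial/trial/ output_tryy1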

Also, if you are using a jar file, you do not need to compile the src code.