HibInputFormat class not found: getting a NoClassDefFoundError

Date: 2016-01-01 19:23:33

Tags: java hadoop hipi

  

hduser@akshay-Lenovo-G580:~$ hadoop jar /home/hduser/HipiDemo.jar HelloWorld sampleimages.hib sampleimages_average
Warning: $HADOOP_HOME is deprecated.

     

Exception in thread "main" java.lang.NoClassDefFoundError: org/hipi/imagebundle/mapreduce/HibInputFormat
    at HelloWorld.run(HelloWorld.java:44)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:79)
    at HelloWorld.main(HelloWorld.java:67)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:160)
Caused by: java.lang.ClassNotFoundException: org.hipi.imagebundle.mapreduce.HibInputFormat
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    ... 9 more

My code:

import org.hipi.image.FloatImage;

import java.io.IOException;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.hipi.image.HipiImageHeader;
import org.hipi.imagebundle.mapreduce.HibInputFormat;

public class HelloWorld extends Configured implements Tool {

public static class HelloWorldMapper extends Mapper<HipiImageHeader, FloatImage, IntWritable, FloatImage> {
  public void map(HipiImageHeader key, FloatImage value, Context context)
      throws IOException, InterruptedException {
  }
}

public static class HelloWorldReducer extends Reducer<IntWritable, FloatImage, IntWritable, Text> {
  public void reduce(IntWritable key, Iterable<FloatImage> values, Context context)
      throws IOException, InterruptedException {
  }
}

public int run(String[] args) throws Exception {
  // Check input arguments
  if (args.length != 2) {
    System.out.println("Usage: helloWorld <input HIB> <output directory>");
    System.exit(0);
  }

  // Initialize and configure the MapReduce job
  //Job job = Job.getInstance();
  Job job = new Job(getConf(), "Employee Salary");

  // Set the input format class, which parses the input HIB and spawns map tasks
  job.setInputFormatClass(HibInputFormat.class);

  // Set the driver, mapper, and reducer classes which express the computation
  job.setJarByClass(HelloWorld.class);
  job.setMapperClass(HelloWorldMapper.class);
  job.setReducerClass(HelloWorldReducer.class);

  // Set the types for the key/value pairs passed to/from the map and reduce layers
  job.setMapOutputKeyClass(IntWritable.class);
  job.setMapOutputValueClass(FloatImage.class);
  job.setOutputKeyClass(IntWritable.class);
  job.setOutputValueClass(Text.class);

  // Set the input and output paths on HDFS
  FileInputFormat.setInputPaths(job, new Path(args[0]));
  FileOutputFormat.setOutputPath(job, new Path(args[1]));

  // Execute the MapReduce job and block until it completes
  boolean success = job.waitForCompletion(true);

  // Return success or failure
  return success ? 0 : 1;
}

public static void main(String[] args) throws Exception {
  ToolRunner.run(new HelloWorld(), args);
  System.exit(0);
}

}

1 Answer:

Answer 0 (score: 0)

Add the jar that contains the HibInputFormat class to the classpath.
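One common way to do that when launching the job is to put the HIPI jar on the client classpath and also ship it to the tasks with the generic -libjars option, which ToolRunner already handles in this code. This is only a sketch: the HIPI jar name and path below are assumptions, so point them at wherever your HIPI jar actually lives.

    # Make the HIPI classes visible to the client JVM that runs the driver
    # (the jar path is an assumption -- use the jar produced by your HIPI build)
    export HADOOP_CLASSPATH=/home/hduser/hipi/hipi-2.1.0.jar:$HADOOP_CLASSPATH

    # Ship the same jar to the map/reduce tasks via the generic -libjars option,
    # which GenericOptionsParser/ToolRunner strips out before passing args to run()
    hadoop jar /home/hduser/HipiDemo.jar HelloWorld \
        -libjars /home/hduser/hipi/hipi-2.1.0.jar \
        sampleimages.hib sampleimages_average

Alternatively, building a single jar that bundles the HIPI classes together with HelloWorld avoids the classpath handling entirely.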

Or, if you are compiling from the command line, include the HIPI jar on the compile-time classpath as well.

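A minimal sketch of such a compile step, assuming the HIPI jar name and location shown (substitute the jar from your own HIPI build):

    # Compile against the Hadoop and HIPI classes
    # (the HIPI jar path is an assumption; use the jar from your own HIPI build)
    javac -cp "$(hadoop classpath):/home/hduser/hipi/hipi-2.1.0.jar" HelloWorld.java

    # Package the compiled classes into the job jar
    jar cf HipiDemo.jar HelloWorld*.class

Note that this only fixes compile-time resolution; because the error above is thrown at run time, the HIPI classes still have to be on the classpath when the job is launched, as in the earlier sketch.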