I am getting NoSuchMethodException for the map &lt;init&gt; method when compiling my MapReduce code

Asked: 2015-05-21 16:25:29

Tags: mapreduce

I am trying to find the top N words from an input text file, but I cannot get the code to work: I get a runtime exception for &lt;init&gt;() in the mapper. Please help me resolve this issue; I am still trying to build expertise in this area, and any comments and suggestions from experts would really help me succeed.

import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.StringTokenizer;
import java.util.TreeMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class topn {

    /* Mapper Class */


public class topnmap extends Mapper<LongWritable,Text,Text,IntWritable>
{
    public void init()
    {

    }
    String token=",";
    Text word =new Text();
    public void map(LongWritable key,Text value,Context context)
    {
        String wrd;
        String line=value.toString().replaceAll(token, " ").trim();
        StringTokenizer str=new StringTokenizer(line);
        while(str.hasMoreTokens())
        {
            wrd=str.nextToken();
            word.set(wrd);
            try {
                context.write(word,new IntWritable(1));
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }
}

/*combiner Class */

public class topncombiner extends Reducer<Text,IntWritable,Text,IntWritable>
{
    public void reduce(Text key,Iterable<IntWritable> value,Context context)
    {
        int sum=0;
        for(IntWritable val:value)
        {
            sum +=val.get();
        }
        try {
            context.write(key,new IntWritable(sum));
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}

/* Reducer */

public class topnreduce extends Reducer<Text,IntWritable,Text,IntWritable>
{
    public void init()
    {

    }
    private Map<Text,IntWritable> m1=new HashMap();
    public void reduce(Text key,Iterable<IntWritable> value,Context context)
    {
        int sum=0;
        for(IntWritable val:value)
        {
            sum +=val.get();
        }
        m1.put(key,new IntWritable(sum));
    }

    /*Clean up */

protected void cleanup(Context context) throws IOException,InterruptedException
{

    Map<Text,IntWritable> sortedmap= sortbyvalues(m1);
    for(Text key:sortedmap.keySet())
    {

        context.write(new Text(key.toString()), sortedmap.get(key));
    }

}

}

/* comparable method */

private static <K extends Comparable,V extends Comparable> Map<K,V> sortbyvalues(Map<K,V> map) {
    List<Map.Entry<K,V>> entry=new LinkedList<Map.Entry<K,V>>(map.entrySet());
    Collections.sort(entry,new Comparator<Map.Entry<K,V>>() {

        @Override
        public int compare(Map.Entry<K, V> o1, Map.Entry<K, V> o2) {
            // TODO Auto-generated method stub
             return o2.getValue().compareTo(o1.getValue());
        }
    });
    Map<K,V> sortedmap=new LinkedHashMap<K,V>();
    for(Map.Entry<K,V> entr:entry)
    {
        sortedmap.put(entr.getKey(), entr.getValue());
    }
    return sortedmap;
    }




 public static void main(String args[]) throws IOException,
            ClassNotFoundException, InterruptedException
{
    Configuration conf=new Configuration();
    Job job=new Job(conf);
    job.setJarByClass(topn.class);
    job.setCombinerClass(topncombiner.class);
    job.setMapperClass(topnmap.class);
    job.setReducerClass(topnreduce.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    FileInputFormat.setInputPaths(job,new Path(args[0]));
    FileOutputFormat.setOutputPath(job,new Path(args[1]));

System.exit(job.waitForCompletion(true) ? 0 : 1);
}

}

1 answer:

Answer 0 (score: 0):

Your Mapper, Reducer, and Combiner classes need to be static. Otherwise Hadoop cannot instantiate them without an instance of the enclosing class, which it does not have.
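For reference, a minimal sketch of what the class declarations would look like with the static keyword added, using the same class names and generics as in the question (the method bodies are unchanged and omitted here):

public class topn {

    // Declared static so Hadoop can create it by reflection with a no-arg
    // constructor, without needing an instance of the outer topn class.
    public static class topnmap extends Mapper<LongWritable, Text, Text, IntWritable> {
        // map() body unchanged from the question
    }

    public static class topncombiner extends Reducer<Text, IntWritable, Text, IntWritable> {
        // reduce() body unchanged from the question
    }

    public static class topnreduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        // reduce() and cleanup() bodies unchanged from the question
    }

    // main() stays the same; setMapperClass/setCombinerClass/setReducerClass
    // now receive classes that Hadoop can construct on its own.
}

A non-static nested class keeps an implicit reference to an outer instance, so from Hadoop's point of view it has no usable no-arg constructor, which is why the failure shows up as a NoSuchMethodException for &lt;init&gt;().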