如何统计Hadoop的不同单词数量

时间:2018-03-18 11:52:12

标签: java hadoop mapreduce

以下代码是一个简单的单词计数程序。该程序生成的输出文件内容类似于：

key-value:
hello 5
world 10
good 4
morning 10
nice 5

但我的目标是计算单词的数量。结果应该是5,这是否意味着我需要计算键的数量?如果是这样,我如何计算键数?

以下是功能的代码:

映射器

import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WordCountMapper extends Mapper<Object, Text, Text, IntWritable> {
    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    public void map(Object key,Text value, Context context) throws IOException, InterruptedException {
        String remove_pinct = value.toString.replaceAll("[\\pP+~$`^=|<>~`$^+=|<>¥×]", " ");
        StringTokenizer itr = new StringTokenizer(value.toString());

        while(itr.hasMoreTokens()) {
            word.set(itr.nextToken());
            context.write (word,one);
        }
    }
}

归约器（Reducer）

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    private IntWritable result = new IntWritable();

    public void reduce (Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        int sum = 0;

        for (IntWritable val : values) {
            sum += val.get();
        }

        result.set(sum);
        context.write(key, result);
    }
}

工作控制

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountJobControl {
    /**
     * Configures and submits the word-count job.
     * args[0] = input path, args[1] = output path.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, " word count ");
        job.setJarByClass(WordCountJobControl.class);

        // The reducer doubles as a combiner: summing counts is associative.
        job.setMapperClass(WordCountMapper.class);
        job.setCombinerClass(WordCountReducer.class);
        job.setReducerClass(WordCountReducer.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Exit code 0 on success, 1 on failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

2 个答案:

答案 0 :(得分:2)

您可以将减速器的数量限制为一个:

job.setNumReduceTasks(1);

然后在您的 reducer 中统计 reduce 方法被调用的次数，并在 cleanup 方法中写出该计数值，如下所示：

public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    // Number of distinct keys this reducer has seen; meaningful only when the
    // job runs a single reduce task and no combiner.
    private int wordCount;

    @Override
    protected void setup(Context context) {
        wordCount = 0;
    }

    /** Each invocation corresponds to one distinct word, so just count calls. */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        wordCount++;
    }

    /** Emit the distinct-word total once, after all keys have been processed. */
    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        context.write(new Text("WordCount"), new IntWritable(wordCount));
    }
}

您还需要删除设置组合器（combiner）类的这一行：

job.setCombinerClass(WordCountReducer.class);

答案 1 :(得分:1)

映射器（Mapper）

public class WordCountMapper extends Mapper<LongWritable,Text,Text,IntWritable>
{
    protected void map(LongWritablekey,Textvalue,Context context) throws IOException,InterruptedException
    {
        String words[]=value.toString().split(",");
        for(String word:words)
            context.write(new Text(word),new IntWritable(1));
    }
}

归约器（Reducer）

    public class WordCountReducer extends Reducer<Text,IntWritable,IntWritable,IntWritable>
    {
    protected void reduce(Text word,Iterable<IntWritable>values,Context context) throws IOException,InterruptedException
        {
            int count=0,len;
            for(IntWritableval:values)
                count+=val.get();
            context.write(new IntWritable(word.toString().length()),new IntWritable(count));
        }
    }

工作控制

public class WordCountJobControl
{
    /**
     * Configures and submits the job. args[0] = input path, args[1] = output path.
     *
     * Fixes vs. original: the mapper emits (Text, IntWritable) while the
     * reducer emits (IntWritable, IntWritable) pairs. The original declared
     * Text as the job's output key, so the reducer's context.write would fail
     * with a type mismatch at runtime. The map output types are now set
     * explicitly and the final output key is IntWritable to match the reducer.
     */
    public static void main(String args[]) throws Exception
    {
        Job job = new Job();
        job.setJobName("Length");
        job.setJarByClass(WordCountJobControl.class);
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        // Intermediate (map-side) types differ from the final output types,
        // so both must be declared.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}