Why doesn't my custom partitioner work in a MapReduce program?

Asked: 2015-02-26 20:04:18

Tags: hadoop mapreduce

I have written a sample MapReduce program with a custom partitioner. My input file contains numbers and words, and the numbers are a mix of even and odd. I want to partition on the number so that even numbers go to one reducer and odd numbers go to the other. I set the number of reduce tasks to 2, but all the data shows up in only one reducer, so I guess the partitioner is not working properly. Please look at the inline program and correct me if I have gone wrong anywhere.

package com.mapreduce;

import java.io.IOException;

import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class FindEvenOddNumber {

    public static class MyMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        Text emitKey = new Text();
        IntWritable emitValue = new IntWritable();

        public void map(LongWritable key, Text value, Context context) throws InterruptedException, IOException {
            String line = value.toString();
            String[] data = line.split("\\t");
            if (data.length == 2) {
                emitValue.set(Integer.valueOf(data[0]));
                emitKey.set(data[1]);
                context.write(emitKey, emitValue);
            }
        }
    }


    public static class MyReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            for(IntWritable value : values)
            {
                context.write(key, value);
            }
        }
    }

    public static class ReducerPartitioner extends Partitioner<Text, IntWritable> {

        @Override
        public int getPartition(Text key, IntWritable value, int numReduceTasks) {

            int number = value.get();

            if (numReduceTasks == 0) {
                return 0;
            }

            if (number % 2 == 0) {
                System.out.println("You entered an even number.");
                return 1 % numReduceTasks;
            }
            /*if (number > 2) {
                return 1 % numReduceTasks;
            }*/
            else {
                return 2 % numReduceTasks;
            }
        }
    }


    /**
     * @param args
     */
    public static void main(String args[]) throws IOException,InterruptedException, ClassNotFoundException {

        Configuration conf = new Configuration();
        String userArgs[] = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (userArgs.length < 2) {
            System.out.println("Usage: hadoop jar jarfilename mainclass input output");
            System.exit(1);
        }

        Job job = new Job(conf, "Partitioning Even Odd Numbers");
        job.setJarByClass(FindEvenOddNumber.class);

        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReducer.class);
        job.setPartitionerClass(ReducerPartitioner.class);
        job.setNumReduceTasks(2);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);


        FileInputFormat.addInputPath(job, new Path(userArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(userArgs[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);

    }
}

My data is:

0   zero    
1   one    
2   two    
3   three    
4   four    
5   five
and so on
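
For reference, the map() above only emits a pair when a line splits into exactly two tab-separated fields, so these sample rows must really be tab-delimited for any data to flow at all. A minimal standalone check of that parse assumption (the ParseCheck class is illustrative, not part of the original job):

public class ParseCheck {
    public static void main(String[] args) {
        String tabLine = "4\tfour";    // tab-delimited, as the mapper expects
        String spaceLine = "4 four";   // space-delimited lines are silently dropped
        System.out.println(tabLine.split("\\t").length);   // 2 -> record emitted
        System.out.println(spaceLine.split("\\t").length); // 1 -> record skipped
    }
}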

1 Answer:

Answer 0 (score: 0)

This works for me:

public static class ReducerPartitioner extends Partitioner<Text, IntWritable> {

    @Override
    public int getPartition(Text key, IntWritable value, int numReduceTasks) {
        // assert that numReduceTasks is at least 2...
        return value.get() % 2 == 0 ? 0 : 1;
    }
}
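
Independent of the cluster, the routing can be sanity-checked by calling getPartition directly. A minimal sketch (the PartitionCheck and EvenOddPartitioner names are mine; the logic mirrors the answer's partitioner):

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

public class PartitionCheck {

    // Same even/odd logic as the answer's partitioner, inlined for a standalone test.
    static class EvenOddPartitioner extends Partitioner<Text, IntWritable> {
        @Override
        public int getPartition(Text key, IntWritable value, int numReduceTasks) {
            return value.get() % 2 == 0 ? 0 : 1;
        }
    }

    public static void main(String[] args) {
        EvenOddPartitioner partitioner = new EvenOddPartitioner();
        for (int n = 0; n <= 5; n++) {
            // Expect even numbers in partition 0 and odd numbers in partition 1.
            System.out.println(n + " -> partition "
                    + partitioner.getPartition(new Text("word"), new IntWritable(n), 2));
        }
    }
}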