错误:java.io.IOException:键入map中的键不匹配:expected org.apache.hadoop.io.Text,收到org.apache.hadoop.io.LongWritable

时间:2017-05-23 19:15:56

标签: java apache hadoop mapreduce

我是hadoop的新手并试图从书中运行一个示例程序。我面临错误 错误:java.io.IOException:键入map中的键不匹配:expected org.apache.hadoop.io.Text,收到org.apache.hadoop.io.LongWritable 以下是我的代码

package com.hadoop.employee.salary;

import java.io.IOException;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class AvgMapper extends Mapper<LongWritable,Text,Text,FloatWritable>{

public void Map(LongWritable key,Text empRec,Context con) throws  IOException,InterruptedException{
        String[] word = empRec.toString().split("\\t");
        String sex = word[3];
        Float salary = Float.parseFloat(word[8]);
        try {
            con.write(new Text(sex), new FloatWritable(salary));
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}

package com.hadoop.employee.salary;

import java.io.IOException;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class AvgSalReducer extends Reducer<Text,FloatWritable,Text,Text> {

    public void reduce(Text key,Iterable<FloatWritable> valuelist,Context con)
    throws IOException,
                   InterruptedException
    {
        float total =(float)0;
        int count =0;
        for(FloatWritable var:valuelist)
        {
            total += var.get();
            System.out.println("reducer"+var.get());
            count++;
        }
        float avg =(float) total/count;
        String out = "Total: " + total + " :: " + "Average: " + avg;
        try {
            con.write(key,new Text(out));
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}

package com.hadoop.employee.salary;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class AvgSalary {

    /**
     * Driver: configures and submits the average-salary job.
     * args[0] = input path, args[1] = output path.
     */
    public static void main(String[] args) throws IOException {
        if (args.length != 2) {
            // Exit immediately; the original fell through and ran anyway.
            System.err.println("Usage: AvgSalary <input path> <output path>");
            System.exit(-1);
        }
        // Job.getInstance() replaces the deprecated new Job() constructor.
        Job job = Job.getInstance();
        job.setJarByClass(AvgSalary.class); // helps hadoop find the relevant jar if there are multiple jars
        job.setJobName("Avg Salary");
        job.setMapperClass(AvgMapper.class);
        job.setReducerClass(AvgSalReducer.class);

        // Final (reducer) output types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Map output value (FloatWritable) differs from the job output value
        // (Text), so the intermediate types MUST be declared explicitly;
        // leaving these commented out makes the shuffle expect Text values.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(FloatWritable.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        try {
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        } catch (ClassNotFoundException | InterruptedException e) {
            e.printStackTrace();
            System.exit(1);
        }
    }
}

1 个答案:

答案 0 :(得分:1)

在您的 Mapper 类中,您把 map 方法命名成了 Map(大写 M),而它应该是小写的 map。由于方法名不匹配,您并没有真正覆盖 Mapper 的 map 方法,框架于是调用了默认的恒等实现,原样发出输入的键/值类型——因此中间键是 LongWritable,而不是您声明的 Text。

将方法名改为 map 即可修复此错误。建议同时在方法上加 @Override 注解,这样方法名写错时编译器会直接报错。