MapReduce作业挂起

时间:2015-03-14 20:13:41

标签: java hadoop mapreduce

我是Hadoop的MapReduce新手。我写了一个map reduce任务,我试图在我的本地机器上运行它。但是这个工作在地图100%之后挂起了。

以下是代码,我不明白我错过了什么。

我有自定义密钥类

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;

public class AirlineMonthKey implements WritableComparable<AirlineMonthKey>{

Text airlineName;
Text month;

public AirlineMonthKey(){
    super();
}

public AirlineMonthKey(Text airlineName, Text month) {
    super();
    this.airlineName = airlineName;
    this.month = month;
}

public Text getAirlineName() {
    return airlineName;
}

public void setAirlineName(Text airlineName) {
    this.airlineName = airlineName;
}

public Text getMonth() {
    return month;
}

public void setMonth(Text month) {
    this.month = month;
}

@Override
public void readFields(DataInput in) throws IOException {
    // TODO Auto-generated method stub
    this.airlineName.readFields(in);
    this.month.readFields(in);
}

@Override
public void write(DataOutput out) throws IOException {
    // TODO Auto-generated method stub
    this.airlineName.write(out);
    this.month.write(out);      
}

@Override
public int compareTo(AirlineMonthKey airlineMonthKey) {
    // TODO Auto-generated method stub
    int diff = getAirlineName().compareTo(airlineMonthKey.getAirlineName());
    if(diff != 0){
        return diff;
    }

    int m1 = Integer.parseInt(getMonth().toString());
    int m2 = Integer.parseInt(airlineMonthKey.getMonth().toString());

    if(m1>m2){
        return -1;
    }
    else 
        return 1;
}


}

和mapper以及使用自定义键的reducer类如下所示。

package com.mapresuce.secondarysort;

import java.io.IOException;
import java.io.StringReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import com.opencsv.CSVReader;

public class FlightDelayByMonth {

public static class FlightDelayByMonthMapper extends
        Mapper<Object, Text, AirlineMonthKey, Text> {
    public void map(Object key, Text value, Context context)
            throws IOException, InterruptedException {
        String str = value.toString();
        // Reading Line one by one from the input CSV.
        CSVReader reader = new CSVReader(new StringReader(str));
        String[] split = reader.readNext();
        reader.close();

        String airlineName = split[6];
        String month = split[2];
        String year = split[0];
        String delayMinutes = split[37];
        String cancelled = split[41];

        if (!(airlineName.equals("") || month.equals("") || delayMinutes
                .equals(""))) {
            if (year.equals("2008") && cancelled.equals("0.00")) {
                AirlineMonthKey airlineMonthKey = new AirlineMonthKey(
                        new Text(airlineName), new Text(month));
                Text delay = new Text(delayMinutes);
                context.write(airlineMonthKey, delay);
                System.out.println("1");
            }
        }

    }
}

public static class FlightDelayByMonthReducer extends
        Reducer<AirlineMonthKey, Text, Text, Text> {


    public void reduce(AirlineMonthKey key, Iterable<Text> values,
            Context context) throws IOException, InterruptedException {
        for(Text val : values){
            context.write(new Text(key.getAirlineName().toString()+" "+key.getMonth().toString()), val);
        }
    }
}

public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {   
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args)
            .getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage:<in> <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "Average monthly flight dealy");
    job.setJarByClass(FlightDelayByMonth.class);
    job.setMapperClass(FlightDelayByMonthMapper.class);
    job.setReducerClass(FlightDelayByMonthReducer.class);
    job.setOutputKeyClass(AirlineMonthKey.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
}

我还在主要部分创建了一个作业和配置。不知道我错过了什么。我在当地环境中运行所有这些。

2 个答案:

答案 0 :(得分:0)

尝试在AirlineMonthKey类中编写toString,equals和hashcode的自定义实现。

请阅读以下链接。

http://hadoop.apache.org/docs/stable/api/org/apache/hadoop/io/WritableComparable.html

密钥类型实现hashCode()非常重要。

希望这可以帮到你。

答案 1 :(得分:0)

问题是我必须使用AirlineMonthKey中的默认构造函数(我做过)并初始化自定义密钥类中的实例变量(我没有)。