EOFException during readUTF in the readFields method of a custom key Writable class

Posted: 2016-04-09 06:34:01

Tags: java hadoop mapreduce eofexception

Here is my CustomWritable class; an EOFException occurs during the readUTF of name in the readFields method. Can anyone explain the problem and how to correct it?

package youtube1;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;

public class CustomWritable implements WritableComparable<CustomWritable>
{

    public String name;
    public long viewcount;

    public CustomWritable(){
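        // Hadoop needs a no-argument constructor so it can instantiate the
        // Writable (e.g. from WritableComparator) before calling readFields().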

    }


    @Override
    public void readFields(DataInput arg0) throws IOException {
        name = arg0.readUTF(); // EOFException occurs here
        viewcount=arg0.readLong();
    }

    @Override
    public void write(DataOutput arg0) throws IOException {
        // Write the fields in the same order that readFields() reads them back.
        arg0.writeUTF(name);
        arg0.writeLong(viewcount);
    }

    public CustomWritable(String n,long v){
        name=n;
        viewcount=v;
    }

    @Override
    public int compareTo(CustomWritable o) {
        if(o.getViewcount()<this.getViewcount())
            return -1;
        else return o.getViewcount()==this.getViewcount()?0:1;
    }
    public String getName() {
        return name;
    }


    public void setName(String name) {
        this.name = name;
    }


    public long getViewcount() {
        return viewcount;
    }


    public void setViewcount(long viewcount) {
        this.viewcount = viewcount;
    }


    public String toString(){
        return name+"="+viewcount;
    }

}
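To check the Writable on its own, outside the job, I can run a small round trip through write() and readFields() with the standard java.io streams. This is only a sketch; the class name RoundTripCheck and the sample values are made up for illustration:

package youtube1;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class RoundTripCheck {
    public static void main(String[] args) throws IOException {
        CustomWritable original = new CustomWritable("someVideo", 42L);

        // Serialize with write()
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Read the same bytes back with readFields()
        CustomWritable copy = new CustomWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy); // expected output: someVideo=42
    }
}

If this prints someVideo=42, then write() and readFields() at least agree with each other in isolation.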

Here is my Mapper class:

package youtube1;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;

public class Mapper2 extends Mapper<LongWritable, Text, Text, CustomWritable> {

    @Override
    public void map(LongWritable key, Text value, Context context) throws java.io.IOException, InterruptedException {
        String[] s = value.toString().split("\t");
        // Field 0 is the name and field 5 the view count, so at least 6 fields are required.
        if (s.length > 5) {
            CustomWritable c = new CustomWritable(s[0], Long.parseLong(s[5]));
            context.write(new Text("1"), c);
        }
    }

}

Here is my grouping comparator class:

package youtube1;

import org.apache.hadoop.io.WritableComparator;

public class CustomGrouper extends WritableComparator{

    public CustomGrouper(){
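        // 'true' makes WritableComparator create CustomWritable instances and
        // deserialize raw key bytes into them (via readFields) before comparing.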
        super(CustomWritable.class,true);
    }

    public int compareTo(CustomWritable c1,CustomWritable c2){
        return 0;
    }

}
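For reference, once WritableComparator has deserialized two keys it calls compare(WritableComparable, WritableComparable), whose default implementation falls back to the key's own compareTo. A comparator that groups every key together would normally override that method rather than declare a separate compareTo, roughly like the sketch below (GroupAllComparator is just an illustrative name, not a class from my project):

package youtube1;

import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;

public class GroupAllComparator extends WritableComparator {

    public GroupAllComparator() {
        super(CustomWritable.class, true);
    }

    // compare(WritableComparable, WritableComparable) is the method
    // WritableComparator dispatches to after deserializing the two keys.
    @Override
    @SuppressWarnings("rawtypes")
    public int compare(WritableComparable a, WritableComparable b) {
        return 0; // treat every key as equal, i.e. a single reduce group
    }
}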

Here is my Reducer class:

package youtube1;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;

public class Reducer2 extends Reducer<Text, CustomWritable, Text, Text> {

    @Override
    public void reduce(Text key, Iterable<CustomWritable> value, Context context) throws java.io.IOException, InterruptedException {
        int count = 0;
        for (CustomWritable c : value) {
            if (count++ < 10)
                context.write(new Text(c.toString()), new Text(""));
            else
                break;
        }
    }

}
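The driver is not shown above; the job is wired up roughly as in the sketch below (the class name Driver2, the job name and the argument paths are placeholders, not copied from the real driver):

package youtube1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Driver2 {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "top-viewed");
        job.setJarByClass(Driver2.class);

        job.setMapperClass(Mapper2.class);
        job.setReducerClass(Reducer2.class);
        // The grouping comparator registered here decides which keys
        // end up in the same reduce() call on the reduce side.
        job.setGroupingComparatorClass(CustomGrouper.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(CustomWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}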

The exception is as follows:

java.lang.Exception: java.lang.RuntimeException: java.io.EOFException
    at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:529)
Caused by: java.lang.RuntimeException: java.io.EOFException
    at org.apache.hadoop.io.WritableComparator.compare(WritableComparator.java:135)
    at org.apache.hadoop.mapreduce.task.ReduceContextImpl.nextKeyValue(ReduceContextImpl.java:157)
    at org.apache.hadoop.mapreduce.task.ReduceContextImpl.nextKey(ReduceContextImpl.java:121)
    at org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer$Context.nextKey(WrappedReducer.java:302)
    at org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:170)
    at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:627)
    at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
    at org.apache.hadoop.mapred.LocalJobRunner$Job$ReduceTaskRunnable.run(LocalJobRunner.java:319)
    at java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
    at java.util.concurrent.FutureTask.run(Unknown Source)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
    at java.lang.Thread.run(Unknown Source)
Caused by: java.io.EOFException
    at java.io.DataInputStream.readFully(Unknown Source)
    at java.io.DataInputStream.readUTF(Unknown Source)
    at java.io.DataInputStream.readUTF(Unknown Source)
    at youtube1.CustomWritable.readFields(CustomWritable.java:22)
    at org.apache.hadoop.io.WritableComparator.compare(WritableComparator.java:129)
    ... 12 more

0 Answers:

No answers yet.