SerDe for a simple sequence file in Hive

Posted: 2014-01-03 07:24:25

Tags: hadoop hive

I have a sequence file containing a Text key and a DoubleWritable value. When I load the file as an external table with
Create external table t (id String, data Double) STORED AS SEQUENCEFILE LOCATION '/output';

it is created successfully. However, when I try to view the data with a select * statement, I get the exception

"Failed with exception java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: class org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe: expects either BytesWritable or Text object!"

From the exception I understand that the SerDe is not right. I tried to implement a SerDe for it, but could not get it to work. How can I implement a simple SerDe for this?

1 Answer:

Answer 0: (score: 3)

Solution: I changed the input format slightly and found a solution. 1) The exception occurs because Hive ignores the key of a sequence file by default and only hands the value to the SerDe, so the DoubleWritable value cannot be matched against the table schema and the exception is thrown.
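For context, here is a minimal sketch of producing a sequence file with the layout the question describes (Text key, DoubleWritable value); the output path and sample records are assumptions, not from the question. With the default sequence file handling, Hive drops the key and passes only the DoubleWritable value to LazySimpleSerDe, which causes the exception above.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class WriteDoubleSequenceFile {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path("/output/part-00000");   // hypothetical file under the table location
    // Text key, DoubleWritable value -- the layout described in the question
    SequenceFile.Writer writer =
        SequenceFile.createWriter(fs, conf, path, Text.class, DoubleWritable.class);
    try {
      writer.append(new Text("id1"), new DoubleWritable(1.5));
      writer.append(new Text("id2"), new DoubleWritable(2.5));
    } finally {
      writer.close();
    }
  }
}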

I implemented a custom InputFormat. First, an abstract record reader:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.util.ReflectionUtils;

// Reads a sequence file but always hands Hive a BytesWritable value,
// built from the real key and value by the subclass in combineKeyValue().
public abstract class PSequenceFileKeyRecordReader<K, V> implements RecordReader<K, BytesWritable> {

  private SequenceFile.Reader in;
  private long start;
  private long end;
  private boolean more = true;
  protected Configuration conf;

  public PSequenceFileKeyRecordReader(Configuration conf, FileSplit split)
    throws IOException {
    Path path = split.getPath();
    FileSystem fs = path.getFileSystem(conf);
    this.in = new SequenceFile.Reader(fs, path, conf);
    this.end = split.getStart() + split.getLength();
    this.conf = conf;

    if (split.getStart() > in.getPosition())
      in.sync(split.getStart());                  // sync to start

    this.start = in.getPosition();
    more = start < end;
  }


  public Class getKeyClass() { return in.getKeyClass(); }


  public Class getValueClass() { return in.getValueClass(); }

  @SuppressWarnings("unchecked")
  public K createKey() {
    return (K) ReflectionUtils.newInstance(getKeyClass(), conf);
  }


  public float getProgress() throws IOException {
    if (end == start) {
      return 0.0f;
    } else {
      return Math.min(1.0f, (in.getPosition() - start) / (float)(end - start));
    }
  }

  public synchronized long getPos() throws IOException {
    return in.getPosition();
  }

  protected synchronized void seek(long pos) throws IOException {
    in.seek(pos);
  }
  public synchronized void close() throws IOException { in.close(); }


  @Override
  public boolean next(K key, BytesWritable value) throws IOException {
    if (!more) return false;

    long pos = in.getPosition();
    // read the real value from the file, then let the subclass pack key and
    // value into the BytesWritable that Hive's LazySimpleSerDe can parse
    @SuppressWarnings("unchecked")
    V trueValue = (V) ReflectionUtils.newInstance(in.getValueClass(), conf);
    boolean remaining = in.next((Writable) key, (Writable) trueValue);
    if (remaining) combineKeyValue(key, trueValue, value);
    if (pos >= end && in.syncSeen()) {
      more = false;              // reached the end of this split at a sync point
    } else {
      more = remaining;
    }
    return more;
  }

  protected abstract void combineKeyValue(K key, V trueValue, BytesWritable newValue);
}

The main reader class extends it and combines the Text key and the DoubleWritable value into a BytesWritable:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileSplit;


public class DoubleTextReader extends PSequenceFileKeyRecordReader<Text, DoubleWritable>{

    public DoubleTextReader(Configuration conf, FileSplit split)
            throws IOException {
        super(conf, split);

    }

    @Override
    protected void combineKeyValue(Text key, DoubleWritable trueValue,
            BytesWritable newValue) {
        // join key and value with '\001', Hive's default field delimiter,
        // so LazySimpleSerDe can split them back into the table's two columns
        StringBuilder builder = new StringBuilder();
        builder.append(key);
        builder.append('\001');
        builder.append(trueValue.get());
        newValue.set(new BytesWritable(builder.toString().getBytes()));
    }

    @Override
    public BytesWritable createValue() {        
        return new BytesWritable();
    }

}
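Because the value handed back to Hive is just delimited text wrapped in a BytesWritable, the stock LazySimpleSerDe can deserialize it as-is; only the InputFormat has to change, and no custom SerDe is needed.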

The custom InputFormat class:

import java.io.IOException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;

public class PSequenceFileKeyInputFormat<K, V> extends FileInputFormat<K, V> {

  public PSequenceFileKeyInputFormat() {
    setMinSplitSize(SequenceFile.SYNC_INTERVAL);
  }

  @Override
  protected FileStatus[] listStatus(JobConf job) throws IOException {
    FileStatus[] files = super.listStatus(job);
    for (int i = 0; i < files.length; i++) {
      FileStatus file = files[i];
      if (file.isDir()) {     // it's a MapFile: use its data file instead
        Path dataFile = new Path(file.getPath(), MapFile.DATA_FILE_NAME);
        FileSystem fs = file.getPath().getFileSystem(job);
        files[i] = fs.getFileStatus(dataFile);
      }
    }
    return files;
  }

  @SuppressWarnings("unchecked")
  public RecordReader<K, V> getRecordReader(InputSplit split,
                                            JobConf job, Reporter reporter)
      throws IOException {

    reporter.setStatus(split.toString());

    // hand back the Text/DoubleWritable reader defined above
    return (RecordReader<K, V>) new DoubleTextReader(job, (FileSplit) split);
  }
}

The table can then be created with the command:
 Create external table t(id String, Bytes Double) STORED AS INPUTFORMAT 'PSequenceFileKeyInputFormat' OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat' location '/output';
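Note that the jar containing PSequenceFileKeyInputFormat, PSequenceFileKeyRecordReader, and DoubleTextReader has to be on Hive's classpath (for example via ADD JAR) before running the CREATE TABLE and SELECT statements, and if the classes live in a package, the fully qualified class name must be used in the INPUTFORMAT clause.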