Appending an image to a Hadoop sequence file

Date: 2015-02-06 10:38:34

Tags: hadoop mapreduce hdfs sequencefile

I am trying to run a Java program on my Hadoop system that stores an image in a sequence file and then reads that sequence file back. The sequence file gets created, but the image data is never appended to it.

I am trying to run the code with this command:

sudo -u hdfs hadoop jar /usr/java_jar/ImageStorage.jar ImageStorage 12e2baa2ae0e455ac40015942b682c4b.jpg

Please help me.

import java.io.*;
import java.util.*;
import java.net.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer.Option;
import org.apache.hadoop.io.Writable;

public class ImageStorage {
   private static void openOutputFile(String args1) throws Exception {
    String uri = "hdfs://localhost:8020/";

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    Path path = new Path("hdfs://localhost:8020/user/img_data/SequenceFileCodecTest.seq");

    String string1 = "hdfs://localhost:8020/user/img_data/";
    string1 = string1 + args1;

    Path inPath = new Path(string1);

    FSDataInputStream in = null;
    Text key = new Text();
    BytesWritable value = new BytesWritable();
    SequenceFile.Writer writer = null;
    try{
        in = fs.open(inPath);
        byte buffer[] = new byte[in.available()];
        in.read(buffer);
        System.out.println(buffer);
        in.close();

        Option optPath = SequenceFile.Writer.file(path);
        Option optKey = SequenceFile.Writer.keyClass(key.getClass());
        Option optVal = SequenceFile.Writer.valueClass(value.getClass());
        Option optCom = SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK);
        FSDataOutputStream fileOutputStream = fs.append(path);
        BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fileOutputStream));

        writer.append(new Text(inPath.getName()), new BytesWritable(buffer));
        br.close();
        fileOutputStream.close();

    }catch (Exception e) {
        System.out.println("Exception MESSAGES = "+e.getMessage());
    }
    finally {
        IOUtils.closeStream(writer);
        System.out.println("last line of the code....!!!!!!!!!!");

    }
  }

   private static void openReadFile() throws Exception {
     String uri = "hdfs://localhost:8020/";

     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.get(URI.create(uri), conf);
     Path path = new Path("hdfs://localhost:8020/user/img_data/SequenceFileCodecTest.seq");
     /* Reading Operations */

     org.apache.hadoop.io.SequenceFile.Reader.Option filePath = SequenceFile.Reader.file(path);
     SequenceFile.Reader sequenceFileReader = new SequenceFile.Reader(conf,filePath);

     Writable key1 = (Writable) ReflectionUtils.newInstance(
            sequenceFileReader.getKeyClass(), conf);
     Writable value1 = (Writable) ReflectionUtils.newInstance(
            sequenceFileReader.getValueClass(), conf);

     try {

        while (sequenceFileReader.next(key1, value1)) {
            System.out.printf("[%s] %s %s \n", sequenceFileReader.getPosition(), key1,value1.getClass());
        }
     } finally {
        IOUtils.closeStream(sequenceFileReader);
     }
     /* Reading operations */

}
public static void main(String[] args) throws Exception {
    openOutputFile(args[1]);
    openReadFile();
}

}
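For reference, here is a minimal sketch of how image bytes are usually written into a SequenceFile with the Hadoop 2.x Writer-options API. This is an illustration rather than a fix of the exact code above: it reuses the same HDFS paths from the question, takes the image name as the first program argument, and obtains the writer through SequenceFile.createWriter (in the posted code, writer is never assigned before writer.append(...) is called, and fs.append(path) only opens a raw byte stream on the .seq file). The class name ImageWriteSketch is made up for the example.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class ImageWriteSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020/"), conf);

        Path seqPath = new Path("hdfs://localhost:8020/user/img_data/SequenceFileCodecTest.seq");
        Path imgPath = new Path("hdfs://localhost:8020/user/img_data/" + args[0]);

        // Size the buffer from the file status; getLen() is more reliable
        // than InputStream.available() for reading the whole image.
        byte[] buffer = new byte[(int) fs.getFileStatus(imgPath).getLen()];
        try (FSDataInputStream in = fs.open(imgPath)) {
            in.readFully(buffer);
        }

        // Create the writer through SequenceFile.createWriter; this writes the
        // sequence-file header and returns a writer that append() can be called on.
        SequenceFile.Writer writer = null;
        try {
            writer = SequenceFile.createWriter(conf,
                    SequenceFile.Writer.file(seqPath),
                    SequenceFile.Writer.keyClass(Text.class),
                    SequenceFile.Writer.valueClass(BytesWritable.class),
                    SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK));
            // Key: image file name; value: raw image bytes.
            writer.append(new Text(imgPath.getName()), new BytesWritable(buffer));
        } finally {
            IOUtils.closeStream(writer);
        }
    }
}

Note that SequenceFile.createWriter as used here creates (or overwrites) the target file; adding records to an already existing sequence file needs the Writer.appendIfExists(true) option that newer Hadoop releases provide.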

0 Answers:

No answers yet