Adding an image to a Hadoop sequence file

Asked by 1hdlvixo on 2021-06-04 in Hadoop

I am trying to run a Java program on a Hadoop system that stores an image in a sequence file and then reads that sequence file back. The sequence file gets created, but the image data is not appended to it.
I run the code below with this command:
sudo -u hdfs hadoop jar /usr/java_jar/imagestorage.jar ImageStorage 12e2baa2aee0e455ac40015942b682c4b.jpg
Please help me.

import java.io.*;
import java.util.*;
import java.net.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer.Option;
import org.apache.hadoop.io.Writable;

public class ImageStorage {
   private static void openOutputFile(String args1) throws Exception {
    String uri = "hdfs://localhost:8020/";

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    Path path = new Path("hdfs://localhost:8020/user/img_data/SequenceFileCodecTest.seq");

    String string1 = "hdfs://localhost:8020/user/img_data/";
    string1 = string1 + args1;

    Path inPath = new Path(string1);

    FSDataInputStream in = null;
    Text key = new Text();
    BytesWritable value = new BytesWritable();
    SequenceFile.Writer writer = null;
    try{
        in = fs.open(inPath);
        byte buffer[] = new byte[in.available()];
        in.read(buffer);
        System.out.println(buffer);
        in.close();

        Option optPath = SequenceFile.Writer.file(path);
        Option optKey = SequenceFile.Writer.keyClass(key.getClass());
        Option optVal = SequenceFile.Writer.valueClass(value.getClass());
        Option optCom = SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK);
        FSDataOutputStream fileOutputStream = fs.append(path);
        BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fileOutputStream));

        writer.append(new Text(inPath.getName()), new BytesWritable(buffer));
        br.close();
        fileOutputStream.close();

    }catch (Exception e) {
        System.out.println("Exception MESSAGES = "+e.getMessage());
    }
    finally {
        IOUtils.closeStream(writer);
        System.out.println("last line of the code....!!!!!!!!!!");

    }

  }

   private static void openReadFile() throws Exception {
     String uri = "hdfs://localhost:8020/";

     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.get(URI.create(uri), conf);
     Path path = new Path("hdfs://localhost:8020/user/img_data/SequenceFileCodecTest.seq");
     /* Reading Operations */

     org.apache.hadoop.io.SequenceFile.Reader.Option filePath = SequenceFile.Reader.file(path);
     SequenceFile.Reader sequenceFileReader = new SequenceFile.Reader(conf,filePath);

     Writable key1 = (Writable) ReflectionUtils.newInstance(
            sequenceFileReader.getKeyClass(), conf);
     Writable value1 = (Writable) ReflectionUtils.newInstance(
            sequenceFileReader.getValueClass(), conf);

     try {

        while (sequenceFileReader.next(key1, value1)) {
            System.out.printf("[%s] %s %s \n", sequenceFileReader.getPosition(), key1,value1.getClass());
        }
     } finally {
        IOUtils.closeStream(sequenceFileReader);
     }
     /* Reading operations */

}
public static void main(String[] args) throws Exception {
    openOutputFile(args[1]);
    openReadFile();
}

}
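
For reference, here is a minimal sketch (not taken from the post) of how the write path is usually set up: the SequenceFile.Writer is obtained from SequenceFile.createWriter using the same options built in the question, and the writer itself performs the append, so no separate FSDataOutputStream or BufferedWriter is involved. The class name ImageSequenceFileWriteSketch is only a placeholder, and the HDFS paths are simply copied from the question.

import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class ImageSequenceFileWriteSketch {
   public static void main(String[] args) throws IOException {
    // args[0]: image file name under /user/img_data (illustrative only)
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:8020/"), conf);
    Path seqPath = new Path("hdfs://localhost:8020/user/img_data/SequenceFileCodecTest.seq");
    Path imgPath = new Path("hdfs://localhost:8020/user/img_data/" + args[0]);

    // Read the whole image into a byte array using the real file length,
    // rather than in.available(), which is not guaranteed to be the full size.
    byte[] buffer = new byte[(int) fs.getFileStatus(imgPath).getLen()];
    try (FSDataInputStream in = fs.open(imgPath)) {
        in.readFully(buffer);
    }

    // Build the writer from the options; the writer appends the record itself.
    SequenceFile.Writer writer = null;
    try {
        writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(seqPath),
                SequenceFile.Writer.keyClass(Text.class),
                SequenceFile.Writer.valueClass(BytesWritable.class),
                SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK));
        writer.append(new Text(imgPath.getName()), new BytesWritable(buffer));
    } finally {
        IOUtils.closeStream(writer);
    }
   }
}

If the goal is to keep adding images to a sequence file that already exists, newer Hadoop versions (2.6.1 and later) also provide a SequenceFile.Writer.appendIfExists(true) option that can be passed to createWriter.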
