org.apache.hadoop.io.Writable.write()方法的使用及代码示例

x33g5p2x  于2022-02-02 转载在 其他  
字(6.0k)|赞(0)|评价(0)|浏览(332)

本文整理了Java中org.apache.hadoop.io.Writable.write()方法的一些代码示例,展示了Writable.write()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Writable.write()方法的具体详情如下:
包路径:org.apache.hadoop.io.Writable
类名称:Writable
方法名:write

Writable.write介绍

[英]Serialize the fields of this object to out.
[中]将此对象的字段序列化为out

代码示例

代码示例来源:origin: apache/flink

@Override
public void serialize(T record, DataOutputView target) throws IOException {
	// A Hadoop Writable serializes itself; hand it the Flink output view,
	// which implements java.io.DataOutput.
	record.write(target);
}

代码示例来源:origin: apache/flink

private void writeObject(ObjectOutputStream out) throws IOException {
	// Write all regular serializable state first.
	out.defaultWriteObject();
	// The wrapped Hadoop split is Writable rather than Serializable,
	// so it must be appended to the stream manually.
	Writable split = (Writable) mapreduceInputSplit;
	split.write(out);
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

@Override
public void write(DataOutput out) throws IOException {
 // Length prefix tells readFields() how many elements to expect.
 out.writeInt(values.length);
 // Each element serializes itself via its own Writable implementation.
 for (Writable value : values) {
  value.write(out);
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

@Override
public void write(DataOutput out) throws IOException {
 // Refuse to serialize an uninitialized wrapper: readFields() on the other
 // end could not reconstruct it without a valid type tag and instance.
 if (type == NOT_SET || instance == null) {
  throw new IOException("The GenericWritable has NOT been set correctly. type="
             + type + ", instance=" + instance);
 }
 // Wire format: one-byte type tag, then the wrapped instance's payload.
 out.writeByte(type);
 instance.write(out);
}

代码示例来源:origin: apache/hive

/**
 * Serializes a Writable into memory and returns its URL-safe Base64 form,
 * suitable for embedding in text-only transports (e.g. configuration values).
 */
public static String encodeWritable(Writable key) throws IOException {
 ByteArrayOutputStream buffer = new ByteArrayOutputStream();
 DataOutputStream dataOut = new DataOutputStream(buffer);
 key.write(dataOut);
 // Flush so every byte reaches the underlying buffer before encoding.
 dataOut.flush();
 return Base64.encodeBase64URLSafeString(buffer.toByteArray());
}

代码示例来源:origin: apache/kylin

/**
 * Serializes a Writable into a byte array.
 *
 * Defect fixed: the original closed the streams only on the success path, so
 * an IOException left them unclosed. try-with-resources closes them on every
 * path (harmless for byte-array streams, but the correct idiom).
 *
 * @throws RuntimeException wrapping any IOException from serialization
 */
public static byte[] toBytes(Writable writable) {
  try (ByteArrayOutputStream bout = new ByteArrayOutputStream();
      DataOutputStream out = new DataOutputStream(bout)) {
    writable.write(out);
    // DataOutputStream over a byte-array buffer does not buffer, but flush
    // explicitly before snapshotting the bytes.
    out.flush();
    return bout.toByteArray();
  } catch (IOException e) {
    // Callers treat serialization failure as unrecoverable.
    throw new RuntimeException(e);
  }
}

代码示例来源:origin: apache/hbase

@Override
 public void writeToBlock(DataOutput out) throws IOException {
  // Bloom-filter metadata always precedes the data section.
  bfw.getMetaWriter().write(out);
  // The data writer is optional; skip it when the filter keeps no
  // separate data payload.
  Writable dataWriter = bfw.getDataWriter();
  if (dataWriter != null) {
   dataWriter.write(out);
  }
 }
});

代码示例来源:origin: apache/ignite

/** {@inheritDoc} */
@Override public void write(DataOutput out, Object obj) throws IgniteCheckedException {
  // Sanity check: obj's runtime type must be the class this serializer handles.
  assert cls.isAssignableFrom(obj.getClass()) : cls + " " + obj.getClass();
  try {
    // Delegate to Hadoop's Writable serialization contract.
    ((Writable) obj).write(out);
  } catch (IOException e) {
    // Translate the checked IOException into Ignite's framework exception.
    throw new IgniteCheckedException(e);
  }
}

代码示例来源:origin: apache/flink

@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
	// Copy one serialized record without exposing it to the caller:
	// deserialize into the reusable scratch instance, then immediately
	// re-serialize that instance into the target view.
	ensureInstanceInstantiated();
	copyInstance.readFields(source);
	copyInstance.write(target);
}

代码示例来源:origin: apache/hive

/**
 * Writes a Writable as a WRITABLE-typed record: the value's class name
 * (writable string) followed by its serialized payload, so the reader can
 * reflectively instantiate the correct type.
 *
 * Improvement: the original's manual "set to null after close, close again in
 * finally" dance is replaced with try-with-resources, which closes the stream
 * on every path with less error-prone code. Closing a DataOutputStream over a
 * byte-array buffer before reading the bytes is behaviorally identical.
 */
public void writeWritable(Writable w) throws IOException {
 ByteArrayOutputStream baos = new ByteArrayOutputStream();
 try (DataOutputStream dos = new DataOutputStream(baos)) {
  WritableUtils.writeString(dos, w.getClass().getName());
  w.write(dos);
 }
 // Emit the framed bytes as a single record tagged with the WRITABLE type code.
 out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
}

代码示例来源:origin: apache/hive

@Override
public void write(DataOutput output) throws IOException {
 // Partition metadata travels first, as a serialized string.
 WritableUtils.writeString(output, HCatUtil.serialize(partitionInfo));
 // Then the concrete split class name, so readFields() can instantiate the
 // right type before asking it to deserialize itself.
 WritableUtils.writeString(output, baseMapRedSplit.getClass().getName());
 ((Writable) baseMapRedSplit).write(output);
}

代码示例来源:origin: apache/hive

@Override
public void process(Object row, int tag) throws HiveException {
 // Single-input operator: the first inspector describes the row layout.
 // (Read outside the try so an inspector-lookup failure is not wrapped.)
 ObjectInspector rowInspector = inputObjInspectors[0];
 try {
  // Serialize the row and append its Writable form to the in-memory buffer.
  serializer.serialize(row, rowInspector).write(buffer);
 } catch (Exception e) {
  throw new HiveException(e);
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Copies the serialized state of {@code src} into {@code dst} by writing
 * {@code src} to a thread-local buffer and reading the bytes back into
 * {@code dst}.
 *
 * @deprecated retained for compatibility only.
 */
@Deprecated
public static void cloneWritableInto(Writable dst, Writable src) throws IOException {
 // Per-thread buffer pair avoids repeated allocation and cross-thread sharing.
 CopyInCopyOutBuffer buffer = CLONE_BUFFERS.get();
 buffer.outBuffer.reset();
 // Serialize src, expose the written bytes to the input side, then
 // deserialize them into dst.
 src.write(buffer.outBuffer);
 buffer.moveData();
 dst.readFields(buffer.inBuffer);
}

代码示例来源:origin: apache/hive

@Override
 public void close(boolean abort) throws IOException {
  // Lazily create the output file if no record write opened it earlier,
  // so even an empty result produces a file.
  if (out == null) {
   out = finalOutPath.getFileSystem(jc).create(finalOutPath);
  }
  // Drain every buffered record into the stream, then release it.
  for (Writable record : records) {
   record.write(out);
  }
  records.clear();
  out.flush();
  out.close();
 }
};

代码示例来源:origin: apache/hive

@Override
public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out)
  throws IOException, SerDeException {
 // Serialize the join key with the context's SerDe and standard object
 // inspector, then let the resulting Writable stream itself out.
 AbstractSerDe serde = context.getSerDe();
 serde.serialize(key, context.getStandardOI()).write(out);
}

代码示例来源:origin: apache/hive

/**
 * Round-trips a Writable through an in-memory byte buffer: serializes
 * {@code oldWritable}, deserializes the bytes into {@code newWritable},
 * and returns the populated target.
 */
private <T extends Writable> T copy(T oldWritable, T newWritable) throws IOException {
 ByteArrayOutputStream buffer = new ByteArrayOutputStream();
 oldWritable.write(new DataOutputStream(buffer));
 DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
 newWritable.readFields(in);
 return newWritable;
}

代码示例来源:origin: apache/mahout

private static void writeAndRead(Writable toWrite, Writable toRead) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try (DataOutputStream dos = new DataOutputStream(baos)){
   toWrite.write(dos);
  }

  ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  try (DataInputStream dis = new DataInputStream(bais)) {
   toRead.readFields(dis);
  }
 }
}

代码示例来源:origin: apache/mahout

/**
 * Round-trip helper: dumps {@code toWrite} into a byte buffer and rehydrates
 * {@code toRead} from those bytes, exercising both sides of the Writable
 * contract.
 */
private static void writeAndRead(Writable toWrite, Writable toRead) throws IOException {
 ByteArrayOutputStream buffer = new ByteArrayOutputStream();
 try (DataOutputStream dataOut = new DataOutputStream(buffer)) {
  toWrite.write(dataOut);
 }
 ByteArrayInputStream bytesIn = new ByteArrayInputStream(buffer.toByteArray());
 try (DataInputStream dataIn = new DataInputStream(bytesIn)) {
  toRead.readFields(dataIn);
 }
}

代码示例来源:origin: apache/drill

@Override
public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out)
  throws IOException, SerDeException {
 // Turn the key into its Writable form using the context's SerDe and
 // standard inspector, then serialize it onto the object stream.
 ObjectInspector inspector = context.getStandardOI();
 Writable serializedKey = context.getSerDe().serialize(key, inspector);
 serializedKey.write(out);
}

代码示例来源:origin: apache/hive

@Override
public void flush() throws IOException {
 // Open the target file lazily on the first flush.
 if (out == null) {
  out = path.getFileSystem(options.getConfiguration()).create(path);
 }
 // Drain all buffered records, then push the bytes downstream.
 // (Unlike close(), the stream stays open for further writes.)
 for (Writable record : records) {
  record.write(out);
 }
 records.clear();
 out.flush();
}

相关文章