Usage of org.apache.hadoop.io.Writable.readFields() with Code Examples

This article collects Java code examples for the org.apache.hadoop.io.Writable.readFields() method and shows how it is used in practice. The examples are drawn from selected open-source projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of Writable.readFields() are as follows:
Package: org.apache.hadoop.io
Class: Writable
Method: readFields

Writable.readFields Overview

Deserialize the fields of this object from in.

For efficiency, implementations should attempt to re-use storage in the existing object where possible.
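
As a concrete illustration, here is a minimal sketch of a custom Writable (the class name PairWritable and its fields are purely illustrative, not taken from any of the projects below) whose readFields() follows this advice by overwriting the instance's existing fields in place:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

// Illustrative sketch: a two-field Writable that reuses its own storage
// on deserialization instead of allocating new objects per record.
public class PairWritable implements Writable {
  private int left;
  private long right;

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(left);     // fields are written in a fixed order...
    out.writeLong(right);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    left = in.readInt();    // ...and read back in the same order,
    right = in.readLong();  // overwriting this instance's fields in place
  }
}

Because readFields() mutates the receiver, a single instance can be reused across many records; this is exactly the pattern several of the Flink and Hive examples below rely on.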

Code Examples

Example source: apache/flink

@Override
public T deserialize(T reuse, DataInputView source) throws IOException {
  reuse.readFields(source);
  return reuse;
}

Example source: apache/flink

@Override
public int compareSerialized(DataInputView firstSource, DataInputView secondSource) throws IOException {
  ensureReferenceInstantiated();
  ensureTempReferenceInstantiated();
  reference.readFields(firstSource);
  tempReference.readFields(secondSource);
  int comp = reference.compareTo(tempReference);
  return ascendingComparison ? comp : -comp;
}

Example source: apache/hive

private void loadOptimized(MapJoinBytesTableContainer container, ObjectInputStream in,
  Writable key, Writable value) throws Exception {
 int numKeys = in.readInt();
 for (int keyIndex = 0; keyIndex < numKeys; keyIndex++) {
  key.readFields(in);
  long numRows = in.readLong();
  for (long rowIndex = 0L; rowIndex < numRows; rowIndex++) {
   value.readFields(in);
   container.putRow(key, value);
  }
 }
}

Example source: apache/ignite

/**
 * Deserialization of Hadoop Writable object.
 *
 * @param writable Writable object to deserialize to.
 * @param bytes byte array to deserialize.
 */
public static void deserialize(Writable writable, byte[] bytes) throws IOException {
  DataInputStream dataIn = new DataInputStream(new ByteArrayInputStream(bytes));
  writable.readFields(dataIn);
  dataIn.close();
}
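
For the opposite direction, a matching helper (an illustrative sketch in the same style, not part of the snippet above; it uses the same java.io stream classes) would serialize a Writable to a byte array:

/**
 * Serialization of a Hadoop Writable object (illustrative counterpart).
 *
 * @param writable Writable object to serialize.
 * @return Serialized bytes.
 */
public static byte[] serialize(Writable writable) throws IOException {
  ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
  DataOutputStream dataOut = new DataOutputStream(byteOut);
  writable.write(dataOut);  // symmetric to readFields() in deserialize()
  dataOut.close();
  return byteOut.toByteArray();
}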

Example source: apache/flink

private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
  // read the parent fields and the final fields
  in.defaultReadObject();

  try {
    Class<? extends Writable> writableSplit = splitType.asSubclass(Writable.class);
    mapreduceInputSplit = (org.apache.hadoop.mapreduce.InputSplit) WritableFactories.newInstance(writableSplit);
  } catch (Exception e) {
    throw new RuntimeException("Unable to instantiate the Hadoop InputSplit", e);
  }

  ((Writable) mapreduceInputSplit).readFields(in);
}

Example source: apache/hive

public static void decodeWritable(Writable w, String idStr) throws IOException {
 DataInputStream in = new DataInputStream(new ByteArrayInputStream(Base64.decodeBase64(idStr)));
 w.readFields(in);
}
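
The matching encoder (a sketch, not copied from the Hive source) would serialize the Writable and Base64-encode the bytes with the same commons-codec Base64 class used above:

public static String encodeWritable(Writable w) throws IOException {
  ByteArrayOutputStream bytes = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(bytes);
  w.write(out);                                           // serialize first...
  out.close();
  return Base64.encodeBase64String(bytes.toByteArray());  // ...then encode
}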

Example source: apache/hive

public void read(MapJoinObjectSerDeContext context, ObjectInputStream in, Writable container)
    throws IOException, SerDeException {
  container.readFields(in);
  read(context, container);
}

Example source: apache/hive

public void read(MapJoinObjectSerDeContext context, ObjectInputStream in, Writable container)
    throws IOException, SerDeException {
  long numRows = in.readLong();
  for (long rowIndex = 0L; rowIndex < numRows; rowIndex++) {
    container.readFields(in);
    read(context, container);
  }
}

Example source: org.apache.hadoop/hadoop-common

@Override
public void readFields(DataInput in) throws IOException {
 values = new Writable[in.readInt()];          // construct values
 for (int i = 0; i < values.length; i++) {
  Writable value = WritableFactories.newInstance(valueClass);
  value.readFields(in);                       // read a value
  values[i] = value;                          // store it in values
 }
}
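
For context, the write() side that produces this length-prefixed format (a sketch matching the read logic above, not a verbatim copy of the Hadoop source) would look like:

@Override
public void write(DataOutput out) throws IOException {
  out.writeInt(values.length);   // element count first
  for (int i = 0; i < values.length; i++) {
    values[i].write(out);        // then each value in order
  }
}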

Example source: org.apache.hadoop/hadoop-common

@Override
public void readFields(DataInput in) throws IOException {
 type = in.readByte();
 Class<? extends Writable> clazz = getTypes()[type & 0xff];
 try {
  instance = ReflectionUtils.newInstance(clazz, conf);
 } catch (Exception e) {
  e.printStackTrace();
  throw new IOException("Cannot initialize the class: " + clazz);
 }
 instance.readFields(in);
}

Example source: org.apache.hadoop/hadoop-common

@Override
public Writable deserialize(Writable w) throws IOException {
  Writable writable;
  if (w == null) {
    writable = (Writable) ReflectionUtils.newInstance(writableClass, getConf());
  } else {
    writable = w;
  }
  writable.readFields(dataIn);
  return writable;
}

Example source: apache/drill

public void read(MapJoinObjectSerDeContext context, ObjectInputStream in, Writable container)
    throws IOException, SerDeException {
  long numRows = in.readLong();
  for (long rowIndex = 0L; rowIndex < numRows; rowIndex++) {
    container.readFields(in);
    read(context, container);
  }
}

Example source: apache/flink

@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
  ensureInstanceInstantiated();
  copyInstance.readFields(source);
  copyInstance.write(target);
}

Example source: apache/ignite

/** {@inheritDoc} */
@Override public Object read(DataInput in, @Nullable Object obj) throws IgniteCheckedException {
  Writable w = obj == null ? U.newInstance(cls) : cls.cast(obj);
  try {
    w.readFields(in);
  }
  catch (IOException e) {
    throw new IgniteCheckedException(e);
  }
  return w;
}

Example source: org.apache.hadoop/hadoop-common

@Deprecated
public static void cloneWritableInto(Writable dst, 
                   Writable src) throws IOException {
 CopyInCopyOutBuffer buffer = CLONE_BUFFERS.get();
 buffer.outBuffer.reset();
 src.write(buffer.outBuffer);
 buffer.moveData();
 dst.readFields(buffer.inBuffer);
}
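
Since cloneWritableInto is deprecated, newer code typically clones a Writable with WritableUtils.clone(src, conf) instead, which achieves the same write-then-readFields round trip.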

Example source: apache/hive

private <T extends Writable> T copy(T oldWritable, T newWritable) throws IOException {
 ByteArrayOutputStream baos = new ByteArrayOutputStream();
 DataOutputStream out = new DataOutputStream(baos);
 oldWritable.write(out);
 ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
 DataInputStream in = new DataInputStream(bais);
 newWritable.readFields(in);
 return newWritable;
}
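
A quick usage sketch (assuming Hadoop's Text type) shows the round trip:

Text original = new Text("hello");
Text restored = copy(original, new Text());  // restored is a deep copy equal to original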

Example source: apache/mahout

private static void writeAndRead(Writable toWrite, Writable toRead) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try (DataOutputStream dos = new DataOutputStream(baos)) {
    toWrite.write(dos);
  }

  ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  try (DataInputStream dis = new DataInputStream(bais)) {
    toRead.readFields(dis);
  }
}

Example source: apache/ignite

private void read(long ptr, int size, Writable w) {
  assert size == 4 : size;

  GridUnsafe.copyOffheapHeap(ptr, buf, GridUnsafe.BYTE_ARR_OFF, size);
  dataInput.bytes(buf, size);

  try {
    w.readFields(dataInput);
  }
  catch (IOException e) {
    throw new RuntimeException(e);
  }
}
