org.apache.hadoop.io.UTF8.writeString()方法的使用及代码示例

x33g5p2x  于2022-02-01 转载在 其他  
字(6.2k)|赞(0)|评价(0)|浏览(139)

本文整理了Java中org.apache.hadoop.io.UTF8.writeString()方法的一些代码示例,展示了UTF8.writeString()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。UTF8.writeString()方法的具体详情如下:
包路径:org.apache.hadoop.io.UTF8
类名称:UTF8
方法名:writeString

UTF8.writeString介绍

[英]Write a UTF-8 encoded string.
[中]写一个UTF-8编码的字符串。

代码示例

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Serializes this Writable by emitting a single UTF-8 string: the
 * fully-qualified name of {@code declaredClass} (a field of the
 * enclosing class, not visible here — presumably the wrapped class
 * token; confirm against the matching readFields()).
 */
@Override
 public void write(DataOutput out) throws IOException {
  UTF8.writeString(out, declaredClass.getName());
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Serializes an RPC invocation: version numbers, protocol/method
 * identification, then the parameter list.
 *
 * NOTE(review): the field order here must mirror the corresponding
 * readFields() exactly — do not reorder these writes.
 */
@Override
@SuppressWarnings("deprecation")
public void write(DataOutput out) throws IOException {
 out.writeLong(rpcVersion);
 UTF8.writeString(out, declaringClassProtocolName);
 UTF8.writeString(out, methodName);
 out.writeLong(clientVersion);
 out.writeInt(clientMethodsHash);
 // Length prefix followed by each argument; the parallel arrays
 // parameters[] / parameterClasses[] pair value with declared type.
 out.writeInt(parameterClasses.length);
 for (int i = 0; i < parameterClasses.length; i++) {
  ObjectWritable.writeObject(out, parameters[i], parameterClasses[i],
                conf, true);
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

UTF8.writeString(out, declaredClass.getName()); // always write declared
 UTF8.writeString(out, (String)instance);
 UTF8.writeString(out, ((Enum)instance).name());
} else if (Writable.class.isAssignableFrom(declaredClass)) { // Writable
 UTF8.writeString(out, instance.getClass().getName());
 ((Writable)instance).write(out);

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Serializes a primitive array: the component type's class name, the
 * element count, then the elements via a type-specific helper.
 * Only the eight Java primitive component types are supported; any
 * other component type raises IOException.
 *
 * The branch conditions are mutually exclusive (Class-token identity
 * comparison), so exactly one writeXxxArray helper runs per call.
 */
@Override
@SuppressWarnings("deprecation")
public void write(DataOutput out) throws IOException {
 // write componentType 
 UTF8.writeString(out, componentType.getName());      
 // write length
 out.writeInt(length);
 // do the inner loop.  Walk the decision tree only once.
 if (componentType == Boolean.TYPE) {          // boolean
  writeBooleanArray(out);
 } else if (componentType == Character.TYPE) { // char
  writeCharArray(out);
 } else if (componentType == Byte.TYPE) {      // byte
  writeByteArray(out);
 } else if (componentType == Short.TYPE) {     // short
  writeShortArray(out);
 } else if (componentType == Integer.TYPE) {   // int
  writeIntArray(out);
 } else if (componentType == Long.TYPE) {      // long
  writeLongArray(out);
 } else if (componentType == Float.TYPE) {     // float
  writeFloatArray(out);
 } else if (componentType == Double.TYPE) {    // double
  writeDoubleArray(out);
 } else {
  throw new IOException("Component type " + componentType.toString()
    + " is set as the output type, but no encoding is implemented for this type.");
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs

/**
 * Thin static delegate to the deprecated UTF8 codec: writes {@code s}
 * to {@code out} in Hadoop's legacy UTF-8 wire format and returns
 * the number of bytes written (per UTF8.writeString's contract).
 */
public static int writeString(DataOutput out, String s) throws IOException {
  return org.apache.hadoop.io.UTF8.writeString(out, s);
 }
}

代码示例来源:origin: linkedin/camus

/**
 * Serializes this key: leader id, partition/offset bookkeeping,
 * topic, timestamp, the legacy server/service strings, and finally
 * the embedded partition map.
 *
 * NOTE(review): write order must match the paired readFields();
 * note the mix of legacy UTF8.writeString (leaderId) and standard
 * DataOutput.writeUTF (topic/server/service) — both are intentional
 * wire-format choices, do not normalize one to the other.
 */
@Override
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, this.leaderId);
 out.writeInt(this.partition);
 out.writeLong(this.beginOffset);
 out.writeLong(this.offset);
 out.writeLong(this.checksum);
 out.writeUTF(this.topic);
 out.writeLong(this.time);
 out.writeUTF(this.server); // left for legacy
 out.writeUTF(this.service); // left for legacy
 this.partitionMap.write(out);
}

代码示例来源:origin: linkedin/camus

/**
 * Serializes this key (older layout without the trailing partition
 * map): leader id via the legacy UTF8 codec, then offsets, topic,
 * timestamp, and server/service via standard writeUTF.
 *
 * NOTE(review): field order must mirror the matching readFields().
 */
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, this.leaderId);
 out.writeInt(this.partition);
 out.writeLong(this.beginOffset);
 out.writeLong(this.offset);
 out.writeLong(this.checksum);
 out.writeUTF(this.topic);
 out.writeLong(this.time);
 out.writeUTF(this.server);
 out.writeUTF(this.service);
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Writes this object's {@code value} string to the stream in the
 * legacy Hadoop UTF-8 wire format. The UTF8 class is deprecated,
 * hence the suppression — kept for on-disk/wire compatibility.
 */
@Override
 @SuppressWarnings("deprecation")
 public void toBinary(DataOutputStream out) throws IOException {
  UTF8.writeString(out, value);
 }
}

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * {@inheritDoc}
 *
 * Serializes this node identity as: name, storageID (both legacy
 * UTF-8 strings) followed by the info port as a short.
 * NOTE(review): order must match the paired readFields().
 */
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, name);
 UTF8.writeString(out, storageID);
 out.writeShort(infoPort);
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * {@inheritDoc}
 *
 * Serializes this node identity: name and storageID as legacy UTF-8
 * strings, then the info port as a short. Order is wire-format
 * significant — keep in sync with readFields().
 */
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, name);
 UTF8.writeString(out, storageID);
 out.writeShort(infoPort);
}

代码示例来源:origin: io.hops/hadoop-common

/**
 * Serializes this Writable as a single legacy UTF-8 string: the
 * fully-qualified name of {@code declaredClass} (field of the
 * enclosing class, not visible in this snippet).
 */
@Override
 public void write(DataOutput out) throws IOException {
  UTF8.writeString(out, declaredClass.getName());
 }
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

/**
 * Serializes this Writable as a single legacy UTF-8 string: the
 * fully-qualified name of {@code declaredClass} (field of the
 * enclosing class, not visible in this snippet).
 */
@Override
 public void write(DataOutput out) throws IOException {
  UTF8.writeString(out, declaredClass.getName());
 }
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

/**
 * Serializes this Writable as a single legacy UTF-8 string: the
 * fully-qualified name of {@code declaredClass} (field of the
 * enclosing class, not visible in this snippet).
 */
@Override
 public void write(DataOutput out) throws IOException {
  UTF8.writeString(out, declaredClass.getName());
 }
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Serializes this split descriptor: the file (already a String here)
 * as a legacy UTF-8 string, then the byte range as start + length.
 * NOTE(review): order must match the readFields() that follows.
 */
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, file);
 out.writeLong(start);
 out.writeLong(length);
}
public void readFields(DataInput in) throws IOException {

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Serializes this split descriptor: the file's string form (file is
 * presumably a Path here — hence the explicit toString(); confirm
 * the field type in the enclosing class), then start and length.
 * NOTE(review): order must match the readFields() that follows.
 */
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, file.toString());
 out.writeLong(start);
 out.writeLong(length);
}
public void readFields(DataInput in) throws IOException {

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Serializes an RPC invocation (older variant, no version fields):
 * method name, parameter count, then each argument paired with its
 * declared class via ObjectWritable.
 * NOTE(review): write order must mirror the matching readFields().
 */
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, methodName);
 out.writeInt(parameterClasses.length);
 for (int i = 0; i < parameterClasses.length; i++) {
  ObjectWritable.writeObject(out, parameters[i], parameterClasses[i],
                conf);
 }
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Serializes an RPC invocation (older variant, no version fields):
 * method name, parameter count, then each argument paired with its
 * declared class via ObjectWritable.
 * NOTE(review): write order must mirror the matching readFields().
 */
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, methodName);
 out.writeInt(parameterClasses.length);
 for (int i = 0; i < parameterClasses.length; i++) {
  ObjectWritable.writeObject(out, parameters[i], parameterClasses[i],
                conf);
 }
}

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Deliberately overwrites the start of {@code file} with a layout
 * marker that pre-0.13 Hadoop versions cannot parse, so that older
 * software refuses to start on this directory layout.
 *
 * Byte layout written at offset 0: the current LAYOUT_VERSION int,
 * an empty legacy-UTF8 string, then the human-readable explanation.
 * This exact layout is intentional — do not reorder or "clean up".
 * Finally syncs the file descriptor so the marker reaches disk.
 */
protected void writeCorruptedData(RandomAccessFile file) throws IOException {
  final String messageForPreUpgradeVersion =
   "\nThis file is INTENTIONALLY CORRUPTED so that versions\n"
   + "of Hadoop prior to 0.13 (which are incompatible\n"
   + "with this directory layout) will fail to start.\n";
 
  file.seek(0);
  file.writeInt(FSConstants.LAYOUT_VERSION);
  org.apache.hadoop.io.UTF8.writeString(file, "");
  file.writeBytes(messageForPreUpgradeVersion);
  file.getFD().sync();
 }
}
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

/**
 * Serializes storage/namespace version info: build version string
 * (legacy UTF-8), layout version, namespace ID, creation time, and
 * distributed-upgrade version.
 * NOTE(review): order must match the paired readFields().
 */
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, getBuildVersion());
 out.writeInt(getLayoutVersion());
 out.writeInt(getNamespaceID());
 out.writeLong(getCTime());
 out.writeInt(getDistributedUpgradeVersion());
}

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Serializes storage/namespace version info: build version string
 * (legacy UTF-8), layout version, namespace ID, creation time, and
 * distributed-upgrade version.
 * NOTE(review): order must match the paired readFields().
 */
public void write(DataOutput out) throws IOException {
 UTF8.writeString(out, getBuildVersion());
 out.writeInt(getLayoutVersion());
 out.writeInt(getNamespaceID());
 out.writeLong(getCTime());
 out.writeInt(getDistributedUpgradeVersion());
}

相关文章