Usage and code examples of the org.apache.hadoop.io.UTF8.<init>() method


This article collects some Java code examples of the org.apache.hadoop.io.UTF8.<init>() method and shows how UTF8.<init>() is used in practice. The examples come mainly from platforms such as GitHub, Stack Overflow, and Maven, extracted from a number of selected projects, and should serve as useful references. Details of the UTF8.<init>() method are as follows:
Package path: org.apache.hadoop.io.UTF8
Class name: UTF8
Method name: <init>

Introduction to UTF8.<init>

Construct from a given string.
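
As a minimal sketch of the constructor described above (the class name UTF8ConstructorDemo is made up for illustration; note that UTF8 has long been deprecated and org.apache.hadoop.io.Text is the usual replacement):

import org.apache.hadoop.io.UTF8;

public class UTF8ConstructorDemo {
 @SuppressWarnings("deprecation")
 public static void main(String[] args) {
  // Construct a UTF8 instance from a given string.
  UTF8 greeting = new UTF8("hello hadoop");
  System.out.println(greeting);              // prints "hello hadoop"
  System.out.println(greeting.getLength());  // number of encoded bytes
 }
}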

Code examples

Code example source: org.apache.hadoop/hadoop-common (the same constructor appears verbatim in io.hops/hadoop-common, com.facebook.hadoop/hadoop-core, io.prestosql.hadoop/hadoop-apache, com.github.jiayuhan-it/hadoop-common, and org.jvnet.hudson.hadoop/hadoop-core)

public ArrayWritable(String[] strings) {
 this(UTF8.class, new Writable[strings.length]);
 for (int i = 0; i < strings.length; i++) {
  values[i] = new UTF8(strings[i]);
 }
}
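
A short usage sketch of this constructor (the helper class ArrayWritableDemo and its sample values are illustrative): every element of the String[] ends up wrapped in a UTF8 instance.

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.UTF8;
import org.apache.hadoop.io.Writable;

public class ArrayWritableDemo {
 @SuppressWarnings("deprecation")
 public static void main(String[] args) {
  // The String[] constructor wraps each element in a new UTF8.
  ArrayWritable aw = new ArrayWritable(new String[] { "alpha", "beta", "gamma" });
  for (Writable w : aw.get()) {
   System.out.println((UTF8) w);   // each element is a UTF8 instance
  }
 }
}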

Code example source: elastic/elasticsearch-hadoop

@Test
public void testUTF8() {
  writableTypeToJson(new UTF8("some utf8"));
}

Code example source: org.apache.hadoop/hadoop-common

UTF8 className = new UTF8();
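
The no-argument constructor is typically paired with readFields(DataInput): an empty UTF8 is created first and then populated from a stream. A hedged sketch of that pattern (the demo class name and the example value are ours):

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.UTF8;

public class UTF8ReadFieldsDemo {
 @SuppressWarnings("deprecation")
 public static void main(String[] args) throws IOException {
  // Serialize a UTF8 value into an in-memory buffer.
  DataOutputBuffer out = new DataOutputBuffer();
  new UTF8("org.example.SomeClass").write(out);

  // Deserialize: start from an empty instance, then fill it from the stream.
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  UTF8 className = new UTF8();
  className.readFields(in);
  System.out.println(className);   // "org.example.SomeClass"
 }
}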

Code example source: org.jvnet.hudson.hadoop/hadoop-core

/**  Add set owner record to edit log */
void logSetOwner(String src, String username, String groupname) {
 UTF8 u = new UTF8(username == null? "": username);
 UTF8 g = new UTF8(groupname == null? "": groupname);
 logEdit(OP_SET_OWNER, new UTF8(src), u, g);
}

Code example source: org.jvnet.hudson.hadoop/hadoop-core

/** 
 * Add rename record to edit log
 * TODO: use String parameters until just before writing to disk
 */
void logRename(String src, String dst, long timestamp) {
 UTF8 info[] = new UTF8[] { 
  new UTF8(src),
  new UTF8(dst),
  FSEditLog.toLogLong(timestamp)};
 logEdit(OP_RENAME, new ArrayWritable(UTF8.class, info));
}

Code example source: org.apache.hadoop/hadoop-common-test (the same test appears verbatim in ch.cern.hadoop/hadoop-common and com.github.jiayuhan-it/hadoop-common)

/**
 * Test that number of "unknown" classes is propagated across multiple copies.
 */
@SuppressWarnings("deprecation")
public void testForeignClass() {
 MapWritable inMap = new MapWritable();
 inMap.put(new Text("key"), new UTF8("value"));
 inMap.put(new Text("key2"), new UTF8("value2"));
 MapWritable outMap = new MapWritable(inMap);
 MapWritable copyOfCopy = new MapWritable(outMap);
 assertEquals(1, copyOfCopy.getNewClasses());
}

Code example source: org.apache.hadoop/hadoop-common-test (the same test appears verbatim in ch.cern.hadoop/hadoop-common and com.github.jiayuhan-it/hadoop-common)

public void testNullEncoding() throws Exception {
 String s = new String(new char[] { 0 });
 DataOutputBuffer dob = new DataOutputBuffer();
 new UTF8(s).write(dob);
 assertEquals(s, new String(dob.getData(), 2, dob.getLength()-2, "UTF-8"));
}
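
The offset 2 in the assertion skips the two-byte length prefix that UTF8 writes before the encoded bytes. The same framing is visible through the static helpers UTF8.writeString/UTF8.readString; a minimal sketch (the demo class name is ours):

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.UTF8;

public class UTF8RoundTripDemo {
 @SuppressWarnings("deprecation")
 public static void main(String[] args) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  UTF8.writeString(out, "some utf8");          // writes a 2-byte length, then the bytes

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  System.out.println(UTF8.readString(in));     // "some utf8"
 }
}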

Code example source: org.jvnet.hudson.hadoop/hadoop-core

/** 
 * Add delete file record to edit log
 */
void logDelete(String src, long timestamp) {
 UTF8 info[] = new UTF8[] { 
  new UTF8(src),
  FSEditLog.toLogLong(timestamp)};
 logEdit(OP_DELETE, new ArrayWritable(UTF8.class, info));
}

Code example source: ch.cern.hadoop/hadoop-mapreduce-client-jobclient

public void reduce(UTF8 key, Iterator<UTF8> values,
                   OutputCollector<UTF8, UTF8> output, Reporter reporter) throws IOException {
 while (values.hasNext()) {
  output.collect(key, new UTF8(values.next().toString()));
 }
}
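
Because UTF8 is deprecated, new code on the old mapred API would normally use org.apache.hadoop.io.Text instead. A hypothetical equivalent of the reducer above, rewritten with Text (the class name IdentityTextReducer is ours):

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class IdentityTextReducer extends MapReduceBase
    implements Reducer<Text, Text, Text, Text> {
 @Override
 public void reduce(Text key, Iterator<Text> values,
                    OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
  // Copy each value through unchanged, keyed by the incoming key.
  while (values.hasNext()) {
   output.collect(key, new Text(values.next().toString()));
  }
 }
}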

Code example source: org.jvnet.hudson.hadoop/hadoop-core

/** 
 * Add access time record to edit log
 */
void logTimes(String src, long mtime, long atime) {
 UTF8 info[] = new UTF8[] { 
  new UTF8(src),
  FSEditLog.toLogLong(mtime),
  FSEditLog.toLogLong(atime)};
 logEdit(OP_TIMES, new ArrayWritable(UTF8.class, info));
}

Code example source: ch.cern.hadoop/hadoop-common

public void testWritable() throws Exception {
 for (int i = 0; i < 10000; i++) {
  TestWritable.testWritable(new UTF8(getTestString()));
 }
}
