Usage and code examples for org.apache.hadoop.io.UTF8.fromBytes()

x33g5p2x · Reposted on 2022-02-01 · Category: Other

This article collects Java code examples for the org.apache.hadoop.io.UTF8.fromBytes() method and shows how UTF8.fromBytes() is used in practice. The examples are taken from selected projects on GitHub, Stack Overflow, Maven, and similar platforms, so they should serve as a useful reference. Details of UTF8.fromBytes() are as follows:
Package: org.apache.hadoop.io
Class: UTF8
Method: fromBytes

UTF8.fromBytes overview

Convert a UTF-8 encoded byte array back into a string.
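
For quick orientation, here is a minimal round-trip sketch that pairs UTF8.fromBytes() with its companion UTF8.getBytes(). The class name Utf8FromBytesDemo and the sample string are illustrative only; note that UTF8 itself is deprecated in favor of org.apache.hadoop.io.Text, so this compiles with deprecation warnings.

import java.io.IOException;
import org.apache.hadoop.io.UTF8;

public class Utf8FromBytesDemo {
  public static void main(String[] args) throws IOException {
    // Encode a String into a UTF-8 byte array with the companion helper.
    byte[] encoded = UTF8.getBytes("hello, Hadoop");
    // Decode the byte array back into a String; fromBytes() throws an
    // IOException (UTFDataFormatException) if the bytes are not valid UTF-8.
    String decoded = UTF8.fromBytes(encoded);
    System.out.println(decoded); // prints: hello, Hadoop
  }
}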

Code examples
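
The snippets below are excerpts from Hadoop's TestUTF8 unit tests and omit their enclosing class and imports. As an assumption inferred from the method signatures (not part of the quoted excerpts), the surrounding context presumably looks roughly like the following JUnit 3 style test class:

import java.io.UTFDataFormatException;
import junit.framework.TestCase;
import org.apache.hadoop.io.UTF8;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;

// Sketch of the assumed enclosing class; the excerpted test methods below
// would be declared inside a class of this shape.
public class TestUTF8 extends TestCase {
}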

Code example source: ch.cern.hadoop/hadoop-common

/**
 * Test that decoding invalid UTF8 throws an appropriate error message.
 */
public void testInvalidUTF8() throws Exception {
 byte[] invalid = new byte[] {
   0x01, 0x02, (byte)0xff, (byte)0xff, 0x01, 0x02, 0x03, 0x04, 0x05 };
 try {
  UTF8.fromBytes(invalid);
  fail("did not throw an exception");
 } catch (UTFDataFormatException utfde) {
  GenericTestUtils.assertExceptionContains(
    "Invalid UTF8 at ffff01020304", utfde);
 }
}

Code example source: com.github.jiayuhan-it/hadoop-common

/**
 * Test for a 5-byte UTF8 sequence, which is now considered illegal.
 */
public void test5ByteUtf8Sequence() throws Exception {
 byte[] invalid = new byte[] {
   0x01, 0x02, (byte)0xf8, (byte)0x88, (byte)0x80,
   (byte)0x80, (byte)0x80, 0x04, 0x05 };
 try {
  UTF8.fromBytes(invalid);
  fail("did not throw an exception");
 } catch (UTFDataFormatException utfde) {
  GenericTestUtils.assertExceptionContains(
    "Invalid UTF8 at f88880808004", utfde);
 }
}

Code example source: ch.cern.hadoop/hadoop-common

/**
 * Test that decoding invalid UTF8 due to truncation yields the correct
 * exception type.
 */
public void testInvalidUTF8Truncated() throws Exception {
 // Truncated CAT FACE character -- this is a 4-byte sequence, but we
 // only have the first three bytes.
 byte[] truncated = new byte[] {
   (byte)0xF0, (byte)0x9F, (byte)0x90 };
 try {
  UTF8.fromBytes(truncated);
  fail("did not throw an exception");
 } catch (UTFDataFormatException utfde) {
  GenericTestUtils.assertExceptionContains(
    "Truncated UTF8 at f09f90", utfde);
 }
}

Code example source: ch.cern.hadoop/hadoop-common

/**
 * Test encoding and decoding of UTF8 outside the basic multilingual plane.
 *
 * This is a regression test for HADOOP-9103.
 */
public void testNonBasicMultilingualPlane() throws Exception {
 // Test using the "CAT FACE" character (U+1F431)
 // See http://www.fileformat.info/info/unicode/char/1f431/index.htm
 String catFace = "\uD83D\uDC31";
 // This encodes to 4 bytes in UTF-8:
 byte[] encoded = catFace.getBytes("UTF-8");
 assertEquals(4, encoded.length);
 assertEquals("f09f90b1", StringUtils.byteToHexString(encoded));
 // Decode back to String using our own decoder
 String roundTrip = UTF8.fromBytes(encoded);
 assertEquals(catFace, roundTrip);
}
