Usage and code examples of the org.apache.parquet.io.api.Binary.fromConstantByteArray() method


This article collects some Java code examples of the org.apache.parquet.io.api.Binary.fromConstantByteArray() method and shows how Binary.fromConstantByteArray() is used in practice. The examples were extracted from selected open-source projects found on platforms such as GitHub, Stack Overflow, and Maven, so they make useful reference material. Details of Binary.fromConstantByteArray() are as follows:
Package: org.apache.parquet.io.api
Class: Binary
Method: fromConstantByteArray

Binary.fromConstantByteArray overview

Binary.fromConstantByteArray wraps a given byte array (or, with the (byte[] value, int offset, int length) overload, a slice of it) in a Binary without copying the bytes. The "constant" in the name is the caller's promise that the backing array will never be modified afterwards, which lets Parquet hold on to the Binary and its bytes directly. If the backing buffer may be reused or mutated later, Binary.fromReusedByteArray should be used instead so that the bytes are defensively copied when the Binary needs to be retained.
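
Below is a minimal, self-contained sketch of how the constant and reused factory methods differ. The class name and string contents are made up for illustration; only the Binary methods themselves come from parquet-column.

import java.nio.charset.StandardCharsets;

import org.apache.parquet.io.api.Binary;

public class BinaryFactoryExample {
  public static void main(String[] args) {
    byte[] bytes = "hello parquet".getBytes(StandardCharsets.UTF_8);

    // The caller promises never to modify 'bytes' again, so no defensive copy is made.
    Binary constant = Binary.fromConstantByteArray(bytes);

    // Wrap only a slice of the array: offset 6, length 7 -> "parquet".
    Binary slice = Binary.fromConstantByteArray(bytes, 6, 7);

    // If the buffer will be overwritten later (e.g. a reusable read buffer), use the
    // "reused" variant so Parquet knows it must copy the bytes before retaining them.
    Binary reused = Binary.fromReusedByteArray(bytes);

    System.out.println(constant.toStringUsingUTF8()); // hello parquet
    System.out.println(slice.toStringUsingUTF8());    // parquet
    System.out.println(reused.length());              // 13
  }
}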

Code examples

Code example source: org.apache.spark/spark-sql_2.10

@Override
public final Binary readBinary(int len) {
  // 'offset' is tracked relative to Platform.BYTE_ARRAY_OFFSET for Unsafe access, so
  // subtracting it yields the plain index into the byte[] buffer; no copy is made.
  Binary result = Binary.fromConstantByteArray(buffer, offset - Platform.BYTE_ARRAY_OFFSET, len);
  offset += len;
  return result;
}

Code example source: org.apache.spark/spark-sql_2.11

@Override
public final Binary readBinary(int len) {
  ByteBuffer buffer = getBuffer(len);
  if (buffer.hasArray()) {
    // Heap buffer: wrap the backing array directly without copying.
    return Binary.fromConstantByteArray(
        buffer.array(), buffer.arrayOffset() + buffer.position(), len);
  } else {
    // Direct buffer: copy the bytes into a fresh array and wrap that.
    byte[] bytes = new byte[len];
    buffer.get(bytes);
    return Binary.fromConstantByteArray(bytes);
  }
}

Code example source: org.apache.spark/spark-sql

@Override
public final Binary readBinary(int len) {
  ByteBuffer buffer = getBuffer(len);
  if (buffer.hasArray()) {
    // Heap buffer: wrap the backing array directly without copying.
    return Binary.fromConstantByteArray(
        buffer.array(), buffer.arrayOffset() + buffer.position(), len);
  } else {
    // Direct buffer: copy the bytes into a fresh array and wrap that.
    byte[] bytes = new byte[len];
    buffer.get(bytes);
    return Binary.fromConstantByteArray(bytes);
  }
}

Code example source: apache/hive

protected static void writeListData(ParquetWriter<Group> writer, boolean isDictionaryEncoding,
 int elementNum) throws IOException {
 SimpleGroupFactory f = new SimpleGroupFactory(schema);
 int listMaxSize = 4;
 int listElementIndex = 0;
 for (int i = 0; i < elementNum; i++) {
  boolean isNull = isNull(i);
  Group group = f.newGroup();
  int listSize = i % listMaxSize + 1;
  if (!isNull) {
   for (int j = 0; j < listSize; j++) {
    group.append("list_int32_field", getIntValue(isDictionaryEncoding, listElementIndex));
    group.append("list_int64_field", getLongValue(isDictionaryEncoding, listElementIndex));
    group.append("list_double_field", getDoubleValue(isDictionaryEncoding, listElementIndex));
    group.append("list_float_field", getFloatValue(isDictionaryEncoding, listElementIndex));
    group.append("list_boolean_field", getBooleanValue(listElementIndex));
    group.append("list_binary_field", getBinaryValue(isDictionaryEncoding, listElementIndex));
    HiveDecimal hd = getDecimal(isDictionaryEncoding, listElementIndex).setScale(2);
    HiveDecimalWritable hdw = new HiveDecimalWritable(hd);
    group.append("list_decimal_field", Binary.fromConstantByteArray(hdw.getInternalStorage()));
    listElementIndex++;
   }
  }
  for (int j = 0; j < listMaxSize; j++) {
   group.append("list_binary_field_for_repeat_test", getBinaryValue(isDictionaryEncoding, i));
  }
  writer.write(group);
 }
 writer.close();
}

Code example source: apache/hive

HiveDecimal hd = getDecimal(isDictionaryEncoding, mapElementIndex).setScale(2);
HiveDecimalWritable hdw = new HiveDecimalWritable(hd);
Binary decimalValForMap = Binary.fromConstantByteArray(hdw.getInternalStorage());
group.addGroup("map_int32").append("key", intValForMap).append("value", intValForMap);
group.addGroup("map_int64").append("key", longValForMap).append("value", longValForMap);

Code example source: com.alibaba.blink/flink-table

@Override
public final Binary readBinary(int len) {
  Binary result = Binary.fromConstantByteArray(buffer, offset - BYTE_ARRAY_OFFSET, len);
  offset += len;
  return result;
}

Code example source: io.snappydata/snappy-spark-sql

@Override
public final Binary readBinary(int len) {
  Binary result = Binary.fromConstantByteArray(buffer, offset - Platform.BYTE_ARRAY_OFFSET, len);
  offset += len;
  return result;
}

Code example source: apache/hive

group.append("value", Binary.fromConstantByteArray(w.getInternalStorage()));

Code example source: org.lasersonlab.apache.parquet/parquet-column

@Override
public Binary slice(int start, int length) {
  return Binary.fromConstantByteArray(getBytesUnsafe(), start, length);
}

Code example source: org.apache.parquet/parquet-column

@Override
public Binary slice(int start, int length) {
 if (isBackingBytesReused) {
  return Binary.fromReusedByteArray(value, offset + start, length);
 } else {
  return Binary.fromConstantByteArray(value, offset + start, length);
 }
}

Code example source: org.apache.parquet/parquet-column

@Override
public Binary slice(int start, int length) {
 if (isBackingBytesReused) {
  return Binary.fromReusedByteArray(value, start, length);
 } else {
  return Binary.fromConstantByteArray(value, start, length);
 }
}

Code example source: org.lasersonlab.apache.parquet/parquet-column

public Binary copy() {
 if (isBackingBytesReused) {
  return Binary.fromConstantByteArray(getBytes());
 } else {
  return this;
 }
}

Code example source: org.apache.parquet/parquet-column

public Binary copy() {
 if (isBackingBytesReused) {
  return Binary.fromConstantByteArray(getBytes());
 } else {
  return this;
 }
}

Code example source: org.apache.parquet/parquet-column

@Override
public Binary slice(int start, int length) {
  return Binary.fromConstantByteArray(getBytesUnsafe(), start, length);
}

Code example source: org.lasersonlab.apache.parquet/parquet-column

@Override
public Binary slice(int start, int length) {
 if (isBackingBytesReused) {
  return Binary.fromReusedByteArray(value, offset + start, length);
 } else {
  return Binary.fromConstantByteArray(value, offset + start, length);
 }
}

Code example source: org.lasersonlab.apache.parquet/parquet-column

@Override
public Binary slice(int start, int length) {
 if (isBackingBytesReused) {
  return Binary.fromReusedByteArray(value, start, length);
 } else {
  return Binary.fromConstantByteArray(value, start, length);
 }
}

Code example source: org.apache.parquet/parquet-column

public DeltaByteArrayReader() {
 this.prefixLengthReader = new DeltaBinaryPackingValuesReader();
 this.suffixReader = new DeltaLengthByteArrayValuesReader();
 this.previous = Binary.fromConstantByteArray(new byte[0]);
}

Code example source: org.lasersonlab.apache.parquet/parquet-column

public DeltaByteArrayReader() {
 this.prefixLengthReader = new DeltaBinaryPackingValuesReader();
 this.suffixReader = new DeltaLengthByteArrayValuesReader();
 this.previous = Binary.fromConstantByteArray(new byte[0]);
}

Code example source: org.lasersonlab.apache.parquet/parquet-column

@Override
Binary truncateMin(Binary minValue, int length) {
 if (minValue.length() <= length) {
  return minValue;
 }
 ByteBuffer buffer = minValue.toByteBuffer();
 byte[] array;
 if (validator.checkValidity(buffer) == Validity.VALID) {
  array = truncateUtf8(buffer, length);
 } else {
  array = truncate(buffer, length);
 }
 return array == null ? minValue : Binary.fromConstantByteArray(array);
}

Code example source: org.lasersonlab.apache.parquet/parquet-column

@Override
Binary truncateMax(Binary maxValue, int length) {
 if (maxValue.length() <= length) {
  return maxValue;
 }
 byte[] array;
 ByteBuffer buffer = maxValue.toByteBuffer();
 if (validator.checkValidity(buffer) == Validity.VALID) {
  array = incrementUtf8(truncateUtf8(buffer, length));
 } else {
  array = increment(truncate(buffer, length));
 }
 return array == null ? maxValue : Binary.fromConstantByteArray(array);
}
