本文整理了Java中org.apache.spark.unsafe.Platform.putDouble()
方法的一些代码示例,展示了Platform.putDouble()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Platform.putDouble()
方法的具体详情如下:
包路径:org.apache.spark.unsafe.Platform
类名称:Platform
方法名:putDouble
暂无
代码示例来源:origin: org.apache.spark/spark-sql_2.10
/**
 * Writes one double into this column's off-heap buffer at row {@code rowId}.
 *
 * @param rowId logical row index; each row occupies an 8-byte slot
 * @param value the double to store
 */
@Override
public void putDouble(int rowId, double value) {
  // 8L forces the multiply into long arithmetic: "rowId * 8" is evaluated as a
  // 32-bit int and silently overflows for rowId > Integer.MAX_VALUE / 8,
  // corrupting the target address. (Later Spark versions already use 8L.)
  Platform.putDouble(null, data + rowId * 8L, value);
}
代码示例来源:origin: org.apache.spark/spark-sql_2.10
/**
 * Fills {@code count} consecutive 8-byte slots, starting at row {@code rowId},
 * with the same double value.
 *
 * @param rowId index of the first row to write
 * @param count number of rows to fill
 * @param value the double stored into every slot
 */
@Override
public void putDoubles(int rowId, int count, double value) {
  // 8L avoids 32-bit overflow of "8 * rowId" for very large row indices
  // (the sql_2.11 / spark-sql variants of this method already do this).
  long offset = data + 8L * rowId;
  for (int i = 0; i < count; ++i, offset += 8) {
    Platform.putDouble(null, offset, value);
  }
}
代码示例来源:origin: org.apache.spark/spark-sql_2.11
/**
 * Writes {@code count} copies of {@code value} into consecutive 8-byte slots
 * of the off-heap buffer, starting at logical row {@code rowId}.
 *
 * @param rowId index of the first row to write
 * @param count number of rows to fill
 * @param value the double stored into every slot
 */
@Override
public void putDoubles(int rowId, int count, double value) {
  // 8L keeps the address computation in long arithmetic.
  long pos = data + 8L * rowId;
  for (int remaining = count; remaining > 0; --remaining) {
    Platform.putDouble(null, pos, value);
    pos += 8;
  }
}
代码示例来源:origin: org.apache.spark/spark-sql
/**
 * Stores the same double into {@code count} consecutive rows beginning at
 * {@code rowId}; each row is an 8-byte slot in off-heap memory.
 *
 * @param rowId first row to write
 * @param count how many rows to fill
 * @param value value replicated into every slot
 */
@Override
public void putDoubles(int rowId, int count, double value) {
  final long base = data + 8L * rowId;  // long math: no 32-bit overflow
  long cursor = base;
  int written = 0;
  while (written < count) {
    Platform.putDouble(null, cursor, value);
    cursor += 8;
    ++written;
  }
}
代码示例来源:origin: org.apache.spark/spark-sql_2.11
/**
 * Writes a single double into the 8-byte slot for row {@code rowId}.
 *
 * @param rowId logical row index
 * @param value the double to store
 */
@Override
public void putDouble(int rowId, double value) {
  // Compute the absolute off-heap address first; 8L keeps it in long math.
  final long address = data + rowId * 8L;
  Platform.putDouble(null, address, value);
}
代码示例来源:origin: org.apache.spark/spark-sql
/**
 * Stores {@code value} at row {@code rowId} of this off-heap double column.
 *
 * @param rowId logical row index; rows are 8 bytes apart
 * @param value the double to write
 */
@Override
public void putDouble(int rowId, double value) {
  final long slot = 8L * rowId;  // byte offset of the row within the buffer
  Platform.putDouble(null, data + slot, value);
}
代码示例来源:origin: org.apache.spark/spark-sql_2.10
/**
 * Bulk-copies {@code count} little-endian-encoded doubles from {@code src}
 * into this column's off-heap buffer, starting at row {@code rowId}.
 *
 * @param rowId    first destination row
 * @param count    number of doubles to copy
 * @param src      source bytes holding little-endian doubles
 * @param srcIndex byte offset of the first double within {@code src}
 */
@Override
public void putDoubles(int rowId, int count, byte[] src, int srcIndex) {
  if (!bigEndianPlatform) {
    // Native order matches the source encoding: a single raw memory copy.
    // 8L forces long arithmetic — the original "rowId * 8" / "count * 8"
    // are 32-bit multiplies that overflow for large rowId/count (fixed in
    // the sql_2.11 / spark-sql variants).
    Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex,
      null, data + rowId * 8L, count * 8L);
  } else {
    // Big-endian host: decode each value as little-endian, then store it
    // in native order one double at a time.
    ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.LITTLE_ENDIAN);
    long offset = data + 8L * rowId;
    for (int i = 0; i < count; ++i, offset += 8) {
      Platform.putDouble(null, offset, bb.getDouble(srcIndex + (8 * i)));
    }
  }
}
代码示例来源:origin: org.apache.spark/spark-sql_2.11
/**
 * Copies {@code count} doubles, stored little-endian in {@code src}, into the
 * off-heap buffer beginning at row {@code rowId}.
 *
 * @param rowId    first destination row
 * @param count    number of doubles to copy
 * @param src      byte array containing little-endian doubles
 * @param srcIndex byte offset of the first double in {@code src}
 */
@Override
public void putDoubles(int rowId, int count, byte[] src, int srcIndex) {
  if (bigEndianPlatform) {
    // Host byte order differs from the source encoding: decode each double
    // through a little-endian view and store it individually.
    final ByteBuffer view = ByteBuffer.wrap(src).order(ByteOrder.LITTLE_ENDIAN);
    long dst = data + 8L * rowId;
    for (int i = 0; i < count; ++i, dst += 8) {
      final double decoded = view.getDouble(srcIndex + (8 * i));
      Platform.putDouble(null, dst, decoded);
    }
  } else {
    // Native little-endian host: one bulk copy, all arithmetic in long.
    Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex,
      null, data + rowId * 8L, count * 8L);
  }
}
代码示例来源:origin: org.apache.spark/spark-sql
/**
 * Transfers {@code count} little-endian doubles from {@code src} into this
 * column's off-heap storage starting at row {@code rowId}.
 *
 * @param rowId    first destination row
 * @param count    number of doubles to transfer
 * @param src      source byte array (little-endian doubles)
 * @param srcIndex byte offset of the first value within {@code src}
 */
@Override
public void putDoubles(int rowId, int count, byte[] src, int srcIndex) {
  final long destBase = data + 8L * rowId;  // long math avoids overflow
  if (!bigEndianPlatform) {
    // Fast path: source encoding equals native order, so memcpy the block.
    Platform.copyMemory(src, Platform.BYTE_ARRAY_OFFSET + srcIndex,
      null, destBase, count * 8L);
    return;
  }
  // Slow path for big-endian hosts: read each double little-endian and
  // write it back in native order.
  ByteBuffer bb = ByteBuffer.wrap(src).order(ByteOrder.LITTLE_ENDIAN);
  for (int i = 0; i < count; ++i) {
    Platform.putDouble(null, destBase + 8L * i, bb.getDouble(srcIndex + (8 * i)));
  }
}
代码示例来源:origin: io.snappydata/snappy-spark-sql
/**
 * Writes one double into the off-heap buffer at row {@code rowId}.
 *
 * @param rowId logical row index; each row occupies 8 bytes
 * @param value the double to store
 */
@Override
public void putDouble(int rowId, double value) {
  // 8L promotes the multiply to long: the original "rowId * 8" overflows as
  // a 32-bit int for rowId > Integer.MAX_VALUE / 8, producing a bad address.
  Platform.putDouble(null, data + rowId * 8L, value);
}
代码示例来源:origin: io.snappydata/snappy-spark-sql
/**
 * Fills {@code count} consecutive rows, starting at {@code rowId}, with the
 * same double value.
 *
 * @param rowId first row to write
 * @param count number of rows to fill
 * @param value value replicated into every 8-byte slot
 */
@Override
public void putDoubles(int rowId, int count, double value) {
  // 8L keeps the base-offset computation in long arithmetic; "8 * rowId"
  // would overflow int for very large row indices.
  long offset = data + 8L * rowId;
  for (int i = 0; i < count; ++i, offset += 8) {
    Platform.putDouble(null, offset, value);
  }
}
代码示例来源:origin: org.apache.spark/spark-catalyst
/**
 * Writes {@code value} into the output buffer at {@code offset}, first
 * collapsing every NaN bit pattern to the canonical {@link Double#NaN} so
 * that byte-wise comparison and hashing of rows is stable.
 *
 * @param offset byte offset within the buffer returned by {@code getBuffer()}
 * @param value  the double to write
 */
protected final void writeDouble(long offset, double value) {
  final double canonical = Double.isNaN(value) ? Double.NaN : value;
  Platform.putDouble(getBuffer(), offset, canonical);
}
}
代码示例来源:origin: shunfei/indexr
// Stores a float field value after canonicalizing NaN.
@Override
public void setFloat(int ordinal, float value) {
// Validate the field index before touching memory.
assertIndexIsValid(ordinal);
// Collapse all NaN bit patterns to the canonical Float.NaN so byte-wise
// row comparison/hashing stays stable.
if (Float.isNaN(value)) {
value = Float.NaN;
}
// NOTE(review): the float is implicitly widened to double and written as
// 8 bytes via putDouble; Spark's own UnsafeRow.setFloat uses
// Platform.putFloat. Confirm the matching getter reads a double here,
// otherwise reads will decode garbage — TODO verify against the getter.
Platform.putDouble(baseObject, getFieldOffset(ordinal), value);
}
代码示例来源:origin: shunfei/indexr
/**
 * Stores a double field value, normalizing NaN to its canonical bit pattern
 * so byte-level row comparison and hashing remain stable.
 *
 * @param ordinal field index within the row
 * @param value   the double to store
 */
@Override
public void setDouble(int ordinal, double value) {
  assertIndexIsValid(ordinal);  // bounds-check the field index first
  final double canonical = Double.isNaN(value) ? Double.NaN : value;
  Platform.putDouble(baseObject, getFieldOffset(ordinal), canonical);
}
代码示例来源:origin: org.apache.spark/spark-catalyst_2.10
/**
 * Writes {@code value} into array element {@code ordinal}, canonicalizing
 * NaN first so equal values always share one bit pattern.
 *
 * @param ordinal element index
 * @param value   the double to store
 */
public void setDouble(int ordinal, double value) {
  // Collapse every NaN encoding to the canonical Double.NaN.
  final double stored = Double.isNaN(value) ? Double.NaN : value;
  assertIndexIsValid(ordinal);
  // Elements are 8 bytes wide.
  Platform.putDouble(baseObject, getElementOffset(ordinal, 8), stored);
}
代码示例来源:origin: org.apache.spark/spark-catalyst_2.10
/**
 * Writes a double element into the writer's buffer at {@code ordinal},
 * normalizing NaN to the canonical bit pattern beforehand.
 *
 * @param ordinal element index
 * @param value   the double to write
 */
public void write(int ordinal, double value) {
  final double canonical = Double.isNaN(value) ? Double.NaN : value;
  assertIndexIsValid(ordinal);
  // Each element occupies an 8-byte slot in the holder's buffer.
  Platform.putDouble(holder.buffer, getElementOffset(ordinal, 8), canonical);
}
代码示例来源:origin: org.apache.spark/spark-catalyst_2.11
/**
 * Stores {@code value} at array element {@code ordinal}; NaN inputs are
 * replaced with the canonical {@link Double#NaN} encoding first.
 *
 * @param ordinal element index
 * @param value   the double to store
 */
public void setDouble(int ordinal, double value) {
  double toStore = value;
  if (Double.isNaN(toStore)) {
    // One canonical NaN keeps byte-wise equality/hashing consistent.
    toStore = Double.NaN;
  }
  assertIndexIsValid(ordinal);
  Platform.putDouble(baseObject, getElementOffset(ordinal, 8), toStore);
}
代码示例来源:origin: org.apache.spark/spark-catalyst_2.10
/**
 * Marks element {@code ordinal} as null and zeroes its 8-byte data slot so
 * the buffer contents stay deterministic for equality and hashing.
 *
 * @param ordinal element index to null out
 */
public void setNullDouble(int ordinal) {
  setNullBit(ordinal);
  // Overwrite the stale payload with zero after setting the null bit.
  Platform.putDouble(holder.buffer, getElementOffset(ordinal, 8), 0.0);
}
代码示例来源:origin: org.apache.spark/spark-catalyst
/**
 * Stores a non-null double into row field {@code ordinal}, clearing the
 * field's null bit and canonicalizing NaN before the write.
 *
 * @param ordinal field index within the row
 * @param value   the double to store
 */
@Override
public void setDouble(int ordinal, double value) {
  assertIndexIsValid(ordinal);
  setNotNullAt(ordinal);  // writing a value implies the field is non-null
  final double canonical = Double.isNaN(value) ? Double.NaN : value;
  Platform.putDouble(baseObject, getFieldOffset(ordinal), canonical);
}
代码示例来源:origin: org.apache.spark/spark-catalyst_2.10
/**
 * Sets row field {@code ordinal} to {@code value}: validates the index,
 * clears the null bit, normalizes NaN, then writes the 8-byte payload.
 *
 * @param ordinal field index within the row
 * @param value   the double to store
 */
@Override
public void setDouble(int ordinal, double value) {
  assertIndexIsValid(ordinal);
  setNotNullAt(ordinal);
  double payload = value;
  if (Double.isNaN(payload)) {
    // Single canonical NaN bit pattern keeps byte-wise comparison stable.
    payload = Double.NaN;
  }
  Platform.putDouble(baseObject, getFieldOffset(ordinal), payload);
}
内容来源于网络,如有侵权,请联系作者删除!