Usage of the org.apache.spark.unsafe.Platform.allocateMemory() method, with code examples


This article collects Java code examples of the org.apache.spark.unsafe.Platform.allocateMemory() method and shows how it is used in practice. The examples come from selected open-source projects published on GitHub, Stack Overflow, and Maven, so they reflect real-world usage and should serve as a useful reference. The details of Platform.allocateMemory() are as follows:

Package: org.apache.spark.unsafe
Class name: Platform
Method name: allocateMemory

About Platform.allocateMemory

Platform.allocateMemory(long size) allocates a block of off-heap memory by delegating to the JVM's sun.misc.Unsafe and returns the raw address of the allocated block. Because this memory lives outside the garbage-collected heap, every allocation must eventually be released with Platform.freeMemory(address).
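
As a quick illustration, here is a minimal sketch of calling the method directly (this example is not taken from the article's source projects; it only assumes the spark-unsafe artifact is on the classpath):

import org.apache.spark.unsafe.Platform;

public class AllocateMemoryDemo {
  public static void main(String[] args) {
    // Reserve 64 bytes of off-heap memory; the return value is a raw address.
    long address = Platform.allocateMemory(64);
    try {
      // With a null base object, Platform treats the offset as an absolute address.
      Platform.putLong(null, address, 42L);
      System.out.println("read back: " + Platform.getLong(null, address));
    } finally {
      // Off-heap memory is not garbage collected, so it must be freed explicitly.
      Platform.freeMemory(address);
    }
  }
}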

Code examples

Example source: org.apache.spark/spark-unsafe

@Override
public MemoryBlock allocate(long size) throws OutOfMemoryError {
 // Reserve `size` bytes of off-heap memory; the returned long is the raw address.
 long address = Platform.allocateMemory(size);
 // A null base object marks the block as off-heap; the address serves as the offset.
 MemoryBlock memory = new MemoryBlock(null, address, size);
 if (MemoryAllocator.MEMORY_DEBUG_FILL_ENABLED) {
  // In debug mode, fill new memory with a marker byte to expose reads of uninitialized data.
  memory.fill(MemoryAllocator.MEMORY_DEBUG_FILL_CLEAN_VALUE);
 }
 return memory;
}

The same allocate() implementation appears verbatim in the org.apache.spark/spark-unsafe_2.11, org.apache.spark/spark-unsafe_2.10, and io.snappydata/snappy-spark-unsafe artifacts.
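
This allocate() override is Spark's off-heap allocator, normally obtained through the MemoryAllocator.UNSAFE singleton. The following is a minimal usage sketch (not from the source projects above; it assumes the Spark 2.x-era spark-unsafe API shown in these snippets, where MemoryBlock is a concrete class):

import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.memory.MemoryAllocator;
import org.apache.spark.unsafe.memory.MemoryBlock;

public class UnsafeAllocatorDemo {
  public static void main(String[] args) {
    // MemoryAllocator.UNSAFE is backed by Platform.allocateMemory(), as shown above.
    MemoryBlock block = MemoryAllocator.UNSAFE.allocate(256);
    try {
      // For an off-heap block the base object is null and the base offset is the raw address.
      Platform.putLong(block.getBaseObject(), block.getBaseOffset(), 123L);
      System.out.println("read back: "
          + Platform.getLong(block.getBaseObject(), block.getBaseOffset()));
    } finally {
      // Return the block to the allocator, which calls Platform.freeMemory() internally.
      MemoryAllocator.UNSAFE.free(block);
    }
  }
}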

Example source: io.snappydata/snappy-spark-unsafe

/**
 * Uses internal JDK APIs to allocate a DirectByteBuffer while ignoring the JVM's
 * MaxDirectMemorySize limit (the default limit is too low and we do not want to require users
 * to increase it).
 */
@SuppressWarnings("unchecked")
public static ByteBuffer allocateDirectBuffer(int size) {
 try {
  // Reflectively access the package-private DirectByteBuffer(long address, int capacity)
  // constructor and its private "cleaner" field.
  Class<?> cls = Class.forName("java.nio.DirectByteBuffer");
  Constructor<?> constructor = cls.getDeclaredConstructor(Long.TYPE, Integer.TYPE);
  constructor.setAccessible(true);
  Field cleanerField = cls.getDeclaredField("cleaner");
  cleanerField.setAccessible(true);
  // Back the buffer with memory from allocateMemory(), outside direct-memory accounting.
  final long memory = allocateMemory(size);
  ByteBuffer buffer = (ByteBuffer) constructor.newInstance(memory, size);
  // Register a Cleaner so the off-heap memory is released once the buffer is unreachable.
  Cleaner cleaner = Cleaner.create(buffer, new Runnable() {
   @Override
   public void run() {
    freeMemory(memory);
   }
  });
  cleanerField.set(buffer, cleaner);
  return buffer;
 } catch (Exception e) {
  throwException(e);
 }
 throw new IllegalStateException("unreachable");
}

Example source: org.apache.spark/spark-unsafe

/**
 * Uses internal JDK APIs to allocate a DirectByteBuffer while ignoring the JVM's
 * MaxDirectMemorySize limit (the default limit is too low and we do not want to require users
 * to increase it).
 */
@SuppressWarnings("unchecked")
public static ByteBuffer allocateDirectBuffer(int size) {
 try {
  Class<?> cls = Class.forName("java.nio.DirectByteBuffer");
  Constructor<?> constructor = cls.getDeclaredConstructor(Long.TYPE, Integer.TYPE);
  constructor.setAccessible(true);
  Field cleanerField = cls.getDeclaredField("cleaner");
  cleanerField.setAccessible(true);
  long memory = allocateMemory(size);
  ByteBuffer buffer = (ByteBuffer) constructor.newInstance(memory, size);
  Cleaner cleaner = Cleaner.create(buffer, () -> freeMemory(memory));
  cleanerField.set(buffer, cleaner);
  return buffer;
 } catch (Exception e) {
  throwException(e);
 }
 throw new IllegalStateException("unreachable");
}

The org.apache.spark/spark-unsafe_2.11 and org.apache.spark/spark-unsafe_2.10 artifacts contain this lambda-based allocateDirectBuffer() implementation verbatim.
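
Platform.allocateDirectBuffer() can then be used like any other source of direct buffers. A minimal usage sketch follows (again not from the source projects; note that on JDK 9+ the reflective access shown above may additionally require flags such as --add-opens java.base/java.nio=ALL-UNNAMED):

import java.nio.ByteBuffer;
import org.apache.spark.unsafe.Platform;

public class DirectBufferDemo {
  public static void main(String[] args) {
    // The returned buffer wraps memory from Platform.allocateMemory(), so it is not
    // counted against -XX:MaxDirectMemorySize.
    ByteBuffer buffer = Platform.allocateDirectBuffer(1024);

    buffer.putInt(0, 7);
    System.out.println("int at offset 0: " + buffer.getInt(0));

    // No explicit free: the Cleaner registered in allocateDirectBuffer() releases the
    // off-heap memory once the buffer becomes unreachable.
  }
}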
