Usage of java.awt.image.BufferedImage.setAccelerationPriority() with code examples


This article collects a number of code examples of the java.awt.image.BufferedImage.setAccelerationPriority() method in Java and shows how BufferedImage.setAccelerationPriority() is used in practice. The examples come from selected projects on GitHub, Stack Overflow, Maven and similar platforms, so they are reasonably representative and should serve as a useful reference. Details of the BufferedImage.setAccelerationPriority() method are as follows:
Package: java.awt.image
Class: BufferedImage
Method: setAccelerationPriority

About BufferedImage.setAccelerationPriority

setAccelerationPriority(float priority) is inherited from java.awt.Image. It sets a hint telling the Java 2D pipeline how important hardware acceleration (for example, caching the image in video memory) is for this image. The priority must lie between 0 and 1 inclusive: 0 means the image should never be accelerated, while higher values give it precedence over other images when scarce acceleration resources are allocated. The default priority is 0.5, and a value outside the 0 to 1 range throws an IllegalArgumentException.
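
The snippets collected below fall into two recurring patterns: priority 0 for images whose pixel data is written on the CPU (video frames, software rasterizers), and priority 1 for screen-compatible images that are drawn repeatedly. A minimal, self-contained sketch of both patterns (not taken from any of the projects cited below; the class and variable names are illustrative only):

import java.awt.GraphicsConfiguration;
import java.awt.GraphicsEnvironment;
import java.awt.Transparency;
import java.awt.image.BufferedImage;

public class AccelerationPriorityDemo {

    public static void main(String[] args) {
        // Pattern 1: an image whose pixels are rewritten on the CPU every frame.
        // Priority 0 tells Java 2D never to cache it in video memory.
        BufferedImage cpuFrame = new BufferedImage(640, 480, BufferedImage.TYPE_INT_RGB);
        cpuFrame.setAccelerationPriority(0f);

        // Pattern 2: a screen-compatible image that is drawn to the screen repeatedly.
        // Priority 1 makes it the strongest candidate for hardware acceleration.
        GraphicsConfiguration gc = GraphicsEnvironment.getLocalGraphicsEnvironment()
                .getDefaultScreenDevice().getDefaultConfiguration();
        BufferedImage sprite = gc.createCompatibleImage(64, 64, Transparency.TRANSLUCENT);
        sprite.setAccelerationPriority(1f);

        System.out.println(cpuFrame.getAccelerationPriority()); // prints 0.0
        System.out.println(sprite.getAccelerationPriority());   // prints 1.0
    }
}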

Code examples

Code example source: origin: sarxos/webcam-capture (the same code is also published as com.github.sarxos/webcam-capture-driver-gstreamer)

@Override
public void rgbFrame(boolean preroll, int width, int height, IntBuffer rgb) {
  LOG.trace("New RGB frame");
  if (t1 == -1 || t2 == -1) {
    t1 = System.currentTimeMillis();
    t2 = System.currentTimeMillis();
  }
  BufferedImage tmp = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
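  // Priority 0: the frame's pixels are written on the CPU through the raster's DataBuffer
  // below, so the pipeline should not try to keep an accelerated copy in video memory.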
  tmp.setAccelerationPriority(0);
  rgb.get(((DataBufferInt) tmp.getRaster().getDataBuffer()).getData(), 0, width * height);
  tmp.flush();
  image = tmp;
  if (starting.compareAndSet(true, false)) {
    synchronized (this) {
      this.notifyAll();
    }
    LOG.debug("GStreamer device ready");
  }
  t1 = t2;
  t2 = System.currentTimeMillis();
  fps = (4 * fps + 1000 / (t2 - t1 + 1)) / 5;
}

Code example source: origin: sarxos/webcam-capture (the same code is also published as com.github.sarxos/webcam-capture-driver-gstreamer)

@Override
public void open() {
  if (!open.compareAndSet(false, true)) {
    return;
  }
  LOG.debug("Opening GStreamer device");
  init();
  starting.set(true);
  Dimension size = getResolution();
  image = new BufferedImage(size.width, size.height, BufferedImage.TYPE_INT_RGB);
  image.setAccelerationPriority(0);
  image.flush();
  if (caps != null) {
    caps.dispose();
  }
  caps = Caps.fromString(String.format("%s,framerate=30/1,width=%d,height=%d", format, size.width, size.height));
  filter.setCaps(caps);
  LOG.debug("Using filter caps: {}", caps);
  pipelinePlay();
  LOG.debug("Wait for device to be ready");
  // wait max 20s for image to appear
  synchronized (this) {
    try {
      this.wait(20000);
    } catch (InterruptedException e) {
      return;
    }
  }
}

Code example source: origin: geotools/geotools

private void initGraphics(boolean inverted) {
  initRaster(inverted);
  // lazily allocate graphics
  if (graphics == null) {
    Color drawColor = inverted ? Color.BLACK : Color.WHITE;
    bi = new BufferedImage(BINARY_COLOR_MODEL, raster, false, null);
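    // Priority 0: this tile is a software scratch surface, so opt out of hardware acceleration.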
    bi.setAccelerationPriority(0);
    graphics = bi.createGraphics();
    final int offset = antiAliasing ? 2 : 0;
    graphics.setClip(-offset, -offset, tileWidth + (offset * 2), tileHeight + (offset * 2));
    graphics.translate(-this.col * stdTileWidth, -this.row * stdTileHeight);
    graphics.setRenderingHint(
        RenderingHints.KEY_ANTIALIASING,
        antiAliasing
            ? RenderingHints.VALUE_ANTIALIAS_ON
            : RenderingHints.VALUE_ANTIALIAS_OFF);
    graphics.setColor(drawColor);
  }
}

Code example source: origin: stackoverflow.com

private static BufferedImage renderImg = device.getDefaultConfiguration().
    createCompatibleImage(
          device.getDisplayMode().getWidth(),
          device.getDisplayMode().getHeight());
static {
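  // Priority 1: this screen-sized render buffer should be the first candidate
  // for caching in video memory.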
  renderImg.setAccelerationPriority(1);
}

Code example source: origin: stackoverflow.com

GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
GraphicsConfiguration gc = ge.getDefaultScreenDevice().getDefaultConfiguration();
BufferedImage img = gc.createCompatibleImage(width, height, Transparency.TRANSLUCENT);
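// A translucent, screen-compatible image marked as a top candidate for acceleration.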
img.setAccelerationPriority(1);

Code example source: origin: com.googlecode.gstreamer-java/gstreamer-java

private BufferedImage getBufferedImage(int width, int height) {
  if (currentImage != null && currentImage.getWidth() == width
      && currentImage.getHeight() == height) {
    return currentImage;
  }
  if (currentImage != null) {
    currentImage.flush();
  }
  currentImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
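  // Video frames are filled on the CPU, so ask the pipeline not to cache this image in VRAM.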
  currentImage.setAccelerationPriority(0.0f);
  return currentImage;
}

Code example source: origin: RPTools/maptool

private BufferedImage getNewChunk() {
  BufferedImage image = null;
  if (unusedChunkList.size() > 0) {
    image = unusedChunkList.remove(0);
    ImageUtil.clearImage(image);
  } else {
    image = new BufferedImage(CHUNK_SIZE, CHUNK_SIZE, Transparency.BITMASK);
  }
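  // Whether freshly created or recycled, chunks get maximum acceleration priority.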
  image.setAccelerationPriority(1);
  return image;
}

Code example source: origin: org.integratedmodelling/klab-engine

private void checkReset(int type) {
  if (resetRaster) {
    raster = RasterFactory.createBandedRaster(type, width, height, 1, null);
    bimage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
    bimage.setAccelerationPriority(1.0f);
    graphics = bimage.createGraphics();
    graphics.setPaintMode();
    graphics.setComposite(AlphaComposite.Src);
    resetRaster = false;
  }
}

Code example source: origin: org.integratedmodelling/klab-engine

/**
 * Constructor for the FeatureRasterizer object
 *
 * @param height
 *            Height of raster (number of grid cells)
 * @param width
 *            Width of raster (number of grid cells)
 * @param noData
 *            No Data value for raster
 */
public FeatureRasterizer(int height, int width, float noData) {
  this.height = height;
  this.width = width;
  this.noDataValue = noData;
  raster = /*
       * RasterFactory.createBandedRaster(DataBuffer.TYPE_FLOAT, width, height,
       * 1, null)
       */null;
  bimage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
  bimage.setAccelerationPriority(1.0f);
  // GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
  graphics = bimage.createGraphics();
  graphics.setPaintMode();
  graphics.setComposite(AlphaComposite.Src);
}

Code example source: origin: net.sf.squirrel-sql.thirdparty-non-maven/napkinlaf

public boolean imageUpdate(Image img, int infoflags, int x, int y,
      int width, int height) {
    sketch = sketchify(img);
    sketch.setAccelerationPriority(getAccelerationPriority());
    // always return true to stay on for further events
    return true;
  }
}

Code example source: origin: uk.co.caprica/vlcj

public DirectTestPlayer(String media) throws InterruptedException, InvocationTargetException {
  image = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDefaultConfiguration().createCompatibleImage(width, height);
  image.setAccelerationPriority(1.0f);

Code example source: origin: org.geoserver/gs-wms

BufferedImage image = ImageTypeSpecifier.createFromBufferedImageType(BufferedImage.TYPE_INT_ARGB)
        .createBufferedImage(paintAreaSize, paintAreaSize);
image.setAccelerationPriority(0);
