javascript - Display a webcam/camera stream with a delay - WebRTC

e0bqpujr · posted 2023-04-04 in Java

I made a simple setup that grabs the webcam / phone camera stream and draws it onto an HTML 2D canvas.
But I haven't been able to figure out how to display the stream with a delay of a few seconds, somewhat like a delayed mirror.
I tried playing with ctx.globalAlpha = 0.005;, but that gives me a ghosting effect rather than a "delayed" stream.
Any ideas on how to achieve this?

  • The snippet below doesn't run here, probably for security reasons, but here is a pen:

https://codepen.io/farisk/pen/LvmGGQ

var width = 0, height = 0;
  
  var canvas = document.createElement('canvas'),
      ctx = canvas.getContext('2d');
  document.body.appendChild(canvas);
  
  var video = document.createElement('video'),
      track;
  video.setAttribute('autoplay',true);
  
  window.vid = video;
  
  function getWebcam(){ 
  
    navigator.mediaDevices.getUserMedia({ video: true }).then(function(stream) {
 

      // re-wrap the video track(s) in a fresh MediaStream and play it
      var newStream = new MediaStream(stream.getVideoTracks());

      video.srcObject = newStream;
      video.play();
      // keep a handle on the track so it can be stopped later
      track = stream.getTracks()[0];

    }, function(e) {
      console.error('Rejected!', e);
    });
  }
  
  getWebcam();
  
  var loopFrame;
  
  function loop(){

    loopFrame = requestAnimationFrame(loop);

    // ctx.globalAlpha = 0.005; // gives ghosting, not a real delay
    ctx.drawImage(video, 0, 0, width, height);

  }
  
  function startLoop(){ 
    loopFrame = requestAnimationFrame(loop);
  }
  
  video.addEventListener('loadedmetadata',function(){
    // size the canvas to the camera resolution, then start drawing
    width = canvas.width = video.videoWidth;
    height = canvas.height = video.videoHeight;
    startLoop();
  });
  
  // click the canvas to stop / restart the camera
  canvas.addEventListener('click',function(){
    if ( track ) {
      if ( track.stop ) { track.stop(); }
      track = null;
    } else {
      getWebcam();
    }
  });
/* CSS */
video,
canvas {
  max-width: 100%;
  height: auto;
}

kmb7vmvb #1

You can buffer your stream and play back what you have buffered.
To achieve this with a MediaStream, you can use the MediaRecorder API along with the MediaSource API.
The basic idea is to record your stream and, as each new chunk is produced, buffer it into the MediaSource. Then we just pause the video and wait for the desired delay before starting playback.

(async () => {
  const delay = 3000;
  const mimeType = `video/webm; codecs="vp8"`;
  const stream = await getStream();
  document.getElementById("realtime").srcObject = stream;
  const mediaSource = new MediaSource();
  const delayed = document.getElementById("delayed");
  delayed.src = URL.createObjectURL(mediaSource);
  await new Promise((res) =>
    mediaSource.addEventListener("sourceopen", res, { once: true })
  );
  const sourceBuffer = mediaSource.addSourceBuffer(mimeType);
  const recorder = new MediaRecorder(stream, { mimeType });
  recorder.ondataavailable = async ({ data }) => {
    if (mediaSource.readyState !== "open" || !data.size) {
      return;
    }
    sourceBuffer.appendBuffer(await data.arrayBuffer());
  };
  delayed.pause(); // hold playback until the delay has elapsed
  recorder.start(50); // emit a new chunk every 50 ms
  setTimeout(() => delayed.play(), delay);
})();

function getStream() {
  // StackSnippet only:
  // because StackSnippet don't allow the use of gUM
  // we return a MediaStream from a simple <canvas> anim
  const canvas = document.createElement("canvas");
  const ctx = canvas.getContext("2d");
  ctx.font = "30px sans-serif";
  ctx.textAlign = "center";
  ctx.textBaseline = "middle";

  function anim() {
    ctx.fillStyle = "white";
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    ctx.fillStyle = "black";
    ctx.fillText(new Date().toTimeString().split(" ")[0], canvas.width / 2, canvas.height / 2);
    requestAnimationFrame(anim);
  }
  anim();
  return canvas.captureStream();
}
Realtime:<br>
<video id="realtime" autoplay muted controls></video><br> Delayed:<br>
<video id="delayed" autoplay muted controls></video>

As a fiddle, since Stack Snippets aren't very gUM-friendly.

vmdwslir #2

You could consider storing the video data in an array, which would mean delaying playback by n seconds at the start.
Basically, on frame 1 you store the video feed into an array and draw nothing. This keeps happening until enough frames have accumulated to cover the desired delay (with requestAnimationFrame, that is roughly 60 frames per second of delay). At that point you start drawing based on the first element of the array.
After drawing that frame, remove it from the array and push the new one; a sketch of this idea follows.
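
A minimal sketch of that idea (my own illustration, not code from the answer), reusing the video, ctx, width and height variables from the question's snippet: on every animation frame we snapshot the camera into an ImageBitmap and queue it with a timestamp; once the oldest snapshot is older than the desired delay, we draw it and discard it.

const delayMs = 2000; // how far behind "live" the mirror should be
const buffered = []; // queue of { bitmap, time } snapshots

async function delayedLoop() {
  requestAnimationFrame(delayedLoop);

  // snapshot the current camera frame
  buffered.push({
    bitmap: await createImageBitmap(video),
    time: performance.now(),
  });

  // draw (and drop) snapshots once they are older than the delay
  while (buffered.length && performance.now() - buffered[0].time >= delayMs) {
    const { bitmap } = buffered.shift();
    ctx.drawImage(bitmap, 0, 0, width, height);
    bitmap.close(); // release the snapshot's memory
  }
}
// call delayedLoop() in place of the question's startLoop(), i.e. after 'loadedmetadata' has fired

Be aware that a few seconds of full-resolution frames can consume a lot of memory, which is why the MediaRecorder/MediaSource approach in the first answer scales better for longer delays.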
