
<video> playback of recorded stream using MediaRecorder() from <canvas> using canvas.captureStream() renders differently at firefox, chromium

Using the original javascript at MediaRecorder-examples/record-canvas-to-video.js

Software requirements

  • Firefox 45. This is a Firefox technical demo. So it might not work on your browser, if it doesn't implement what we're demoing. At the time of writing (January 2016), you need to download either Firefox Developer Edition or Firefox Nightly.
window.onload = function () {
  var video = document.getElementById('video');
  var canvas = document.getElementById('canvas');
  var width = canvas.width;
  var height = canvas.height;
  var capturing = false;

  video.width = width;
  video.height = height;

  // We need the 2D context to individually manipulate pixel data
  var ctx = canvas.getContext('2d');

  // Start with a black background
  ctx.fillStyle = '#000';
  ctx.fillRect(0, 0, width, height);

  // Since we're continuously accessing and overwriting the pixels
  // object, we'll request it once and reuse it across calls to draw()
  // for best performance (we don't need to create ImageData objects
  // on every frame)
  var pixels = ctx.getImageData(0, 0, width, height);
  var data = pixels.data;
  var numPixels = data.length;

  var stream = canvas.captureStream(15);
  var recorder = new MediaRecorder(stream);

  recorder.addEventListener('dataavailable', finishCapturing);

  startCapturing();
  recorder.start();

  setTimeout(function() {
    recorder.stop();
  }, 2000);


  function startCapturing() {
    capturing = true;
    draw();
  }


  function finishCapturing(e) {
    capturing = false;
    var videoData = [ e.data ];
    var blob = new Blob(videoData, { 'type': 'video/webm' });
    var videoURL = URL.createObjectURL(blob);
    video.src = videoURL;
    video.play();
  }


  function draw() {
    // We don't want to render again if we're not capturing
    if(capturing) {
      requestAnimationFrame(draw);
    }
    drawWhiteNoise();
  }


  function drawWhiteNoise() {
    var offset = 0;

    for(var i = 0; i < numPixels; i++) {
      var grey = Math.round(Math.random() * 255);

      // The data array has pixel values in RGBA order
      // (Red, Green, Blue and Alpha for transparency)
      // We will make R, G and B have the same value ('grey'),
      // then skip the Alpha value by increasing the offset,
      // as we're happy with the opaque value we set when painting
      // the background black at the beginning
      data[offset++] = grey;
      data[offset++] = grey;
      data[offset++] = grey;
      offset++; // skip the alpha component
    }

    // And tell the context to draw the updated pixels in the canvas
    ctx.putImageData(pixels, 0, 0);
  }

};

produces errors at chromium 55:

Uncaught (in promise) DOMException: The play() request was interrupted by a new load request.

Failed to load resource: the server responded with a status of 416 (Requested Range Not Satisfiable)

though it returns the expected result at firefox 52.

Adjusting the javascript for use at chromium by pushing the Blob from the dataavailable event of MediaRecorder to an array, then concatenating the blobs at the stop event:

window.onload = function () {
  var blobs = [];
  var video = document.getElementById('video');
  var canvas = document.getElementById('canvas');
  var width = canvas.width;
  var height = canvas.height;
  var capturing = false;

  video.width = width;
  video.height = height;

  // We need the 2D context to individually manipulate pixel data
  var ctx = canvas.getContext('2d');

  // Start with a black background
  ctx.fillStyle = '#000';
  ctx.fillRect(0, 0, width, height);

  // Since we're continuously accessing and overwriting the pixels
  // object, we'll request it once and reuse it across calls to draw()
  // for best performance (we don't need to create ImageData objects
  // on every frame)
  var pixels = ctx.getImageData(0, 0, width, height);
  var data = pixels.data;
  var numPixels = data.length;

  var stream = canvas.captureStream(15);
  var recorder = new MediaRecorder(stream);

  recorder.addEventListener('dataavailable', finishCapturing);
  recorder.addEventListener('stop', function(e) {
    video.oncanplay = video.play;
    video.src = URL.createObjectURL(new Blob(blobs, {type:"video/webm"}));
  });
  startCapturing();
  recorder.start();

  setTimeout(function() {
    capturing = false;
    recorder.stop();
  }, 2000);


  function startCapturing() {
    capturing = true;
    draw();
  }


  function finishCapturing(e) {
    blobs.push(e.data);
  }


  function draw() {
    // We don't want to render again if we're not capturing
    if(capturing) {
      requestAnimationFrame(draw);
    }
    drawWhiteNoise();
  }


  function drawWhiteNoise() {
    var offset = 0;

    for(var i = 0; i < numPixels; i++) {
      var grey = Math.round(Math.random() * 255);

      // The data array has pixel values in RGBA order
      // (Red, Green, Blue and Alpha for transparency)
      // We will make R, G and B have the same value ('grey'),
      // then skip the Alpha value by increasing the offset,
      // as we're happy with the opaque value we set when painting
      // the background black at the beginning
      data[offset++] = grey;
      data[offset++] = grey;
      data[offset++] = grey;
      offset++; // skip the alpha component
    }

    // And tell the context to draw the updated pixels in the canvas
    ctx.putImageData(pixels, 0, 0);
  }

};

renders the recorded stream similarly to firefox.

However, the adjustments made to play the video at both firefox and chromium render with a minimal, though noticeable, delay between the concatenated blobs.

How can we render the same visual playback of the canvas.captureStream() recording made with MediaRecorder() at the <video> element?

plnkr http://plnkr.co/edit/KgGpkCJRvPG2T2Jy4wyH?p=preview

You're driving the animation from the main JS thread here, so it's possible other main thread JS activities - like the ondataavailable callback firing - could disrupt timing enough to be noticeable.

Try omitting the (60) framerate from the canvas.captureStream() call.

MDN says: "If not set, a new frame will be captured each time the canvas changes; if set to 0, one single frame will be captured."

This should hopefully make the output more impervious to such interruptions, at the cost of shortening its length marginally.
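
For example, a minimal sketch of that change against the question's script (only the captureStream() call differs; everything else stays as posted):

  // Let the browser capture a frame whenever the canvas changes,
  // rather than forcing a fixed capture rate
  var stream = canvas.captureStream();
  var recorder = new MediaRecorder(stream);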

You can also specify a timeslice with the start method, e.g. recorder.start(2000), to limit when the dataavailable event fires and avoid interruptions.
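
A minimal sketch combining both suggestions, reusing the variables and functions from the question's script (the 2000 ms timeslice here is only an illustrative value):

  var stream = canvas.captureStream();   // no framerate argument
  var recorder = new MediaRecorder(stream);

  recorder.addEventListener('dataavailable', finishCapturing);

  startCapturing();
  // With a timeslice, dataavailable fires roughly every 2000 ms (and at stop()),
  // so the callback interrupts the main-thread animation less often
  recorder.start(2000);

  setTimeout(function() {
    recorder.stop();
  }, 2000);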
