
Is it possible to post-process an HTML5 video element's audio output with the Web Audio API?

I have an HTML5 video element and I need to apply different kinds of processing to the video's audio output in real time. On desktop I got this working with the Web Audio API. The API is seemingly present on iOS as well: I can inspect the created objects, but the video's output signal is not modified.

Here's my example code:

$(function () {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    var audioContext = new AudioContext();
    var bufferSize = 1024;
    var selectedChannel = 0;

    // ScriptProcessor that copies the selected input channel to its output
    // channel and silences the other one.
    var effect = (function () {
        var node = audioContext.createScriptProcessor(bufferSize, 2, 2);
        node.addEventListener('audioprocess', function (e) {
            var input = e.inputBuffer.getChannelData(selectedChannel);
            var outputL = e.outputBuffer.getChannelData(0);
            var outputR = e.outputBuffer.getChannelData(1);
            for (var i = 0; i < bufferSize; i++) {
                outputL[i] = selectedChannel == 0 ? input[i] : 0.0;
                outputR[i] = selectedChannel == 1 ? input[i] : 0.0;
            }
        });
        return node;
    })();

    var streamAttached = false;

    // Route the <video> element's audio through the processing chain (only once).
    function attachStream(video) {
        if (streamAttached) {
            return;
        }
        var source = audioContext.createMediaElementSource(video);
        source.connect(effect);
        effect.connect(audioContext.destination);
        streamAttached = true;
    }

    // On iOS, playback has to be started from a user gesture.
    function iOS_video_touch_start() {
        var video = $('#vid')[0];
        video.play();
        attachStream(video);
    }

    var needtouch = false;
    $('#vid').on('play', function () {
        attachStream(this);
    }).on('loadedmetadata', function () {
        this.play();
        this.volume = 1.0;
        if (this && this.paused) {
            if (needtouch == false) {
                needtouch = true;
                this.addEventListener("touchstart", iOS_video_touch_start, true);
            }
        }
    });

    // Exposed to the pan buttons on the page.
    window.panToRight = function () {
        selectedChannel = 1;
    };
    window.panToLeft = function () {
        selectedChannel = 0;
    };
});

You can also check it on CodePen: http://codepen.io/anon/pen/pgeJQG

With the buttons you can toggle between the left and the right channels. On desktop browsers (Chrome, Firefox and Safari tested) it works fine.
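
For reference, the page markup is essentially just a video element and two buttons calling those pan globals; a simplified sketch (the src value is a placeholder, the rest follows from the #vid selector and the window.panToLeft/panToRight functions above):

<!-- simplified markup assumed by the script above -->
<video id="vid" src="video.mp4" controls></video>
<button onclick="panToLeft()">Left channel</button>
<button onclick="panToRight()">Right channel</button>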

I have also tried the older createJavaScriptNode() instead of createScriptProcessor(), and an alternative effect chain that looked like this:

var audioContext = new (window.AudioContext||window.webkitAudioContext)();
audioContext.createGain = audioContext.createGain||audioContext.createGainNode;
var gainL = audioContext.createGain();
var gainR = audioContext.createGain();
gainL.gain.value = 1;
gainR.gain.value = 1;
var merger = audioContext.createChannelMerger(2);
var splitter = audioContext.createChannelSplitter(2);

//Connect to source
source = audioContext.createMediaElementSource(video);
//Connect the source to the splitter
source.connect(splitter, 0, 0);
//Connect the splitter's outputs to the gain nodes
splitter.connect(gainL, 0);
splitter.connect(gainR, 1);

//Connect the left and right gain nodes to the merger node's inputs
//Assuming stereo as the initial status
gainL.connect(merger, 0, 0);
gainR.connect(merger, 0, 1);

//Connect the merger output to the context destination
merger.connect(audioContext.destination, 0, 0);

As you have probably noticed, this version uses only built-in nodes. But still no luck.
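
To be explicit about the createJavaScriptNode() variant mentioned above: it is just the deprecated name of the same factory method, so that attempt was only a feature-tested rename, roughly this sketch:

// Rough sketch of the fallback for builds that only expose the deprecated name;
// the audioprocess handler stays exactly the same as in the first snippet.
var createProcessor = audioContext.createScriptProcessor || audioContext.createJavaScriptNode;
var node = createProcessor.call(audioContext, bufferSize, 2, 2);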

So my questions are: Is this even possible on mobile? If it is, then what am I missing? If it is not, is there any possible workaround? Thanks!

With Chrome on Android, MediaElementSource is not currently routed to WebAudio. This is a known issue and is planned to be fixed eventually.
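
If you need something in the meantime, one commonly suggested workaround is to bypass MediaElementSource entirely: fetch the sound track as a separate file, decode it with decodeAudioData(), and play it through an AudioBufferSourceNode while the video element itself stays muted. A rough sketch only, assuming the audio is also available as a standalone file ('audio.mp3' is a placeholder URL) and that you keep A/V sync yourself:

// Workaround sketch: feed Web Audio directly instead of tapping the <video> element.
// Assumes the sound track also exists as a separate file (placeholder URL below).
function playProcessedAudio(audioContext, effect, video) {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', 'audio.mp3', true);          // placeholder URL
    xhr.responseType = 'arraybuffer';
    xhr.onload = function () {
        audioContext.decodeAudioData(xhr.response, function (buffer) {
            var source = audioContext.createBufferSource();
            source.buffer = buffer;
            source.connect(effect);              // reuse the processing node from the question
            effect.connect(audioContext.destination);
            video.muted = true;                  // the element no longer supplies the audio
            video.play();
            source.start(0);                     // keep A/V sync manually from here on
        });
    };
    xhr.send();
}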
