MediaStream Integration

The following examples illustrate WebRTC integration with the Web Audio API. They are borrowed and modified from Robert O'Callahan's MediaStream Processing API proposal.

Please note the addition of two new AudioContext methods: createMediaStreamSource() and createMediaStreamDestination(). They have yet to be fully documented, but they provide a simple and straightforward integration with the Web Audio API.
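
All of the examples below assume that an AudioContext has already been created and is available as the variable context. A minimal sketch (some implementations of this era expose the constructor under a vendor prefix such as webkitAudioContext):

    <script>
        var context = new AudioContext();
    </script>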

Examples

  1. Play video with processing effect applied to the audio track
    ECMAScript
     
    <video src="foo.webm" id="v" controls></video>
    <script>
        var audioSource = context.createMediaElementSource(document.getElementById("v"));
        var filter = context.createBiquadFilter(); // defaults to a low-pass filter
        audioSource.connect(filter);
        filter.connect(context.destination);
    </script>
    
  2. Play video with processing effects mixing in out-of-band audio tracks (in sync)
    ECMAScript
     
    <video src="foo.webm" id="v"></video>
    <audio src="back.webm" id="back"></audio>
    <script>    
        var videoSource = context.createMediaElementSource(document.getElementById("v"));
        var audioSource = context.createMediaElementSource(document.getElementById("back"));
        var effect = context.createJavaScriptNode(2048, 2, 2); // bufferSize, input channels, output channels
        effect.onaudioprocess = customAudioProcessor; // sketched after this example
        videoSource.connect(effect);
        effect.connect(context.destination);
        audioSource.connect(context.destination);
        
        function startPlaying() {
            document.getElementById("v").play();
            document.getElementById("back").play();
        }
    </script>
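
    Example 2 assumes a customAudioProcessor callback. A minimal sketch of such a handler, which simply halves the signal level (the 0.5 gain is arbitrary):

    <script>
        function customAudioProcessor(event) {
            var input = event.inputBuffer;
            var output = event.outputBuffer;
            for (var channel = 0; channel < output.numberOfChannels; ++channel) {
                var inData = input.getChannelData(channel);
                var outData = output.getChannelData(channel);
                for (var i = 0; i < inData.length; ++i) {
                    outData[i] = inData[i] * 0.5; // attenuate by half
                }
            }
        }
    </script>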
    
  3. Capture microphone input and stream it out to a peer with a processing effect applied to the audio
    ECMAScript
     
    <script>
        navigator.getUserMedia('audio', gotAudio);
        function gotAudio(stream) {
            var microphone = context.createMediaStreamSource(stream);
            var filter = context.createBiquadFilter();
            var peer = context.createMediaStreamDestination();
            microphone.connect(filter);
            filter.connect(peer);
            peerConnection.addStream(peer.stream);
        }
    </script>
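
    Examples 3, 4, and 5 assume that a peerConnection object already exists. Under the PeerConnection API draft of the same era it might be created along these lines (the STUN server and the signalingChannel used to reach the remote peer are placeholders):

    <script>
        var peerConnection = new PeerConnection("STUN stun.example.net", function(message) {
            signalingChannel.send(message); // relay signaling messages to the remote peer
        });
    </script>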
    
  4. Capture microphone input and visualize it as it is being streamed out to a peer and recorded
    ECMAScript
     
    <canvas id="c"></canvas>
    <script>
        navigator.getUserMedia('audio', gotAudio);
        var streamRecorder;
        function gotAudio(stream) {
            var microphone = context.createMediaStreamSource(stream);
            var analyser = context.createAnalyser();
            microphone.connect(analyser);
            analyser.connect(context.destination);
            requestAnimationFrame(drawAnimation); // drawAnimation is sketched after this example
            
            streamRecorder = stream.record();
            peerConnection.addStream(stream);
        }
    </script>
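
    Examples 4 and 5 assume a drawAnimation callback. A minimal sketch that paints the analyser's frequency data into the canvas (it assumes the analyser variable is in scope, e.g. hoisted out of gotAudio):

    <script>
        function drawAnimation() {
            var canvas = document.getElementById("c");
            var ctx = canvas.getContext("2d");
            var freqData = new Uint8Array(analyser.frequencyBinCount);
            analyser.getByteFrequencyData(freqData);
            ctx.clearRect(0, 0, canvas.width, canvas.height);
            for (var i = 0; i < freqData.length; ++i) {
                var magnitude = freqData[i] / 256 * canvas.height;
                ctx.fillRect(i, canvas.height - magnitude, 1, magnitude);
            }
            requestAnimationFrame(drawAnimation); // keep the visualization running
        }
    </script>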
    
  5. Capture microphone input, visualize it, mix in another audio track and stream the result to a peer and record
    ECMAScript
     
    <canvas id="c"></canvas>
    <audio src="back.webm" id="back"></audio>
    <script>
        navigator.getUserMedia('audio', gotAudio);
        var streamRecorder;
        function gotAudio(stream) {
            var microphone = context.createMediaStreamSource(stream);
            var backgroundMusic = context.createMediaElementSource(document.getElementById("back"));
            var analyser = context.createAnalyser();
            var mixedOutput = context.createMediaStreamDestination();
            microphone.connect(analyser);
            analyser.connect(mixedOutput);
            backgroundMusic.connect(mixedOutput);
            requestAnimationFrame(drawAnimation);
        
            streamRecorder = mixedOutput.stream.record();
            peerConnection.addStream(mixedOutput.stream);
        }
    </script>
    
  6. Receive audio streams from peers, mix them with spatialization effects, and play
    ECMAScript
     
    <audio id="out" autoplay></audio>
    <script>
        peerConnection.onaddstream = function(event) {
            var peerInput = context.createMediaStreamSource(event.stream);
            var panner = context.createPanner();
            panner.setPosition(x, y, z); // x, y, z: the application-chosen position for this peer
            peerInput.connect(panner);
            panner.connect(context.destination);
        };
    </script>
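
    The panner positions in example 6 are interpreted relative to the context's listener, which can be placed explicitly if the default is not wanted:

    <script>
        context.listener.setPosition(0, 0, 0); // place the listener at the origin
    </script>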
    
  7. Seamlessly chain from the end of one input stream to another
    ECMAScript
     
    <audio src="in1.webm" id="in1" preload></audio>
    <audio src="in2.webm" id="in2"></audio>
    <script>
        var in1 = document.getElementById("in1");
        var in2 = document.getElementById("in2");
        in1.onloadeddata = function() {
            in1.onended = function() { in2.play(); };
            in1.play();
        };
    </script>
    
  8. Seamlessly switch from one input stream to another, e.g. to implement adaptive streaming
    ECMAScript
     
    <audio src="in1.webm" id="in1" preload></audio>
    <audio src="in2.webm" id="in2"></audio>
    <script>
        var in1 = document.getElementById("in1");
        var in2 = document.getElementById("in2");
        var source1 = context.createMediaElementSource(in1);
        var source2 = context.createMediaElementSource(in2);
        source1.connect(context.destination);
        source2.connect(context.destination);

        in1.play();

        function switchStreams() {
            in2.currentTime = in1.currentTime + 10; // arbitrary, but we should be able to complete the seek within this time
            var switchTime = context.currentTime + 10;
            // Scheduled pause(time)/play(time) are from the MediaStream Processing
            // API proposal; HTMLMediaElement play()/pause() as currently specified take no arguments.
            in1.pause(switchTime);
            in2.play(switchTime);
        }
    </script>
    
  9. Synthesize samples from JS data
    ECMAScript
     
    <script>
        var processor = context.createJavaScriptNode(2048, 2, 2);
        processor.onaudioprocess = customAudioProcess; // sketched after this example
        processor.connect(context.destination);
    </script>
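
    Example 9 assumes a customAudioProcess callback. A minimal sketch that synthesizes a 440 Hz sine tone into both output channels:

    <script>
        var phase = 0;
        function customAudioProcess(event) {
            var left = event.outputBuffer.getChannelData(0);
            var right = event.outputBuffer.getChannelData(1);
            for (var i = 0; i < left.length; ++i) {
                left[i] = right[i] = Math.sin(phase);
                phase += 2 * Math.PI * 440 / context.sampleRate;
                if (phase > 2 * Math.PI) phase -= 2 * Math.PI; // keep phase bounded
            }
        }
    </script>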
    
  10. Trigger a sound sample to be played through the effects graph ASAP but without causing any blocking
    ECMAScript
     
    <script>
        var source = context.createBufferSource();
        source.buffer = kickDrum909;
        source.connect(context.destination);
        source.noteOn(0); // a time of 0 means "play as soon as possible"
    </script>
    
  11. Trigger a sound sample to be played through the effects graph in five seconds
    ECMAScript
     
    <script>
        var source = context.createBufferSource();
        source.buffer = kickDrum909;
        var effect = context.createConvolver();
        effect.buffer = immenseCathedral;
        source.connect(effect);
        effect.connect(context.destination);
        source.noteOn(context.currentTime + 5);
    </script>
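
    Examples 10 and 11 assume that kickDrum909 (and, in example 11, immenseCathedral) is an AudioBuffer that has already been loaded. One way to load such a buffer, assuming a placeholder file name:

    <script>
        var kickDrum909;
        var request = new XMLHttpRequest();
        request.open('GET', 'kick-909.wav', true);
        request.responseType = 'arraybuffer';
        request.onload = function() {
            context.decodeAudioData(request.response, function(buffer) {
                kickDrum909 = buffer; // now ready for use by a buffer source
            });
        };
        request.send();
    </script>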
    
Please note that examples 12, 13, and 14 involve no audio processing and thus should work with the WebRTC API as it is currently designed.

  12. Capture video from a camera and analyze it (e.g. face recognition)
    ECMAScript
     
    <script>
      navigator.getUserMedia('video', gotVideo);
      function gotVideo(stream) {
        // createWorkerProcessor() is from the MediaStream Processing API proposal
        stream.createWorkerProcessor(new Worker("face-recognizer.js"));
      }
    </script>
    
  13. Capture video, record it to a file and upload the file (e.g. Youtube)
    ECMAScript
     
    <script>
      navigator.getUserMedia('video', gotVideo);
      var streamRecorder;
      function gotVideo(stream) {
        streamRecorder = stream.record();
      }
      function stopRecording() {
        streamRecorder.getRecordedData(gotData);
      }
      function gotData(blob) {
        var x = new XMLHttpRequest();
        x.open('POST', 'uploadMessage');
        x.send(blob);
      }
    </script>
    
  14. Capture video from a canvas, record it to a file then upload
    ECMAScript
     
    <canvas width="640" height="480" id="c"></canvas>
    <script>
      var canvas = document.getElementById("c");  
      var streamRecorder = canvas.stream.record(); // canvas.stream is from the MediaStream Processing API proposal
      function stopRecording() {
        streamRecorder.getRecordedData(gotData);
      }
      function gotData(blob) {
        var x = new XMLHttpRequest();
        x.open('POST', 'uploadMessage');
        x.send(blob);
      }
      var frame = 0;
      function updateCanvas() {
        var ctx = canvas.getContext("2d");
        ctx.clearRect(0, 0, 640, 480);
        ctx.fillText("Frame " + frame, 0, 200);
        ++frame;
      }
      setInterval(updateCanvas, 30); // redraw roughly every 30 ms (~33 fps)
    </script>