最新消息:雨落星辰是一个专注网站SEO优化、网站SEO诊断、搜索引擎研究、网络营销推广、网站策划运营及站长类的自媒体原创博客

javascript - Chrome won't play WebAudio getUserMedia via WebRTCPeer.js - Stack Overflow

programmeradmin6浏览0评论

I want to make a simple audio only stream over WebRTC, using Peer.js. I'm running the simple PeerServer locally.

The following works perfectly fine in Firefox 30, but I can't get it to work in Chrome 35. I would expect there was something wrong with the PeerJS setup, but Chrome -> Firefox works perfectly fine, while Chrome -> Chrome seems to send the stream, but won't play over speakers.

Setting up getUserMedia Note: uncommenting those lines below will let me hear the loopback in Chrome and Firefox.

// Normalize the vendor-prefixed capture/WebAudio APIs onto the standard names.
navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);
window.AudioContext = window.AudioContext || window.webkitAudioContext;

// Request an audio-only capture; the result arrives via the two callbacks below.
if (!navigator.getUserMedia) {
    alert('getUserMedia not supported.');
} else {
    navigator.getUserMedia({video: false, audio: true}, getMediaSuccess, getMediaError);
}

// Microphone stream, filled in asynchronously by getMediaSuccess.
var localMediaStream;
//var audioContext = new AudioContext();

// getUserMedia success callback: stash the microphone stream so the
// signalling code can answer/place calls with it.
function getMediaSuccess(mediaStream) {
    localMediaStream = mediaStream;
    // Uncomment to hear a local loopback of the microphone:
    //var microphone = audioContext.createMediaStreamSource(mediaStream);
    //microphone.connect(audioContext.destination);
}

// getUserMedia failure callback: log the details and tell the user where
// to look.
function getMediaError(err) {
    console.error(err);
    alert('getUserMedia error. See console.');
}

Making the connection

// Connect to the PeerServer running on the LAN.
var peer = new Peer({host: '192.168.1.129', port: 9000});

// Log the ID this peer was assigned once the signalling channel opens.
function logPeerId(id) {
    console.log('My ID:', id);
}
peer.on('open', logPeerId);

// Incoming call: answer with the local microphone and route the remote
// audio into a fresh WebAudio graph.
peer.on('call', function(call) {
    console.log('answering call with', localMediaStream);
    call.answer(localMediaStream);
    //THIS WORKS IN CHROME, localMediaStream exists

    // Remote media arrived: pipe it straight to the speakers.
    function onRemoteStream(stream) {
        console.log('streamRecieved', stream);
        //THIS WORKS IN CHROME, the stream has come through

        var ctx = new AudioContext();
        var source = ctx.createMediaStreamSource(stream);
        source.connect(ctx.destination);
        //I HEAR AUDIO IN FIREFOX, BUT NOT CHROME
    }
    call.on('stream', onRemoteStream);

    call.on('error', function(err) {
        console.log(err);
        //LOGS NO ERRORS
    });
});

// Place an audio call to the given peer ID using the captured microphone
// stream, and return the resulting MediaConnection.
// NOTE(review): the original assigned the connection to an unused local
// (`voiceStream`) and dropped it, so the calling side could never attach
// a 'stream' handler and hear the remote audio — attach
// `connect(id).on('stream', ...)` on the returned connection.
function connect(id) {
    return peer.call(id, localMediaStream);
}

I want to make a simple audio only stream over WebRTC, using Peer.js. I'm running the simple PeerServer locally.

The following works perfectly fine in Firefox 30, but I can't get it to work in Chrome 35. I would expect there was something wrong with the PeerJS setup, but Chrome -> Firefox works perfectly fine, while Chrome -> Chrome seems to send the stream, but won't play over speakers.

Setting up getUserMedia Note: uncommenting those lines below will let me hear the loopback in Chrome and Firefox.

// Normalize the vendor-prefixed capture/WebAudio APIs onto the standard names.
navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);
window.AudioContext = window.AudioContext || window.webkitAudioContext;

// Kick off an audio-only capture; results are delivered to the callbacks.
if (!navigator.getUserMedia) {
    alert('getUserMedia not supported.');
} else {
    navigator.getUserMedia({video: false, audio: true}, getMediaSuccess, getMediaError);
}

// Microphone stream, filled in asynchronously by getMediaSuccess.
var localMediaStream;
//var audioContext = new AudioContext();

// getUserMedia success: keep the stream for answering/placing calls.
function getMediaSuccess(mediaStream) {
    localMediaStream = mediaStream;
    // Uncomment to hear a local loopback of the microphone:
    //var microphone = audioContext.createMediaStreamSource(mediaStream);
    //microphone.connect(audioContext.destination);
}

// getUserMedia failure: log details and point the user at the console.
function getMediaError(err) {
    console.error(err);
    alert('getUserMedia error. See console.');
}

Making the connection

// Connect to the PeerServer running on the LAN.
var peer = new Peer({host: '192.168.1.129', port: 9000});

peer.on('open', function(id) {
    console.log('My ID:', id);
});

// Incoming call: answer with the local microphone and route the remote
// audio into a fresh WebAudio graph.
peer.on('call', function(call) {
    console.log('answering call with', localMediaStream);
    call.answer(localMediaStream);
    //THIS WORKS IN CHROME, localMediaStream exists

    call.on('stream', function(stream) {
        console.log('streamRecieved', stream);
        //THIS WORKS IN CHROME, the stream has come through

        var audioContext = new AudioContext();
        var audioStream = audioContext.createMediaStreamSource(stream);
        audioStream.connect(audioContext.destination);
        //I HEAR AUDIO IN FIREFOX, BUT NOT CHROME
    });

    call.on('error', function(err) {
        console.log(err);
        //LOGS NO ERRORS
    });
});

// Place an audio call to the given peer ID and return the MediaConnection.
// Fix: the original dropped the connection into an unused local
// (`voiceStream`), so the caller could never attach a 'stream' handler —
// returning it lets callers do `connect(id).on('stream', ...)`.
function connect(id) {
    return peer.call(id, localMediaStream);
}
Share Improve this question edited Jun 18, 2014 at 13:55 371273 asked Jun 18, 2014 at 13:37 371273371273 5,43611 gold badges50 silver badges68 bronze badges 3
  • what if you change AudioContext() to webkitAudioContext()? Just a shot in the dark as there should not really be a difference. – Benjamin Trent Commented Jun 18, 2014 at 13:46
  • tried it, no difference. I think line 2 in the first section would have taken care of that anyways. – 371273 Commented Jun 18, 2014 at 13:52
  • You're right...this is an interesting predicament. – Benjamin Trent Commented Jun 18, 2014 at 13:53
Add a comment  | 

3 Answers 3

Reset to default 11

This still appears to be an issue even in Chrome 73.

The solution that saved me for now is to also connect the media stream to a muted HTML audio element. This seems to make the stream work and audio starts flowing into the WebAudio nodes.

This would look something like:

// Workaround: route the remote stream through a muted <audio> element so
// Chrome starts delivering samples into the WebAudio graph.
let primer = new Audio();
primer.muted = true;
primer.srcObject = stream;
primer.addEventListener('canplaythrough', () => {
    primer = null;
});

// With the element "priming" the stream, the WebAudio path now works.
let sourceNode = audioContext.createMediaStreamSource(stream);
sourceNode.connect(audioContext.destination);

JSFiddle: https://jsfiddle.net/jmcker/4naq5ozc/


Original Chromium issue and workaround: https://bugs.chromium.org/p/chromium/issues/detail?id=121673#c121

New Chromium issue: https://bugs.chromium.org/p/chromium/issues/detail?id=687574 https://bugs.chromium.org/p/chromium/issues/detail?id=933677

In Chrome, it is a known bug currently where remote audio streams gathered from a peer connection are not accessible through the AudioAPI.

Latest comment on the bug:

We are working really hard towards the feature. The reason why this takes long time is that we need to move the APM to chrome first, implement a render mixer to get the unmixed data from WebRtc, then we can hook up the remote audio stream to webaudio.

It was recently patched in Firefox as I remember this being an issue on there as well in the past.

I was unable to play the stream using Web Audio, but I did manage to play it using a basic audio element:

 // Play the remote stream through a plain <audio> element.
 var audio = new Audio();
 if ('srcObject' in audio) {
     // Modern path: attach the MediaStream directly.
     audio.srcObject = remoteStream;
 } else {
     // Legacy fallback: URL.createObjectURL(MediaStream) was deprecated and
     // has been removed from modern browsers (throws in current Chrome).
     audio.src = (URL || webkitURL || mozURL).createObjectURL(remoteStream);
 }
 audio.play();
发布评论

评论列表(0)

  1. 暂无评论