I'm trying to build a real-time voice call application. My goal is to capture microphone audio with the native JavaScript APIs and send the data over a WebSocket to the other clients. I came up with the following code:
<script>
// Globals
var aCtx;
var analyser;
var microphone;

navigator.getUserMedia_ = (navigator.getUserMedia
    || navigator.webkitGetUserMedia
    || navigator.mozGetUserMedia
    || navigator.msGetUserMedia);

if (navigator.getUserMedia_) {
    navigator.getUserMedia_({ audio: true }, function (stream) {
        aCtx = new webkitAudioContext();
        analyser = aCtx.createAnalyser();
        microphone = aCtx.createMediaStreamSource(stream);
        microphone.connect(analyser);
        process();
    });
}

function process() {
    console.log(analyser);
    setInterval(function () {
        var FFTData = new Float32Array(analyser.frequencyBinCount);
        analyser.getFloatFrequencyData(FFTData);
        console.log(FFTData); // display
    }, 10);
}
</script>
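On the sending side I had something along these lines in mind (untested sketch; the WebSocket URL is just a placeholder for my Node server):

var ws = new WebSocket('ws://localhost:8080'); // placeholder URL
ws.binaryType = 'arraybuffer';

// Forward one captured Float32Array over the open socket as binary data.
function sendChunk(chunk) {
    if (ws.readyState === WebSocket.OPEN) {
        ws.send(chunk.buffer); // send the underlying ArrayBuffer
    }
}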
So every 10 ms I get the buffer and can send it via Node. The problem is that I couldn't figure out how to play the buffer back, and I'm not even sure I'm capturing it the right way. This is what I've tried:
var source = audioContext.createBufferSource();
var buffer; // the result printed in the code above
var audioBuffer = audioContext.createBuffer(1, buffer.length, 44100);
audioBuffer.getChannelData(0).set(buffer);
source.buffer = audioBuffer;
source.connect(audioContext.destination);
Am I capturing the buffer correctly, and how can I play it back?
1 Answer
To issue playback of the buffer, you have to call the start method on your AudioBufferSourceNode instance. The problem here is that you want to play back an audio stream, and an AudioBuffer isn't designed for that. If you keep creating AudioBuffer objects, filling them with data, and handing each one to a fresh AudioBufferSourceNode instance, there will surely be noticeable pauses in the sound.
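As a minimal sketch (assuming chunk is a Float32Array of time-domain samples, not the FFT data your capture code logs, and audioContext is an existing AudioContext), playing a single chunk looks like this:

// Play one Float32Array chunk of raw time-domain samples.
function playChunk(audioContext, chunk) {
    var audioBuffer = audioContext.createBuffer(1, chunk.length, audioContext.sampleRate);
    audioBuffer.getChannelData(0).set(chunk);

    var source = audioContext.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(audioContext.destination);
    source.start(0); // without this call, nothing is ever played
}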
You should instead keep a cache buffer, fill it as soon as data arrives, and drain it at normal playback speed (not immediately: you have to wait until it holds enough milliseconds of audio).
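A rough sketch of that idea (the queue, the helper names, and the five-chunk threshold are my own choices, not a standard API):

// Jitter buffer sketch: queue incoming chunks, wait until enough audio
// has accumulated, then schedule them back to back without gaps.
var queue = [];               // Float32Array chunks, in arrival order
var nextStartTime = 0;        // when the next chunk should begin playing
var MIN_BUFFERED_CHUNKS = 5;  // accumulate some audio before starting

function onChunkReceived(audioContext, chunk) {
    queue.push(chunk);
    if (queue.length >= MIN_BUFFERED_CHUNKS) {
        drainQueue(audioContext);
    }
}

function drainQueue(audioContext) {
    while (queue.length > 0) {
        var chunk = queue.shift();
        var audioBuffer = audioContext.createBuffer(1, chunk.length, audioContext.sampleRate);
        audioBuffer.getChannelData(0).set(chunk);

        var source = audioContext.createBufferSource();
        source.buffer = audioBuffer;
        source.connect(audioContext.destination);

        // Start each chunk exactly when the previous one ends, so
        // consecutive buffers play without audible pauses.
        var startAt = Math.max(nextStartTime, audioContext.currentTime);
        source.start(startAt);
        nextStartTime = startAt + audioBuffer.duration;
    }
}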
The best way to do this is to use the APIs provided for exactly this purpose: take a look at http://www.w3.org/TR/webaudio/#MediaStreamAudioDestinationNode-section and http://www.w3.org/TR/webaudio/#MediaStreamAudioSourceNode-section.
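These two nodes pair naturally with a WebRTC peer connection rather than a raw WebSocket, since a MediaStream itself can't be serialized and sent as bytes. A sketch of both ends, reusing microphone and aCtx from your code (the transport itself is left out):

// Sender: route the microphone into a MediaStreamAudioDestinationNode.
var streamDest = aCtx.createMediaStreamDestination();
microphone.connect(streamDest);
// streamDest.stream is a MediaStream you can hand to an
// RTCPeerConnection (e.g. peerConnection.addStream(streamDest.stream)).

// Receiver: turn the remote MediaStream back into an audio graph node.
function onRemoteStream(remoteStream) {
    var source = aCtx.createMediaStreamSource(remoteStream);
    source.connect(aCtx.destination); // play it
}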