6 votes

I am trying to stream audio using node.js + ffmpeg to browsers connected on the LAN only, using the Web Audio API.

I am not using the <audio> element because it adds its own buffer of 8 to 10 seconds, and I want the latency to be as low as possible (around 1 to 2 seconds max).

The audio plays successfully, but it is very choppy and noisy.

Here is my node.js (server side) file:

var ws = require('websocket.io'),
    server = ws.listen(3000);
var child_process = require("child_process");

server.on('connection', function (socket)
{
    console.log('New client connected');

    // Decode A.mp3 in real time to raw 32-bit-float little-endian PCM on stdout
    var ffmpeg = child_process.spawn("ffmpeg", [
        "-re", "-i",
        "A.mp3", "-f",
        "f32le",
        "pipe:1"                     // Output to STDOUT
    ]);

    // Forward each chunk of raw PCM to the browser as base64 text
    ffmpeg.stdout.on('data', function (data)
    {
        socket.send(data.toString('base64'));   // data is already a Buffer
    });
});

And here is the JavaScript in my HTML page:

var context = null;
window.addEventListener('load', init, false);

function init() {
    try {
        context = new webkitAudioContext();
    } catch (e) {
        alert('Web Audio API is not supported in this browser');
    }
}

var ws = new WebSocket("ws://localhost:3000/");

ws.onmessage = function (message)
{
    // base64DecToArr is a Base64-to-Uint8Array decoder (e.g. the helper from
    // MDN's Base64 article); the decoded bytes are the raw f32le PCM from ffmpeg
    var d1 = base64DecToArr(message.data).buffer;
    var d2 = new DataView(d1);

    // Read the little-endian 32-bit floats into a Float32Array
    var data = new Float32Array(d2.byteLength / Float32Array.BYTES_PER_ELEMENT);
    for (var jj = 0; jj < data.length; ++jj)
    {
        data[jj] = d2.getFloat32(jj * Float32Array.BYTES_PER_ELEMENT, true);
    }

    // Wrap this chunk in an AudioBuffer and play it immediately
    var audioBuffer = context.createBuffer(2, data.length, 44100);
    audioBuffer.getChannelData(0).set(data);

    var source = context.createBufferSource(); // creates a sound source
    source.buffer = audioBuffer;
    source.connect(context.destination);       // connect the source to the context's destination (the speakers)
    source.start(0);
};

Can anyone advise what is wrong?

Regards, Nayan


2 Answers

5 votes

I got it working!

All I had to do was adjust the number of channels.

I set FFmpeg to output mono audio and it worked like a charm. Here is my new FFmpeg command:

var ffmpeg = child_process.spawn("ffmpeg", [
    "-re", "-i",
    "A.mp3",
    "-ac", "1",                  // downmix to a single (mono) channel
    "-f",
    "f32le",
    "pipe:1"                     // Output to STDOUT
]);
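
For reference, if you also want the client's AudioBuffer to match the mono stream, a minimal sketch of the onmessage handler could look like the one below, assuming the same base64DecToArr helper as in the question (it still starts each chunk on arrival, which the next answer discusses):

// Sketch: decode a base64 chunk of mono f32le samples and play it as a
// one-channel AudioBuffer (assumes base64DecToArr as in the question)
ws.onmessage = function (message) {
    var bytes = base64DecToArr(message.data);
    var samples = new Float32Array(bytes.buffer, 0,
                                   Math.floor(bytes.byteLength / 4));

    var audioBuffer = context.createBuffer(1, samples.length, 44100); // mono
    audioBuffer.getChannelData(0).set(samples);

    var source = context.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(context.destination);
    source.start(0);   // still played on arrival (see the next answer)
};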

1 vote

You are taking chunks of data, creating separate nodes from them, and starting them based on network timing. For the audio to sound correct, the buffers must play back without any break and with sample-accurate timing. You need to fundamentally change your method.
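
As an illustration of what sample-accurate timing means with the question's AudioBufferSourceNode setup, here is a sketch of mine (not the approach described next): keep a running play time on the AudioContext clock and schedule each decoded mono chunk back-to-back, assuming the question's context and a stream sample rate that matches context.sampleRate.

// Illustrative sketch: schedule chunks on the context clock instead of start(0)
var playTime = 0;

function enqueueChunk(samples) {           // samples: mono Float32Array
    var buf = context.createBuffer(1, samples.length, context.sampleRate);
    buf.getChannelData(0).set(samples);

    var src = context.createBufferSource();
    src.buffer = buf;
    src.connect(context.destination);

    // Keep ~100 ms of lead so network jitter does not open gaps
    var now = context.currentTime;
    if (playTime < now + 0.1) playTime = now + 0.1;
    src.start(playTime);                   // sample-accurate start
    playTime += buf.duration;              // next chunk starts where this one ends
}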

The way I do this is by creating a ScriptProcessorNode which manages its own buffer of PCM samples. On each audio process callback, it reads the queued samples into the output buffer. This guarantees smooth playback of the audio.
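
A rough sketch of that idea, assuming a mono f32le stream decoded with the question's base64DecToArr helper and a stream sample rate that matches context.sampleRate (the queue handling and names here are my own, not the exact code):

// ScriptProcessorNode pulling mono samples out of a FIFO filled by the WebSocket
var context = new (window.AudioContext || window.webkitAudioContext)();
var sampleQueue = [];                      // Float32Array chunks awaiting playback

var ws = new WebSocket("ws://localhost:3000/");
ws.onmessage = function (message) {
    var bytes = base64DecToArr(message.data);
    // Reinterpret the bytes as 32-bit float samples (whole samples only;
    // browsers are little-endian, matching f32le)
    sampleQueue.push(new Float32Array(bytes.buffer, 0,
                                      Math.floor(bytes.byteLength / 4)));
};

// 4096-frame buffer, 1 (unused) input channel, 1 output channel (mono)
var processor = context.createScriptProcessor(4096, 1, 1);

processor.onaudioprocess = function (e) {
    var out = e.outputBuffer.getChannelData(0);
    var offset = 0;
    while (offset < out.length && sampleQueue.length > 0) {
        var chunk = sampleQueue[0];
        var n = Math.min(chunk.length, out.length - offset);
        out.set(chunk.subarray(0, n), offset);
        offset += n;
        if (n === chunk.length) {
            sampleQueue.shift();                     // chunk fully consumed
        } else {
            sampleQueue[0] = chunk.subarray(n);      // keep the remainder
        }
    }
    while (offset < out.length) out[offset++] = 0;   // underrun: output silence
};

processor.connect(context.destination);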