Live audio using ffmpeg, javascript and nodejs - javascript

I am new to this, so please don't hang me for the poor grammar. I am trying to create a proof-of-concept application which I will later extend. It does the following: we have an HTML page which asks for permission to use the microphone. We capture the microphone input and send it via WebSocket to a Node.js app.
JS (Client):
var bufferSize = 4096;
var context = new AudioContext();   // assumed; the original snippet does not show where `context` comes from
var socket = new WebSocket(URL);    // URL is the WebSocket endpoint of the Node.js app
var myPCMProcessingNode = context.createScriptProcessor(bufferSize, 1, 1);

myPCMProcessingNode.onaudioprocess = function(e) {
    var input = e.inputBuffer.getChannelData(0);
    socket.send(convertFloat32ToInt16(input));
};

function convertFloat32ToInt16(buffer) {
    var l = buffer.length;
    var buf = new Int16Array(l);
    while (l--) {
        // clamp to [-1, 1] before scaling to 16-bit
        buf[l] = Math.max(-1, Math.min(1, buffer[l])) * 0x7FFF;
    }
    return buf.buffer;
}

navigator.mediaDevices.getUserMedia({ audio: true, video: false })
    .then(function(stream) {
        var microphone = context.createMediaStreamSource(stream);
        microphone.connect(myPCMProcessingNode);
        myPCMProcessingNode.connect(context.destination);
    })
    .catch(function(e) {});
On the server we take each incoming buffer, run it through ffmpeg, and send whatever comes out of stdout to another device using a Node.js 'http' POST. The device has a speaker. We are basically trying to create a one-way audio link from the browser to the device.
Node JS (Server):
var WebSocketServer = require('websocket').server;
var http = require('http');
var children = require('child_process');

// HTTP + WebSocket server setup (not shown in the original snippet; port 8080 is an assumption)
var httpServer = http.createServer(function(request, response) {});
httpServer.listen(8080);
var wsServer = new WebSocketServer({ httpServer: httpServer });

wsServer.on('request', function(request) {
    var connection = request.accept(null, request.origin);
    connection.on('message', function(message) {
        if (message.type === 'utf8') { /* NOP */ }
        else if (message.type === 'binary') {
            // feed the raw PCM chunk into ffmpeg's stdin
            ffm.stdin.write(message.binaryData);
        }
    });
    connection.on('close', function(reasonCode, description) {});
    connection.on('error', function(error) {});
});

var ffm = children.spawn(
    './ffmpeg.exe',
    '-stdin -f s16le -ar 48k -ac 2 -i pipe:0 -acodec pcm_u8 -ar 48000 -f aiff pipe:1'.split(' ')
);
ffm.on('exit', function(code, signal) {});

// forward whatever ffmpeg writes to stdout into the open POST request to the device
ffm.stdout.on('data', (data) => {
    req.write(data);
});

var options = {
    host: 'xxx.xxx.xxx.xxx',
    port: xxxx,
    path: '/path/to/service/on/device',
    method: 'POST',
    headers: {
        'Content-Type': 'application/octet-stream',
        'Content-Length': 0,
        'Authorization': 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
        'Transfer-Encoding': 'chunked',
        'Connection': 'keep-alive'
    }
};
var req = http.request(options, function(res) {});
The device supports only a continuous POST and only a couple of formats (ulaw, aiff, wav).
This solution doesn't seem to work: from the device speaker we only hear something like white noise.
Also, I think I may have a problem with the buffer I am sending to ffmpeg's stdin. I tried dumping whatever comes out of the WebSocket to a .wav file and playing it with VLC, and it plays the whole recording very fast: 10 seconds of recording play back in about 1 second.
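As an aside on that dump test, a raw PCM dump has no header, so the player has to be told the format explicitly, and the actual capture rate is whatever the AudioContext reports (often 44100 rather than 48000). A quick check looks roughly like this (a sketch; the 44100 rate and the dump.raw file name are only assumptions):
console.log(context.sampleRate);            // in the browser; often 44100
ffplay -f s16le -ar 44100 -ac 1 dump.raw    # in a shell, plays the raw dump at the stated rate
If the player guesses a higher rate or more channels than were actually captured, playback sounds sped up.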
I am new to audio processing and have searched for about 3 days now for solutions on how to improve this, and found nothing.
I would ask the community for 2 things:
Is something wrong with my approach? What more can I do to make this work? I will post more details if required.
If what I am doing is reinventing the wheel, then I would like to know what other software or third-party service (like Amazon or whatever) can accomplish the same thing.
Thank you.

Related

Safari iOS does not play video in offline mode

I am trying to build a service worker that retrieves a video from the cache if available and fetches it from the network if not. Here is my code for that:
self.addEventListener("fetch", function (event) {
  if (event.request.headers.get("range")) {
    caches.match(event.request.url).then(function (res) {
      if (!res) {
        log.debug(
          `Range request NOT found in cache for ${event.request.url}, activating fetch...`
        );
        return fetch(event.request);
      }
      returnRangeRequest(event);
    });
  } else {
    event.respondWith(
      caches.match(event.request).then((response) => {
        return response || fetch(event.request);
      })
    );
  }
});
function returnRangeRequest(event) {
  var rangeHeader = event.request.headers.get("range");
  var rangeMatch = rangeHeader.match(/^bytes\=(\d+)\-(\d+)?/);
  var pos = Number(rangeMatch[1]);
  var pos2 = rangeMatch[2];
  if (pos2) {
    pos2 = Number(pos2);
  }
  event.respondWith(
    caches
      .match(event.request.url)
      .then(function (res) {
        return res.arrayBuffer();
      })
      .then(function (ab) {
        let responseHeaders = {
          status: 206,
          statusText: "Partial Content",
          headers: [
            ["Content-Type", "video/mp4"],
            [
              "Content-Range",
              "bytes " + pos + "-" + (pos2 || ab.byteLength - 1) + "/" + ab.byteLength,
            ],
          ],
        };
        var abSliced = {};
        if (pos2 > 0) {
          abSliced = ab.slice(pos, pos2 + 1);
        } else {
          abSliced = ab.slice(pos);
        }
        log.debug(`Returning range request response`);
        return new Response(abSliced, responseHeaders);
      })
      .catch(function (err) {
        log.error(err);
      })
  );
}
When I am online and I try to play the video, it works fine and it prints the debug line Range request NOT found in cache for https://example.com/vid.mp4, activating fetch...
When I have cached the video url using cache.add("https://example.com/vid.mp4");, and I try to play it, the video plays fine.
The problem arises when I turn off the Wi-Fi on the iPad. When I try to play the video after turning off Wi-Fi, the video stays at 0:00 with a total length of 0:00.
Some of my findings:
When I have Wi-Fi on and the video cached, two requests are made, bytes=0-1 and then bytes=0-4444000.
When I have Wi-Fi off, the request for bytes=0-1 is made, but it stops there.
Where am I going wrong?
Safari appears to take a very strict approach to range requests, and issues similar to yours are sometimes seen with regular online web servers.
In particular, Safari expects to see a '206' response when it sends a request with a byte range. If the server responds with a '200' response, it appears Safari cannot handle it. Some other browsers, for example Chrome, seem to be OK with this.
Apple provides some info on how to check for this:
If you are not sure whether your media server supports byte-range requests, you can open the Terminal application in OS X and use the curl command-line tool to download a short segment from a file on the server:
curl --range 0-99 http://example.com/test.mov -o /dev/null
If the tool reports that it downloaded 100 bytes, the media server correctly handled the byte-range request. If it downloads the entire file, you may need to update the media server. For more information on curl, see OS X Man Pages.
See this answer for more detail and background: https://stackoverflow.com/a/32998689/334402
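For reference, here is a minimal sketch of a media handler that answers range requests the way Safari expects, using only plain Node (the file name, content type and port are just examples):
const http = require('http');
const fs = require('fs');

http.createServer((req, res) => {
  const file = 'test.mov';                        // example file
  const size = fs.statSync(file).size;
  const range = req.headers.range;
  if (range) {
    const [, startStr, endStr] = range.match(/bytes=(\d+)-(\d*)/);
    const start = Number(startStr);
    const end = endStr ? Number(endStr) : size - 1;
    res.writeHead(206, {                          // 206 Partial Content, not 200
      'Content-Range': `bytes ${start}-${end}/${size}`,
      'Accept-Ranges': 'bytes',
      'Content-Length': end - start + 1,
      'Content-Type': 'video/quicktime'
    });
    fs.createReadStream(file, { start, end }).pipe(res);
  } else {
    res.writeHead(200, { 'Content-Length': size, 'Content-Type': 'video/quicktime' });
    fs.createReadStream(file).pipe(res);
  }
}).listen(8080);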

File uploads through socket.io (JavaScript & FileReader)

I am creating a chat app (in React Native), but for now I have made some tests in vanilla JavaScript. The server is a Node.js server.
It works for sending text messages, but now I have some questions about sending photos/videos/audio files. I've been doing a lot of research online on the best method to do this.
I came up with the idea of using the FileReader API to split the file into chunks and send it chunk by chunk via socket.emit().
This is my code so far (simplified):
Please note that I will create a React Native app, but for now (for testing) I've just created an HTML file with an upload form.
// index.html
// the page where my upload form is
var reader = {};
var file = {};
var sliceSize = 1000 * 1024;
var socket = io('http://localhost:8080');

const startUpload = e => {
  e.preventDefault();
  reader = new FileReader();
  file = $('#file')[0].files[0];
  uploadFile(0);
};
$('#start-upload').on('click', startUpload);

const uploadFile = start => {
  var slice = start + sliceSize + 1;
  var blob = file.slice(start, slice);
  reader.onloadend = e => {
    if (slice < file.size) {
      socket.emit('message', JSON.stringify({
        fileName: file.name,
        fileType: file.type,
        fileChunk: e.target.result
      }));
    } else {
      console.log('Upload completed!');
    }
  };
  reader.readAsDataURL(blob);
};
// app.js
// my NodeJS server-file
var file;
var files = {};

io.on('connection', socket => {
  console.log('User connected!');

  // when a message is received
  socket.on('message', data => {
    file = JSON.parse(data);
    if (!files[file.fileName]) {
      // this is the first chunk received
      // create a new string
      files[file.fileName] = '';
    }
    // append the binary data
    files[file.fileName] = files[file.fileName] + file.fileChunk;
  });

  // on disconnect
  socket.on('disconnect', () => {
    console.log('User disconnected!');
  });
});
I did not include any checks for file type (I'm not at that point yet); I first want to make sure that this is the right thing to do.
Stuff I need to do:
Send a message (like socket.emit('uploaddone', ...)) from the client to the server to notify the server that the upload is done (and the server can emit the complete file to another user).
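A rough sketch of what that notification could look like (the 'filereceived' event name is made up; 'uploaddone' and the files map come from the code above):
// client, after the last chunk has been sent
socket.emit('uploaddone', JSON.stringify({ fileName: file.name }));

// server
socket.on('uploaddone', data => {
  const { fileName } = JSON.parse(data);
  // hand the reassembled file to the other connected user(s)
  socket.broadcast.emit('filereceived', { fileName: fileName, fileData: files[fileName] });
});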
My questions are:
Is it okay to send chunks of binary data (base64) over a socket, or would it take up too much bandwidth?
Will I lose some quality (photos/videos/audio files) when splitting them up into chunks?
If there is a better way to do this, please let me know. I'm not asking for working code examples, just some guidance in the good direction.
You can send raw bytes over a WebSocket; base64 has a 33% size overhead.
Also, you won't have to JSON.stringify the whole (possibly large) body and parse it on the receiving side.
Will I lose some quality
No, the underlying protocol (TCP) delivers data in order and without corruption.
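A minimal sketch of the binary variant (the 'chunk' event name and the chunk bookkeeping are assumptions; socket.io accepts Buffer/ArrayBuffer payloads directly):
// client: read the slice as an ArrayBuffer instead of a data URL
reader.onloadend = e => {
  socket.emit('chunk', { fileName: file.name, fileType: file.type }, e.target.result);
};
reader.readAsArrayBuffer(blob);

// server: binary payloads arrive as Buffers
const chunks = {};
socket.on('chunk', (meta, data) => {
  (chunks[meta.fileName] = chunks[meta.fileName] || []).push(data);
  // Buffer.concat(chunks[meta.fileName]) once the upload is complete
});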
I realize this answer is a couple of months late, but just for future reference you should look into using the acknowledgment option with socket.io here
// with acknowledgement
let message = JSON.stringify({
  fileName: file.name,
  fileType: file.type,
  fileChunk: e.target.result
});
socket.emit("message", message, (ack) => {
  // send next chunk...
});

Amazon S3 - expressjs read stream runs into timeout

I'm trying to stream videos from an Amazon S3 Bucket.
Streaming works fine if I call my REST endpoint only once. But if I want to stream the video from multiple browsers at the same time, I get the following error:
TimeoutError: Connection timed out after 120000ms
My code so far:
var express = require("express");
var fs = require("fs");
var app = express();
var path = require("path");
var AWS = require("aws-sdk");

app.get("/video", function(req, res, next) {
  res.set({
    "Accept-Ranges": "bytes",
    "Content-Type": "video/mp4",
    "Content-Length": 41811600,
    "Cache-Control": "max-age=31536000"
  });
  var stream;
  if (req.headers.range) {
    const size = 41811600;
    const parts = req.headers.range.replace(/bytes=/, "").split("-");
    const start = parseInt(parts[0], 10);
    const end = parts[1] ? parseInt(parts[1], 10) : size - 1;
    const length = end - start + 1;
    res.set({
      "Content-Range": `bytes ${start}-${end}/${size}`,
      "Content-Length": length
    });
    stream = getReadStream(start, end);
  } else {
    stream = getReadStream(undefined, undefined);
  }
  res.writeHead(res.getHeader("Content-Range") ? 206 : 200);
  return stream.pipe(res);
});

function getReadStream(startByte, endByte) {
  const params = {
    Bucket: "s3-fancy-test-bucket",
    Key: path.join("SOME_KEY"),
    Range: "bytes=" + (startByte || 0) + "-" + (endByte || "")
  };
  const stream = getS3Instance()
    .getObject(params)
    .createReadStream();
  stream.on("error", err => {
    console.error(`Unable to get File Stream for ${params.Key} - ${err}`);
  });
  return stream;
}

function getS3Instance() {
  AWS.config.update({
    accessKeyId: "ACCESS_KEY_ID",
    secretAccessKey: "SECRET",
    signatureVersion: "v4",
    region: "eu-central-1"
  });
  return new AWS.S3({ apiVersion: "2006-03-01" });
}

app.listen(process.env.PORT || 3000);
I'm not quite sure whether this is a problem with aws-sdk-js or a general problem with streams...
What am I doing wrong?
Is anyone experiencing the same problems?
In the meantime I've found out that the behaviour described above heavily depends on how the video is created. I'm using ffmpeg to convert videos to mp4. Converting videos with the faststart flag makes a big difference in the number of range requests: once I had the faststart flag in place, the browser made far fewer range requests to load the video.
As there are fewer requests now, express / aws-sdk seem to be able to deal with it. There are no timeouts any longer.
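For reference, the faststart flag is passed via -movflags; a typical invocation (the codec choices are just an example) looks like this:
ffmpeg -i input.mov -c:v libx264 -c:a aac -movflags +faststart output.mp4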
We are trying exactly the same thing; the only reason we are doing it is that we want to restrict access to the videos through our backend before streaming them to the user, so serving videos directly from S3 is not an option. I realized that the video streams just fine, but getObject() times out after 120000 ms (the AWS default timeout) even after the stream ends successfully. There is an open issue about this on the aws-sdk repository that is still not resolved:
https://github.com/aws/aws-sdk-js/issues/2087
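Until that issue is resolved, one possible workaround (a sketch only, not a confirmed fix) is to keep a handle on the AWS.Request and abort it once the HTTP response is done, so the idle connection does not sit around until the default 120000 ms httpOptions timeout:
function getReadStream(startByte, endByte, res) {
  const params = {
    Bucket: "s3-fancy-test-bucket",
    Key: path.join("SOME_KEY"),
    Range: "bytes=" + (startByte || 0) + "-" + (endByte || "")
  };
  const request = getS3Instance().getObject(params);   // AWS.Request
  const stream = request.createReadStream();
  stream.on("error", err => console.error(err));
  res.on("close", () => request.abort());              // stop the underlying S3 request
  return stream;
}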

Send message between two independent running processes in Node.js

I've got an Adobe AIR application on the local machine that communicates with a remote Node.js server script (socket-script.js) via a socket connection.
Furthermore, I start a new Node.js process from the command line and pass some additional arguments to a second server script (terminal-script.js).
Question: How can I send the arguments from terminal-script.js to socket-script.js? Afterwards, socket-script.js should broadcast the args to the AIR application. Does anyone have an idea how to connect the two independently running processes in Node.js? Thanks.
Illustration link
Use the server to communicate between processes:
socket-script.js
var net = require('net');
var app = null;
var server = net.createServer(function(socket) {
  socket.on('data', function(data) {
    if (data.indexOf('terminal:') > -1) {
      if (app) {
        app.write(data);
      }
    } else if (data.indexOf('app:') > -1) {
      app = socket;
    }
  });
});
server.listen(9001);   // port the terminal script and the app connect to
terminal-script.js:
var net = require('net');
var client = net.connect({ port: 9001 }, function() {
  client.write('terminal:' + process.argv[2]);
});
app:
var net = require('net');
var client = net.connect({ port: 9001 }, function() {
  client.write('app:connect');
});
client.on('data', function(data) {
  if (data.indexOf('terminal:') > -1) {
    // got terminal data
  }
});
The only way I can conceive of to make this work is something like this:
1) You'll need to have terminal-script.js listen on a socket, like so:
var args = process.argv.slice(2);
var http = require('http');
http.createServer(function (req, res) {
  res.writeHead(200, {'Content-Type': 'text/plain'});
  res.end(args[0]);
}).listen(8000, '127.0.0.1');
2) Just make a request from socket-script to the terminal script:
// somewhere in socket-script, use this to grab the value from the terminal script
var http = require('http');
var options = {
  host: 'terminal-script-host.com',
  port: '8000',
  path: '/'
};
var req = http.get(options, function(res) {
  res.on('data', function (data) {
    console.log('socket-script got the data from terminal-script: ' + data);
  });
});
Not sure if this helps. But I can tell you that it would be nearly impossible to "inject" something into the socket-script from the terminal-script, not in a way that would work with the same request anyways.

How to create a streaming API with NodeJS

How would you go about creating a streaming API with Node, just like the Twitter streaming API?
What I want to do ultimately is get the first update from the FriendFeed API, stream when a new one becomes available (if the id is different), and later expose it as a web service so I can use it with WebSockets on my website :).
So far I have this:
var sys = require('sys'),
    http = require('http');

var ff = http.createClient(80, 'friendfeed-api.com');
var request = ff.request('GET', '/v2/feed/igorgue?num=1',
                         {'host': 'friendfeed-api.com'});

request.addListener('response', function (response) {
  response.setEncoding('utf8'); // this is *very* important!
  response.addListener('data', function (chunk) {
    var data = JSON.parse(chunk);
    sys.puts(data.entries[0].body);
  });
});
request.end();
This only gets the data from FriendFeed; creating the HTTP server with Node is easy, but I haven't yet found out how to make it return a stream.
You would want to set up a system that keeps track of incoming requests and stores their response objects. Then, when it's time to stream a new event from FriendFeed, iterate over the stored response objects and call responses[i].write('something') on each of them.
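A minimal sketch of that idea using only the http module (the port and the newline-delimited JSON framing are made up for the example):
var http = require('http');
var clients = [];                                  // open response objects

http.createServer(function (req, res) {
  res.writeHead(200, { 'Content-Type': 'application/json' });
  clients.push(res);                               // keep the connection open
  res.on('close', function () {                    // drop it when the client disconnects
    clients.splice(clients.indexOf(res), 1);
  });
}).listen(8000);

// call this whenever a FriendFeed entry with a new id shows up
function broadcast(entry) {
  clients.forEach(function (res) {
    res.write(JSON.stringify(entry) + '\n');
  });
}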
Check out LearnBoost's Socket.IO-Node; you may even be able to use that project as your framework and not have to code it yourself.
From the Socket.IO-Node example app (for chat):
io.listen(server, {
  onClientConnect: function(client) {
    client.send(json({ buffer: buffer }));
    client.broadcast(json({ announcement: client.sessionId + ' connected' }));
  },
  onClientDisconnect: function(client) {
    client.broadcast(json({ announcement: client.sessionId + ' disconnected' }));
  },
  onClientMessage: function(message, client) {
    var msg = { message: [client.sessionId, message] };
    buffer.push(msg);
    if (buffer.length > 15) buffer.shift();
    client.broadcast(json(msg));
  }
});
