Amazon S3 - expressjs read stream runs into timeout - javascript

I'm trying to stream videos from an Amazon S3 Bucket.
Streaming works fine if I call my REST endpoint only once. But if I want to stream the video from multiple browsers at the same time, I get the following error:
TimeoutError: Connection timed out after 120000ms
My code so far:
var express = require("express");
var fs = require("fs");
var app = express();
var path = require("path");
var AWS = require("aws-sdk");
app.get("/video", function(req, res, next) {
res.set({
"Accept-Ranges": "bytes",
"Content-Type": "video/mp4",
"Content-Length": 41811600,
"Cache-Control": "max-age=31536000"
});
var stream;
if (req.headers.range) {
const size = 41811600;
const parts = req.headers.range.replace(/bytes=/, "").split("-");
const start = parseInt(parts[0], 10);
const end = parts[1] ? parseInt(parts[1], 10) : size - 1;
const length = end - start + 1;
res.set({
"Content-Range": `bytes ${start}-${end}/${size}`,
"Content-Length": length
});
stream = getReadStream(start, end);
} else {
stream = getReadStream(undefined, undefined);
}
res.writeHead(res.getHeader("Content-Range") ? 206 : 200);
return stream.pipe(res);
});
function getReadStream(startByte, endByte) {
const params = {
Bucket: "s3-fancy-test-bucket",
Key: path.join(
"SOME_KEY"
),
Range: "bytes=" + (startByte || 0) + "-" + (endByte || "")
};
const stream = getS3Instance()
.getObject(params)
.createReadStream();
stream.on("error", err => {
console.error(`Unable to get File Stream for ${params.Key} - ${err}`);
});
return stream;
}
function getS3Instance() {
AWS.config.update({
accessKeyId: "ACCESS_KEY_ID",
secretAccessKey: "SECRET",
signatureVersion: "v4",
region: "eu-central-1"
});
return new AWS.S3({ apiVersion: "2006-03-01" });
}
app.listen(process.env.PORT || 3000);
I'm not quite sure whether this is a problem with aws-sdk-js or a general problem with streams...
What am I doing wrong?
Is anyone experiencing the same problems?

In the meantime I've found out, that the behavior described above heavily depends on how the video is created. I'm using ffmpeg to convert videos to mp4. Converting videos with the faststart flag makes a big difference as far as the amount of range requests is concerned. Once I had the faststart flag in place there were a lot less range requests from the browser in order to load the video.
As there are fewer requests now, express / aws-sdk seem to be able to deal with it. There are no timeout any longer.

We are trying exactly the same thing only the reason we are doing it is because we want to restrict access to the videos through our backend before streaming the video to the user. So serving videos directly from s3 is not an option. I realized that video is streamed just fine but getObject() times out 120000 ms(aws default timeout duration) even after the stream ends successfully. There seems to be an active issue regarding this on aws-sdk repository that is still not resolved.
https://github.com/aws/aws-sdk-js/issues/2087

Related

How to stream different local videos using NodeJS and Express

I would like to stream different user-selected videos to my front-end. For this I am using NodeJS and Express. The source of the -element in which the video should be displayed is 'http://localhost:4201/video'.
The code I am using to stream the video looks like this:
async function loadLocalVideo(_, filePath) {
if (!filePath) {
console.log('No file selected');
return;
} else {
fs.access(filePath, (err) => {
if (err) {
console.log(`File does not exist at path ${filePath}`);
return;
}
});
}
expressApp.get('/video', function (req, res) {
const path = filePath;
const stat = fs.statSync(path);
const fileSize = stat.size;
const range = req.headers.range;
if (range) {
const parts = range.replace(/bytes=/, '').split('-');
const start = parseInt(parts[0], 10);
const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
if (start >= fileSize) {
res.status(416).send(
'Requested range not satisfiable\n' + start + ' >= ' + fileSize
);
return;
}
const chunksize = end - start + 1;
const file = fs.createReadStream(path, { start, end });
const head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': 'video/mp4',
};
res.writeHead(206, head);
file.pipe(res);
} else {
const head = {
'Content-Length': fileSize,
'Content-Type': 'video/mp4',
};
res.writeHead(200, head);
fs.createReadStream(path).pipe(res);
}
});
}
However, when I want to stream a different video and call the same function again but with a different filePath-param the same video keeps playing. How can I stream another video and display it in the -element?
I think you are saying that you are using the same file name and path but want the backend server to change the actual contents that are streamed to the client.
This approach may confuse the caching downstream of your server - i.e. the network or browser cache may not recognise that the file has changed and it may simply serve the cached versions.
Some CDN's will allow you add edge functionality to intercept a request and then decide which of a number of options you want to return, or you could disable caching but I suspect these approaches might be overly complex.
If all you want is to be able to display different videos in a video element on your web page, it may be easier to simply change the source attribute on the video on the page itself and then call load() on the video element on the page.

Get error ERR_STREAM_PREMATURE_CLOSE when attempting to stream video from Node.JS server with pipeline

I'm attempting to stream a video from a server to the browser using the pipeline function of the stream library. It's a fairly large video (88.7MB), which means I want to stream it to the browser in 5MB chunks. I created the server following some tutorials online, however I notice that there is some weird behaviour where streams are being prematurely closed (because the browser decides that it wants the very last part of the video before the first part has even been sent).
This is the output:
21:01:29.215 Server is running on http://192.168.1.180:80
21:01:33.562 --------------------------------- New Request ---------------------------------
21:01:33.563 No range specified.
21:01:33.624 --------------------------------- New Request ---------------------------------
21:01:33.625 ( 1 ) Starting Pipe | Browser requested: bytes=0- | Start: 0 | End: 5242880 | Content Length: 5242881 | Content Range: bytes 0-5242880/93008043 | Video Size: 93008043
21:01:33.625 ( 1 ) Pipeline created
21:01:33.710 --------------------------------- New Request ---------------------------------
21:01:33.710 ( 2 ) Starting Pipe | Browser requested: bytes=92798976- | Start: 92798976 | End: 93008042 | Content Length: 209067 | Content Range: bytes 92798976-93008042/93008043 | Video Size: 93008043
21:01:33.711 ( 2 ) Pipeline created
21:01:33.711 ( 1 ) Pipeline error:
21:01:33.711 Error [ERR_STREAM_PREMATURE_CLOSE]: Premature close
at new NodeError (node:internal/errors:371:5)
at ServerResponse.onclose (node:internal/streams/end-of-stream:139:30)
at ServerResponse.emit (node:events:532:35)
at emitCloseNT (node:_http_server:845:10)
at Socket.onServerResponseClose (node:_http_server:233:5)
at Socket.emit (node:events:532:35)
at TCP.<anonymous> (node:net:687:12)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17)
21:01:33.715 ( 2 ) Pipeline success
As you can see form above, the browser initially requests for the first bytes of the video yet 60ms later (before the data has even been sent) it creates a second pipeline request for the very last parts of the video (?) which makes no sense. This causes the first pipeline to error, and I don't know how to fix this. (Sending smaller chunks is not an option).
Another thing I have noticed is that it doesn't send all of the data as quickly as possible, all it does is wait until browser says it wants more data (instead of sending right away)
I haven't noticed this error appearing when other devices try to watch the video at the same time (or watching from multiple tabs) so I am wondering if this is caused because it can't create two pipelines to the same tab/device/source.
Here is my server code:
const http = require("http");
const path = require("path");
const { statSync, createReadStream } = require("fs");
const { pipeline } = require("stream")
const print = require("./../../mods/print.js")
const host = "192.168.1.180";
const port = 80;
let pipe_number = 0;
let chunk_size = 1024*1024*5
let video_path = path.normalize(__dirname + "/video.mp4")
http.createServer((req, res) => {
print(`--------------------------------- New Request ---------------------------------`)
const { range } = req.headers
const { size } = statSync(video_path)
if (range) {
pipe_number++
let this_pipe = pipe_number
const parts = range.replace(/bytes=/, "").split("-")
const start = parseInt(parts[0], 10)
const end = parts[1] ? parseInt(parts[1], 10) : Math.min(start + chunk_size, size - 1)
const content_length = (end-start) + 1
const head = {
"Content-Range": `bytes ${start}-${end}/${size}`,
"Accept-Ranges": `bytes`,
"Content-Length": content_length,
"Content-Type": `video/mp4`
};
print(`( ${this_pipe} )`, `Starting Pipe`, "| Browser requested:", range, "| Start:", start, "| End:", end, "| Content Length:", content_length, "| Content Range:", `bytes ${start}-${end}/${size}`,"| Video Size:", size)
const file = createReadStream(video_path, {start, end})
res.writeHead(206, head);
print(`( ${this_pipe} )`, "Pipeline created")
pipeline(file, res, (err) => {
if (err) {
print(`( ${this_pipe} )`, "Pipeline error:", err)
} else {
print(`( ${this_pipe} )`, "Pipeline success")
}
})
} else {
print("No range specified.")
const head = {
'Content-Length': 0,
'Content-Type': 'video/mp4',
};
res.writeHead(200, head)
res.end()
}
}).listen(port, host, () => {print(`Server is running on http://${host}:${port}`)})
I got most of this from online tutorials because of course this is something new to me. I am not using express.js here either.
You might have found the issue by now but for future googlers, I was having this issue and it turned out incorrect content was written to the response (the start/end bytes weren't present so the whole file was being written).
Also https://github.com/vercel/serve-handler/blob/master/src/index.js has a good reference for how to return 206/416 responses correctly.

How do I implement cloudinary inside my project

In this code I can use local videos to stream but how do I use cloudinary to serve my videos. I have tried to replace the filepath to cloudinary video url but it didn't work. CAn anyone please help me out on this part?
const http=require('http');
const fs=require("fs");
const path=require("path");
const server=http.createServer((req, res)=>{
// return res.end(req.url+req.method);
if(req.method==='GET' && req.url==="/"){
/*we will send a index.html page when
user visits "/" endpoint*/
/*index.html will have video component
that displays the video*/
fs.createReadStream(path.resolve(
"index.html")).pipe(res);
return;
}
//if video content is requesting
if(req.method==='GET' && req.url==="/video"){
const filepath = path.resolve("video1.mp4");
const stat = fs.statSync(filepath)
const fileSize = stat.size
const range = req.headers.range
/*when we seek the video it will put
range header to the request*/
/*if range header exists send some
part of video*/
if (range) {
//range format is "bytes=start-end",
const parts =
range.replace(/bytes=/, "").split("-");
console.log(parts);
const start = parseInt(parts[0], 10)
/*in some cases end may not exists, if its
not exists make it end of file*/
console.log(start);
const end =
parts[1] ?parseInt(parts[1], 10) :fileSize - 1
console.log(end);
//chunk size is what the part of video we are sending.
const chunksize = (end - start) + 1
/*we can provide offset values as options to
the fs.createReadStream to read part of content*/
const file = fs.createReadStream(filepath, {start, end})
const head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': 'video/mp4',
}
/*we should set status code as 206 which is
for partial content*/
// because video is continuosly fetched part by part
res.writeHead(206, head);
file.pipe(res);
}else{
//if not send the video from start.
/* anyway html5 video player play content
when sufficient frames available*/
// It doesn't wait for the entire video to load.
const head = {
'Content-Length': fileSize,
'Content-Type': 'video/mp4',
}
res.writeHead(200, head);
fs.createReadStream(path).pipe(res);
}
}
/*if anything other than handler routes then send
400 status code, is for bad request*/
else{
res.writeHead(400);
res.end("bad request");
}
})
/*check if system has environment variable
for the port, otherwise defaults to 3000*/
const PORT = process.env.PORT || 3000;
//start the server
server.listen(PORT, () => {
console.log(`server listening on port:${PORT}`);
})

Live audio using ffmpeg, javascript and nodejs

I am new to this thing. Please don't hang me for the poor grammar. I am trying to create a proof of concept application which I will later extend. It does the following: We have a html page which asks for permission to use the microphone. We capture the microphone input and send it via websocket to a node js app.
JS (Client):
var bufferSize = 4096;
var socket = new WebSocket(URL);
var myPCMProcessingNode = context.createScriptProcessor(bufferSize, 1, 1);
myPCMProcessingNode.onaudioprocess = function(e) {
var input = e.inputBuffer.getChannelData(0);
socket.send(convertFloat32ToInt16(input));
}
function convertFloat32ToInt16(buffer) {
l = buffer.length;
buf = new Int16Array(l);
while (l--) {
buf[l] = Math.min(1, buffer[l])*0x7FFF;
}
return buf.buffer;
}
navigator.mediaDevices.getUserMedia({audio:true, video:false})
.then(function(stream){
var microphone = context.createMediaStreamSource(stream);
microphone.connect(myPCMProcessingNode);
myPCMProcessingNode.connect(context.destination);
})
.catch(function(e){});
In the server we take each incoming buffer, run it through ffmpeg, and send what comes out of the std out to another device using the node js 'http' POST. The device has a speaker. We are basically trying to create a 1 way audio link from the browser to the device.
Node JS (Server):
var WebSocketServer = require('websocket').server;
var http = require('http');
var children = require('child_process');
wsServer.on('request', function(request) {
var connection = request.accept(null, request.origin);
connection.on('message', function(message) {
if (message.type === 'utf8') { /*NOP*/ }
else if (message.type === 'binary') {
ffm.stdin.write(message.binaryData);
}
});
connection.on('close', function(reasonCode, description) {});
connection.on('error', function(error) {});
});
var ffm = children.spawn(
'./ffmpeg.exe'
,'-stdin -f s16le -ar 48k -ac 2 -i pipe:0 -acodec pcm_u8 -ar 48000 -f aiff pipe:1'.split(' ')
);
ffm.on('exit',function(code,signal){});
ffm.stdout.on('data', (data) => {
req.write(data);
});
var options = {
host: 'xxx.xxx.xxx.xxx',
port: xxxx,
path: '/path/to/service/on/device',
method: 'POST',
headers: {
'Content-Type': 'application/octet-stream',
'Content-Length': 0,
'Authorization' : 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
'Transfer-Encoding' : 'chunked',
'Connection': 'keep-alive'
}
};
var req = http.request(options, function(res) {});
The device supports only continuous POST and only a couple of formats (ulaw, aiff, wav)
This solution doesn't seem to work. In the device speaker we only hear something like white noise.
Also, I think I may have a problem with the buffer I am sending to the ffmpeg std in -> Tried to dump whatever comes out of the websocket to a .wav file then play it with VLC -> it plays everything in the record very fast -> 10 seconds of recording played in about 1 second.
I am new to audio processing and have searched for about 3 days now for solutions on how to improve this and found nothing.
I would ask from the community for 2 things:
Is something wrong with my approach? What more can I do to make this work? I will post more details if required.
If what I am doing is reinventing the wheel then I would like to know what other software / 3rd party service (like amazon or whatever) can accomplish the same thing.
Thank you.

Streaming a video file to an html5 video player with Node.js so that the video controls continue to work?

Tl;Dr - The Question:
What is the right way to handle streaming a video file to an html5 video player with Node.js so that the video controls continue to work?
I think it has to do with the way that the headers are handled. Anyway, here's the background information. The code is a little lengthy, however, it's pretty straightforward.
Streaming small video files to HTML5 video with Node is easy
I learned how to stream small video files to an HTML5 video player very easily. With this setup, the controls work without any work on my part, and the video streams flawlessly. A working copy of the fully working code with sample video is here, for download on Google Docs.
Client:
<html>
<title>Welcome</title>
<body>
<video controls>
<source src="movie.mp4" type="video/mp4"/>
<source src="movie.webm" type="video/webm"/>
<source src="movie.ogg" type="video/ogg"/>
<!-- fallback -->
Your browser does not support the <code>video</code> element.
</video>
</body>
</html>
Server:
// Declare Vars & Read Files
var fs = require('fs'),
http = require('http'),
url = require('url'),
path = require('path');
var movie_webm, movie_mp4, movie_ogg;
// ... [snip] ... (Read index page)
fs.readFile(path.resolve(__dirname,"movie.mp4"), function (err, data) {
if (err) {
throw err;
}
movie_mp4 = data;
});
// ... [snip] ... (Read two other formats for the video)
// Serve & Stream Video
http.createServer(function (req, res) {
// ... [snip] ... (Serve client files)
var total;
if (reqResource == "/movie.mp4") {
total = movie_mp4.length;
}
// ... [snip] ... handle two other formats for the video
var range = req.headers.range;
var positions = range.replace(/bytes=/, "").split("-");
var start = parseInt(positions[0], 10);
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
if (reqResource == "/movie.mp4") {
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": "video/mp4"
});
res.end(movie_mp4.slice(start, end + 1), "binary");
}
// ... [snip] ... handle two other formats for the video
}).listen(8888);
But this method is limited to files < 1GB in size.
Streaming (any size) video files with fs.createReadStream
By utilizing fs.createReadStream(), the server can read the file in a stream rather than reading it all into memory at once. This sounds like the right way to do things, and the syntax is extremely simple:
Server Snippet:
movieStream = fs.createReadStream(pathToFile);
movieStream.on('open', function () {
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": "video/mp4"
});
// This just pipes the read stream to the response object (which goes
//to the client)
movieStream.pipe(res);
});
movieStream.on('error', function (err) {
res.end(err);
});
This streams the video just fine! But the video controls no longer work.
The Accept Ranges header (the bit in writeHead()) is required for the HTML5 video controls to work.
I think instead of just blindly send the full file, you should first check the Accept Ranges header in the REQUEST, then read in and send just that bit. fs.createReadStream support start, and end option for that.
So I tried an example and it works. The code is not pretty but it is easy to understand. First we process the range header to get the start/end position. Then we use fs.stat to get the size of the file without reading the whole file into memory. Finally, use fs.createReadStream to send the requested part to the client.
var fs = require("fs"),
http = require("http"),
url = require("url"),
path = require("path");
http.createServer(function (req, res) {
if (req.url != "/movie.mp4") {
res.writeHead(200, { "Content-Type": "text/html" });
res.end('<video src="http://localhost:8888/movie.mp4" controls></video>');
} else {
var file = path.resolve(__dirname,"movie.mp4");
fs.stat(file, function(err, stats) {
if (err) {
if (err.code === 'ENOENT') {
// 404 Error if file not found
return res.sendStatus(404);
}
res.end(err);
}
var range = req.headers.range;
if (!range) {
// 416 Wrong range
return res.sendStatus(416);
}
var positions = range.replace(/bytes=/, "").split("-");
var start = parseInt(positions[0], 10);
var total = stats.size;
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": "video/mp4"
});
var stream = fs.createReadStream(file, { start: start, end: end })
.on("open", function() {
stream.pipe(res);
}).on("error", function(err) {
res.end(err);
});
});
}
}).listen(8888);
The accepted answer to this question is awesome and should remain the accepted answer. However I ran into an issue with the code where the read stream was not always being ended/closed. Part of the solution was to send autoClose: true along with start:start, end:end in the second createReadStream arg.
The other part of the solution was to limit the max chunksize being sent in the response. The other answer set end like so:
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
...which has the effect of sending the rest of the file from the requested start position through its last byte, no matter how many bytes that may be. However the client browser has the option to only read a portion of that stream, and will, if it doesn't need all of the bytes yet. This will cause the stream read to get blocked until the browser decides it's time to get more data (for example a user action like seek/scrub, or just by playing the stream).
I needed this stream to be closed because I was displaying the <video> element on a page that allowed the user to delete the video file. However the file was not being removed from the filesystem until the client (or server) closed the connection, because that is the only way the stream was getting ended/closed.
My solution was just to set a maxChunk configuration variable, set it to 1MB, and never pipe a read a stream of more than 1MB at a time to the response.
// same code as accepted answer
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
// poor hack to send smaller chunks to the browser
var maxChunk = 1024 * 1024; // 1MB at a time
if (chunksize > maxChunk) {
end = start + maxChunk - 1;
chunksize = (end - start) + 1;
}
This has the effect of making sure that the read stream is ended/closed after each request, and not kept alive by the browser.
I also wrote a separate StackOverflow question and answer covering this issue.
Firstly create app.js file in the directory you want to publish.
var http = require('http');
var fs = require('fs');
var mime = require('mime');
http.createServer(function(req,res){
if (req.url != '/app.js') {
var url = __dirname + req.url;
fs.stat(url,function(err,stat){
if (err) {
res.writeHead(404,{'Content-Type':'text/html'});
res.end('Your requested URI('+req.url+') wasn\'t found on our server');
} else {
var type = mime.getType(url);
var fileSize = stat.size;
var range = req.headers.range;
if (range) {
var parts = range.replace(/bytes=/, "").split("-");
var start = parseInt(parts[0], 10);
var end = parts[1] ? parseInt(parts[1], 10) : fileSize-1;
var chunksize = (end-start)+1;
var file = fs.createReadStream(url, {start, end});
var head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': type
}
res.writeHead(206, head);
file.pipe(res);
} else {
var head = {
'Content-Length': fileSize,
'Content-Type': type
}
res.writeHead(200, head);
fs.createReadStream(url).pipe(res);
}
}
});
} else {
res.writeHead(403,{'Content-Type':'text/html'});
res.end('Sorry, access to that file is Forbidden');
}
}).listen(8080);
Simply run node app.js and your server shall be running on port 8080. Besides video it can stream all kinds of files.

Categories