i am creating a streaming module that will stream any file on my server to the user
for this i have the following node code:
.get(function (req, res) {
if (req.query.resourceToken) {
var decoded = jwt.decode(req.query.resourceToken, require('../secret')());
var mediaObject = decoded.mediaObject;
var file = path.resolve(mediaObject.targetDir, mediaObject.file.originalname);
fs.stat(file, function (err, stats) {
if (err) {
if (err.code === 'ENOENT') {
// 404 Error if file not found
return res.sendStatus(404);
}
res.end(err);
}
var range = req.headers.range;
if (!range) {
// 416 Wrong range
return res.sendStatus(416);
}
var positions = range.replace(/bytes=/, "").split("-");
var start = parseInt(positions[0], 10);
var total = stats.size;
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": mediaObject.file.mimetype
});
var stream = fs.createReadStream(file, {start: start, end: end})
.on("open", function () {
stream.pipe(res);
}).on("error", function (err) {
res.end(err);
});
});
}
else {
res.status(500).send('Missing resource token!');
}
});
Now to download a simple document i have the follow html tag:
<a target="_self" href="http://localhost:433/resource?resourceToken=token" class="btn btn-default"> <i class="fa fa-download"> </i></a>
Now when i press the button it correctly calls the method however the header range is undefined.
which means it sends the error code 416 (as shown in the above code).
Can anyone tell me why this is happening and what im missing?
Range requests serve a rather specific purpose (like enabling partial downloads during audio/video scrubbing).
Your HTML suggests that you just want to present a download link for a particular resource, and the browser won't issue a range request for that (or it might, but only after the initial request).
So your code should allow for the Range header to not exist.
Related
I would like to stream different user-selected videos to my front-end. For this I am using NodeJS and Express. The source of the -element in which the video should be displayed is 'http://localhost:4201/video'.
The code I am using to stream the video looks like this:
async function loadLocalVideo(_, filePath) {
if (!filePath) {
console.log('No file selected');
return;
} else {
fs.access(filePath, (err) => {
if (err) {
console.log(`File does not exist at path ${filePath}`);
return;
}
});
}
expressApp.get('/video', function (req, res) {
const path = filePath;
const stat = fs.statSync(path);
const fileSize = stat.size;
const range = req.headers.range;
if (range) {
const parts = range.replace(/bytes=/, '').split('-');
const start = parseInt(parts[0], 10);
const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
if (start >= fileSize) {
res.status(416).send(
'Requested range not satisfiable\n' + start + ' >= ' + fileSize
);
return;
}
const chunksize = end - start + 1;
const file = fs.createReadStream(path, { start, end });
const head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': 'video/mp4',
};
res.writeHead(206, head);
file.pipe(res);
} else {
const head = {
'Content-Length': fileSize,
'Content-Type': 'video/mp4',
};
res.writeHead(200, head);
fs.createReadStream(path).pipe(res);
}
});
}
However, when I want to stream a different video and call the same function again but with a different filePath-param the same video keeps playing. How can I stream another video and display it in the -element?
I think you are saying that you are using the same file name and path but want the backend server to change the actual contents that are streamed to the client.
This approach may confuse the caching downstream of your server - i.e. the network or browser cache may not recognise that the file has changed and it may simply serve the cached versions.
Some CDN's will allow you add edge functionality to intercept a request and then decide which of a number of options you want to return, or you could disable caching but I suspect these approaches might be overly complex.
If all you want is to be able to display different videos in a video element on your web page, it may be easier to simply change the source attribute on the video on the page itself and then call load() on the video element on the page.
whenever readDirectory invoked i am checking if any file created Date is past 30 days i want to remove that file from direcotry. This is working as expected and removing the files but i dont think its efficient. So I want to move comapreDates method to separate file and call directory logs/St every night and check if any file expired passed 30 days period remove it. is it doable ?
service.js
function readDirectory(callback) {
var dirPath = './logs/St';
var files = fs.readdirSync(dirPath);
async.eachSeries(files, function(file, callback) {
var filePath = path.join(dirPath, file);
var fileInfo = {};
fs.stat(filePath, function(err, stats) {
if (err) {
console.info("File doesn't");
} else {
fileInfo.fileDate = stats.birthtime;
fileInfo.filename = file;
compareDates(fileInfo, filePath);
objToReturn.push(fileInfo);
callback();
}
});
}, function(err) {
//final callback when all files completed here send objToReturn to client
callback(objToReturn);
});
}
}
function compareDates(file, path) {
var timeDiff = Math.abs(currentDate.getTime() - file.fileDate.getTime());
var dayDifference = Math.ceil(timeDiff / (1000 * 3600 * 24));
console.log('Days', dayDifference);
console.log('FileName', file.filename);
if (dayDifference >= 30) {
fs.unlink(path, function(err) {
if (err) {
// file doens't exist
console.info("File doesn't exist, won't remove it.");
} else {
console.log('removed file', file);
}
});
}
}
See one of those modules:
https://www.npmjs.com/package/node-schedule
https://www.npmjs.com/package/node-cron
https://www.npmjs.com/package/node-cron-job
and more modules here:
https://www.npmjs.com/browse/keyword/schedule
https://www.npmjs.com/browse/keyword/cron
https://www.npmjs.com/browse/keyword/job
I'm using the async module on my Node server in order to loop through objects in my mongodb database, fire off requests to the Instagram API based on data within each object, and increment minTimestamp on each iteration until endTimestamp is reached.
The code below works great except for one big issue. If I increment minTimestamp by a hard-coded value (minTimestamp += 1000) everything runs great. However, when I change that one line of code to grab the latest created_time from the most recent response (minTimestamp = images[0].created_time) my loop runs once for each 'event' then stops. I get the correct incremented minTimestamp logged to my console, but the value never seems to make it to the next loop.
// modules =================================================
var express = require('express.io');
var app = express();
var port = process.env.PORT || 6060;
var io = require('socket.io').listen(app.listen(port));
var request = require('request');
var Instagram = require('instagram-node-lib');
var mongoose = require('mongoose');
var async = require('async');
var bodyParser = require('body-parser');
var methodOverride = require('method-override');
var db = require('./config/db');
var Event = require('./app/models/event');
// configuration ===========================================
mongoose.connect(db.url); // connect to our mongoDB database
// get all data/stuff of the body (POST) parameters
app.use(bodyParser.json()); // parse application/json
app.use(bodyParser.json({ type: 'application/vnd.api+json' })); // parse application/vnd.api+json as json
app.use(bodyParser.urlencoded({ extended: true })); // parse application/x-www-form- urlencoded
app.use(methodOverride('X-HTTP-Method-Override')); // override with the X-HTTP-Method- Override header in the request. simulate DELETE/PUT
app.use(express.static(__dirname + '/public')); // set the static files location /public/img will be /img for users
var baseUrl = 'https://api.instagram.com/v1/media/search?lat=';
var clientId = CLIENT-ID;
Event.find({}, function(err, events) {
async.eachSeries(events, function(event, seriesCallback) {
var name = event.event;
var latitude = event.latitude;
var longitude = event.longitude;
var distance = event.radius;
var minTimestamp = Math.floor(new Date(event.start).getTime()/1000);
var endTimestamp = Math.floor(new Date(event.end).getTime()/1000);
async.whilst(
function () { return minTimestamp < Math.floor(Date.now() / 1000) && minTimestamp < endTimestamp; },
function(requestFinishedCallback) {
console.log('sending request to Instagram for ' + name + ' with min_timestamp: ' + minTimestamp);
request(baseUrl + latitude + '&lng=' + longitude + '&distance=' + distance + '&min_timestamp=' + minTimestamp + '&client_id=' + clientId,
function (error, response, body) {
if (error) {
console.log('error');
return;
}
//JSON object with all the info about the image
var imageJson = JSON.parse(body);
var images = imageJson.data;
var numImages = images.length;
if (numImages > 0) {
console.log(numImages + ' images returned with starting time ' + images[(numImages - 1)].created_time + ' and ending time ' + images[0].created_time);
}
async.eachSeries(images, function(image, imageFinishedCallback) {
//Save the new object to DB
Event.findOneAndUpdate( { $and: [{latitude: latitude}, {radius: distance}] }, { $push: {'photos':
{ img: image.images.standard_resolution.url,
link: image.link,
username: image.user.username,
profile: image.user.profile_picture,
text: image.caption ? image.caption.text : '',
longitude: image.location.longitude,
latitude: image.location.latitude
}}},
{ safe: true, upsert: false },
function(err, model) {
console.log(err);
}
);
imageFinishedCallback();
}, function(err){
// if any of the image processing produced an error, err would equal that error
if( err ) {
// One of the iterations produced an error.
// All processing will now stop.
console.log('Images failed to process');
} else {
console.log('Images processed');
}
});
// this works
minTimestamp += 1000;
// this does not
// minTimestamp = images[0].created_time;
if (numImages > 0) {
console.log(numImages + 'images have been processed successfully and min_timestamp has been incremented to: ' + minTimestamp);
}
requestFinishedCallback();
}
);
}, function(err){
// if any of the image processing produced an error, err would equal that error
if( err ) {
// One of the iterations produced an error.
// All processing will now stop.
console.log('Event failed to process');
} else {
console.log(name + ' has been fully processed successfully with final min_timestamp of: ' + minTimestamp);
}
}
);
seriesCallback();
}, function(err){
// if any of the image processing produced an error, err would equal that error
if( err ) {
// One of the iterations produced an error.
// All processing will now stop.
console.log('Something failed to process');
} else {
console.log('All events have been processed successfully');
}
}
);
});
// routes ==================================================
require('./app/routes')(app); // configure our routes
// start app ===============================================
console.log('Magic happens on port ' + port); // shoutout to the user
exports = module.exports = app;
If you have a git repo I can look at, I can debug this much better, but... that said, I see two glaring issues:
Although you are doing a async.eachSeries, you aren't waiting to finish the findOneAndUpdate call.
Your example:
Event.findOneAndUpdate( {}, {},
function(err, model) {
console.log(err);
}
);
imageFinishedCallback();
Should be turned into this:
Event.findOneAndUpdate( {}, {},
function(err, model) {
console.log(err);
imageFinishedCallback();
}
);
Similar to the first problem, but with the async.whilst callback. You are calling async.eachSeries, but are then immediately moving to the next loop.
Your code:
function (error, response, body) {
// ...
async.eachSeries(images, function(image, imageFinishedCallback) {/* ... */},
function(err){
// ...
});
// this works
minTimestamp += 1000;
// this does not
// minTimestamp = images[0].created_time;
if (numImages > 0) {
console.log(numImages + 'images have been processed successfully and min_timestamp has been incremented to: ' + minTimestamp);
}
requestFinishedCallback();
}
Should be changed to:
function (error, response, body) {
// ...
async.eachSeries(images, function(image, imageFinishedCallback) {/* ... */},
function(err){
// ...
console.log(numImages + 'images have been processed successfully and min_timestamp has been incremented to: ' + minTimestamp);
minTimestamp = images[0].created_time;
requestFinishedCallback();
});
}
If you have a github repo to post up I can debug further, but... it looks like the issue is coming from not waiting for the async functions to finish.
Tl;Dr - The Question:
What is the right way to handle streaming a video file to an html5 video player with Node.js so that the video controls continue to work?
I think it has to do with the way that the headers are handled. Anyway, here's the background information. The code is a little lengthy, however, it's pretty straightforward.
Streaming small video files to HTML5 video with Node is easy
I learned how to stream small video files to an HTML5 video player very easily. With this setup, the controls work without any work on my part, and the video streams flawlessly. A working copy of the fully working code with sample video is here, for download on Google Docs.
Client:
<html>
<title>Welcome</title>
<body>
<video controls>
<source src="movie.mp4" type="video/mp4"/>
<source src="movie.webm" type="video/webm"/>
<source src="movie.ogg" type="video/ogg"/>
<!-- fallback -->
Your browser does not support the <code>video</code> element.
</video>
</body>
</html>
Server:
// Declare Vars & Read Files
var fs = require('fs'),
http = require('http'),
url = require('url'),
path = require('path');
var movie_webm, movie_mp4, movie_ogg;
// ... [snip] ... (Read index page)
fs.readFile(path.resolve(__dirname,"movie.mp4"), function (err, data) {
if (err) {
throw err;
}
movie_mp4 = data;
});
// ... [snip] ... (Read two other formats for the video)
// Serve & Stream Video
http.createServer(function (req, res) {
// ... [snip] ... (Serve client files)
var total;
if (reqResource == "/movie.mp4") {
total = movie_mp4.length;
}
// ... [snip] ... handle two other formats for the video
var range = req.headers.range;
var positions = range.replace(/bytes=/, "").split("-");
var start = parseInt(positions[0], 10);
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
if (reqResource == "/movie.mp4") {
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": "video/mp4"
});
res.end(movie_mp4.slice(start, end + 1), "binary");
}
// ... [snip] ... handle two other formats for the video
}).listen(8888);
But this method is limited to files < 1GB in size.
Streaming (any size) video files with fs.createReadStream
By utilizing fs.createReadStream(), the server can read the file in a stream rather than reading it all into memory at once. This sounds like the right way to do things, and the syntax is extremely simple:
Server Snippet:
movieStream = fs.createReadStream(pathToFile);
movieStream.on('open', function () {
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": "video/mp4"
});
// This just pipes the read stream to the response object (which goes
//to the client)
movieStream.pipe(res);
});
movieStream.on('error', function (err) {
res.end(err);
});
This streams the video just fine! But the video controls no longer work.
The Accept Ranges header (the bit in writeHead()) is required for the HTML5 video controls to work.
I think instead of just blindly send the full file, you should first check the Accept Ranges header in the REQUEST, then read in and send just that bit. fs.createReadStream support start, and end option for that.
So I tried an example and it works. The code is not pretty but it is easy to understand. First we process the range header to get the start/end position. Then we use fs.stat to get the size of the file without reading the whole file into memory. Finally, use fs.createReadStream to send the requested part to the client.
var fs = require("fs"),
http = require("http"),
url = require("url"),
path = require("path");
http.createServer(function (req, res) {
if (req.url != "/movie.mp4") {
res.writeHead(200, { "Content-Type": "text/html" });
res.end('<video src="http://localhost:8888/movie.mp4" controls></video>');
} else {
var file = path.resolve(__dirname,"movie.mp4");
fs.stat(file, function(err, stats) {
if (err) {
if (err.code === 'ENOENT') {
// 404 Error if file not found
return res.sendStatus(404);
}
res.end(err);
}
var range = req.headers.range;
if (!range) {
// 416 Wrong range
return res.sendStatus(416);
}
var positions = range.replace(/bytes=/, "").split("-");
var start = parseInt(positions[0], 10);
var total = stats.size;
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": "video/mp4"
});
var stream = fs.createReadStream(file, { start: start, end: end })
.on("open", function() {
stream.pipe(res);
}).on("error", function(err) {
res.end(err);
});
});
}
}).listen(8888);
The accepted answer to this question is awesome and should remain the accepted answer. However I ran into an issue with the code where the read stream was not always being ended/closed. Part of the solution was to send autoClose: true along with start:start, end:end in the second createReadStream arg.
The other part of the solution was to limit the max chunksize being sent in the response. The other answer set end like so:
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
...which has the effect of sending the rest of the file from the requested start position through its last byte, no matter how many bytes that may be. However the client browser has the option to only read a portion of that stream, and will, if it doesn't need all of the bytes yet. This will cause the stream read to get blocked until the browser decides it's time to get more data (for example a user action like seek/scrub, or just by playing the stream).
I needed this stream to be closed because I was displaying the <video> element on a page that allowed the user to delete the video file. However the file was not being removed from the filesystem until the client (or server) closed the connection, because that is the only way the stream was getting ended/closed.
My solution was just to set a maxChunk configuration variable, set it to 1MB, and never pipe a read a stream of more than 1MB at a time to the response.
// same code as accepted answer
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
// poor hack to send smaller chunks to the browser
var maxChunk = 1024 * 1024; // 1MB at a time
if (chunksize > maxChunk) {
end = start + maxChunk - 1;
chunksize = (end - start) + 1;
}
This has the effect of making sure that the read stream is ended/closed after each request, and not kept alive by the browser.
I also wrote a separate StackOverflow question and answer covering this issue.
Firstly create app.js file in the directory you want to publish.
var http = require('http');
var fs = require('fs');
var mime = require('mime');
http.createServer(function(req,res){
if (req.url != '/app.js') {
var url = __dirname + req.url;
fs.stat(url,function(err,stat){
if (err) {
res.writeHead(404,{'Content-Type':'text/html'});
res.end('Your requested URI('+req.url+') wasn\'t found on our server');
} else {
var type = mime.getType(url);
var fileSize = stat.size;
var range = req.headers.range;
if (range) {
var parts = range.replace(/bytes=/, "").split("-");
var start = parseInt(parts[0], 10);
var end = parts[1] ? parseInt(parts[1], 10) : fileSize-1;
var chunksize = (end-start)+1;
var file = fs.createReadStream(url, {start, end});
var head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': type
}
res.writeHead(206, head);
file.pipe(res);
} else {
var head = {
'Content-Length': fileSize,
'Content-Type': type
}
res.writeHead(200, head);
fs.createReadStream(url).pipe(res);
}
}
});
} else {
res.writeHead(403,{'Content-Type':'text/html'});
res.end('Sorry, access to that file is Forbidden');
}
}).listen(8080);
Simply run node app.js and your server shall be running on port 8080. Besides video it can stream all kinds of files.
I'm trying to perform a UDP scrape from public UDP trackers such as tracker.publicbt.com or tracker.openbittorrent.com using the BitTorrent UDP Tracker Protocol. My app sends a request to the tracker for a connection_id and uses that id to perform a scrape. The scrape response is returned from the tracker, with no errors to indicate a badly formed packet, but no matter what info_hash I use, I get "0" returned for the numbers of seeders, leechers and completed.
I've thoroughly checked that the packet is the right size, that the info_hash starts at the correct offset, and that the data are all correct. As far as I can see, there are no problems creating and sending the packet. This question's been open and unanswered for a few days, so I've updated and edited the code example below in the hope someone can help.
I've hardcoded an info_hash into the following example. When run on the command line this code should connect to the tracker, get a connection_id and then perform a scrape on an Ubuntu torrent info_hash, outputting various bits of info to the console.
The connection_id is split into 2 parts because it is a 64 bit integer.
var dgram = require('dgram'),
server = dgram.createSocket("udp4"),
connectionIdHigh = 0x417,
connectionIdLow = 0x27101980,
transactionId,
action,
trackerHost = "tracker.publicbt.com",
trackerPort = 80,
infoHash = "",
ACTION_CONNECT = 0,
ACTION_ANNOUNCE = 1,
ACTION_SCRAPE = 2,
ACTION_ERROR = 3,
sendPacket = function (buf, host, port) {
"use strict";
server.send(buf, 0, buf.length, port, host, function(err, bytes) {
if (err) {
console.log(err.message);
}
});
},
startConnection = function (host, port) {
"use strict";
var buf = new Buffer(16);
transactionId = Math.floor((Math.random()*100000)+1);
buf.fill(0);
buf.writeUInt32BE(connectionIdHigh, 0);
buf.writeUInt32BE(connectionIdLow, 4);
buf.writeUInt32BE(ACTION_CONNECT, 8);
buf.writeUInt32BE(transactionId, 12);
sendPacket(buf, host, port);
},
scrapeTorrent = function (host, port, hash) {
"use strict";
var buf = new Buffer(56),
tmp = '';
infoHash = hash;
if (!transactionId) {
startConnection(host, port);
} else {
buf.fill(0);
buf.writeUInt32BE(connectionIdHigh, 0);
buf.writeUInt32BE(connectionIdLow, 4);
buf.writeUInt32BE(ACTION_SCRAPE, 8);
buf.writeUInt32BE(transactionId, 12);
buf.write(infoHash, 16, buf.length);
console.log(infoHash);
console.log(buf.toString('utf8', 16, buf.length));
// do scrape
sendPacket(buf, host, port);
transactionId = null;
infoHash = null;
}
};
server.on("message", function (msg, rinfo) {
"use strict";
var buf = new Buffer(msg),
seeders,
completed,
leechers;
console.log(rinfo);
action = buf.readUInt32BE(0, 4);
transactionId = buf.readUInt32BE(4, 4);
console.log("returned action: " + action);
console.log("returned transactionId: " + transactionId);
if (action === ACTION_CONNECT) {
console.log("connect response");
connectionIdHigh = buf.readUInt32BE(8, 4);
connectionIdLow = buf.readUInt32BE(12, 4);
scrapeTorrent(trackerHost, trackerPort, infoHash);
} else if (action === ACTION_SCRAPE) {
console.log("scrape response");
seeders = buf.readUInt32BE(8, 4);
completed = buf.readUInt32BE(12, 4);
leechers = buf.readUInt32BE(16, 4);
console.log(seeders);
console.log(completed);
console.log(leechers);
} else if (action === ACTION_ERROR) {
console.log("error response");
}
});
server.on("listening", function () {
"use strict";
var address = server.address();
console.log("server listening " + address.address + ":" + address.port);
});
server.bind();
scrapeTorrent(trackerHost, trackerPort, "335990D615594B9BE409CCFEB95864E24EC702C7");
I finally worked this out, and kicked myself for not realising sooner.
An info_hash is a hex encoded string, so when it's written to the buffer needs to have it's encoding set. For example:
buf.write(infoHash, 16, buf.length, 'hex');
The UDP tracker protocol doesn't mention the encoding required, it just describes it as a 20 byte string. Hopefully this Q&A might help someone else who encounters the same problem.