Replacing fs.readFile with fs.createReadStream in Node.js - javascript

I have code for reading an image from a directory and sending it to index.html.
I am trying to replace fs.readFile with fs.createReadStream, but I have no idea how to implement this, as I cannot find a good example.
Here is what I have (index.js):
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var fs = require('fs');

http.listen(3000, function () {
    console.log('listening on *:3000');
});

app.get('/', function (req, res) {
    res.sendFile(__dirname + '/public/views/index.html');
});

io.on('connection', function (socket) {
    fs.readFile(__dirname + '/public/images/image.png', function (err, buf) {
        socket.emit('image', { image: true, buffer: buf.toString('base64') });
    });
});
index.html
<!DOCTYPE html>
<html>
<body>
    <canvas id="canvas" width="200" height="100">
        Your browser does not support the HTML5 canvas tag.
    </canvas>
    <script src="https://cdn.socket.io/socket.io-1.2.0.js"></script>
    <script>
        var socket = io();
        var ctx = document.getElementById('canvas').getContext('2d');
        socket.on("image", function (info) {
            if (info.image) {
                var img = new Image();
                img.src = 'data:image/jpeg;base64,' + info.buffer;
                ctx.drawImage(img, 0, 0);
            }
        });
    </script>
</body>
</html>

The approach below only uses core modules: it reads the chunks from the stream.Readable instance returned by fs.createReadStream() and concatenates them into a single Buffer. This isn't a great approach if you're not going to stream the chunks back, because the whole file ends up held in a Buffer in memory, so it's only a good solution for reasonably sized files.
io.on('connection', function (socket) {
    fileToBuffer(__dirname + '/public/images/image.png', (err, imageBuffer) => {
        if (err) {
            socket.emit('error', err);
        } else {
            socket.emit('image', { image: true, buffer: imageBuffer.toString('base64') });
        }
    });
});
const fileToBuffer = (filename, cb) => {
    let readStream = fs.createReadStream(filename);
    let chunks = [];

    // Handle any errors while reading
    readStream.on('error', err => {
        // File could not be read
        return cb(err);
    });

    // Listen for data and collect each chunk
    readStream.on('data', chunk => {
        chunks.push(chunk);
    });

    // File is done being read
    readStream.on('close', () => {
        // Create a buffer of the image from the stream
        return cb(null, Buffer.concat(chunks));
    });
};
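As an aside, on newer Node versions (16.7+) the collect-the-chunks step can be delegated to the built-in stream consumers helper. A small sketch of that alternative, not part of the original answer, assuming a recent enough Node:

// requires Node 16.7+: stream/consumers collects a Readable into a single Buffer
const { buffer } = require('stream/consumers');
const fs = require('fs');

const fileToBufferModern = (filename) =>
    buffer(fs.createReadStream(filename)); // resolves with a Buffer, rejects on read errors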
HTTP Response Stream Example
It's almost always a better idea to use HTTP for streaming data, since streaming is built into the protocol and you never need to load the data into memory all at once: you can pipe() the file stream directly to the response.
This is a very basic example without the bells and whistles, and it just demonstrates how to pipe() a stream.Readable to an http.ServerResponse. The example uses Express, but it works exactly the same way using http or https from the Node.js core API.
const express = require('express');
const fs = require('fs');

const server = express();
const port = process.env.PORT || 1337;

server.get('/image', (req, res) => {
    let readStream = fs.createReadStream(__dirname + '/public/images/image.png');

    // When the stream is done being read, end the response
    readStream.on('close', () => {
        res.end();
    });

    // Stream chunks to the response
    readStream.pipe(res);
});

server.listen(port, () => {
    console.log(`Listening on ${port}`);
});
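On the browser side nothing special is needed to consume that endpoint; an img tag (or a plain fetch) pointed at the route is enough. A minimal sketch, assuming the /image route and port from the example above (setting an explicit Content-Type on the server, e.g. res.type('png') in Express, is also a good idea):

<!-- public/views/index.html: the browser streams the image itself -->
<img src="/image" alt="streamed image" width="200">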

Related

Merge Two codes

I have 2 files in Node.js. I want to merge these 2, but I am facing a problem.
This file calls a function from a Python file:
const app = express()

let runPy = new Promise(function(success, nosuccess) {
    const { spawn } = require('child_process');
    const pyprog = spawn('python', ['./ml.py']);

    pyprog.stdout.on('data', function(data) {
        success(data);
    });

    pyprog.stderr.on('data', (data) => {
        nosuccess(data);
    });
});

app.get('/', (req, res) => {
    res.write('welcome\n');
    runPy.then(function(testMLFunction) {
        console.log(testMLFunction.toString());
        res.end(testMLFunction);
    });
})

app.listen(4000, () => console.log('Application listening on port 4000!'))
The Python file, ml.py:
def testMLFunction():
    return "hello from Python"

print(testMLFunction())
The file below works on a button click with the POST method:
var fs = require('fs');
var server = http.createServer(function (req, res) {
    if (req.method === "GET") {
        res.writeHead(200, { "Content-Type": "text/html" });
        fs.createReadStream("./form.html", "UTF-8").pipe(res);
    } else if (req.method === "POST") {
        var result = "";
        req.on("data", function (chunk) {
            console.log(chunk.toString());
            result = chunk;
            //body=body.toUpperCase;
        });
        req.on("end", function () {
            res.writeHead(200, { "Content-Type": "text/html" });
            res.end(result);
        });
    }
}).listen(3000);
How can I do that?
There are several things wrong here. I will explain as plainly as possible.
You forgot to add var express = require('express') to your code.
The promise you made, runPy, must be wrapped in a function; as written, the promise starts executing as soon as the script is loaded.
You are resolving/rejecting on the first piece of output, which you shouldn't do, because you won't be able to know what really happened in the shell. You need to store those output lines; that is the only way of knowing what the script tells you.
In runPy you must resolve/reject on the pyprog close event.
You cannot directly call a method of another script, no matter what kind of file it is (py, sh, bat, js). However, you can reach its internal functions by passing arguments to the shell, and of course that script must have the logic required to deal with those arguments.
When using spawn/exec, keep in mind that YOU ARE NOT the user executing the script, the node user is, so different outcomes may occur.
Most importantly, your target script must PRINT/ECHO to the shell, not return! The best approach is to print a JSON string and parse it in JavaScript after the shell has closed, so you end up with an object instead of a string; a sketch of that pattern follows.
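For instance, a rough sketch of that print-JSON-and-parse pattern (runScriptAsJson is a made-up name; it assumes the Python script prints exactly one JSON document to stdout):

const { spawn } = require('child_process');

function runScriptAsJson(scriptPath) {
    return new Promise((resolve, reject) => {
        const child = spawn('python', [scriptPath], { shell: true });
        const out = [];
        const errs = [];

        child.stdout.on('data', chunk => out.push(chunk));  // store every printed line
        child.stderr.on('data', chunk => errs.push(chunk)); // store every error line

        child.on('close', () => {                           // only decide once the shell has closed
            if (errs.length) {
                return reject(new Error(Buffer.concat(errs).toString()));
            }
            try {
                resolve(JSON.parse(Buffer.concat(out).toString()));
            } catch (e) {
                reject(e); // the script did not print valid JSON
            }
        });
    });
}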
Below you will find a demo for your use case; I changed the Python file so it prints something.
ml.py
print('I\'m the output from ml.py')
index.js
const express = require('express');
const app = express()

let runPy = function () { // the promise is now wrapped in a function so it won't trigger on script load
    return new Promise(function (success, nosuccess) {
        const {spawn} = require('child_process');
        const pyprog = spawn('python', ['./ml.py'], {shell: true}); // add shell:true so node will spawn it with your system shell
        let storeLines = [];  // store the rows printed by the script
        let storeErrors = []; // store any errors that occurred

        pyprog.stdout.on('data', function (data) {
            storeLines.push(data);
        });

        pyprog.stderr.on('data', (data) => {
            storeErrors.push(data);
        });

        pyprog.on('close', () => {
            // if we have errors, reject the promise; we'll catch it later
            if (storeErrors.length) {
                nosuccess(new Error(Buffer.concat(storeErrors).toString()));
            } else {
                success(storeLines);
            }
        })
    })
};
let path = require('path');

app.use(express.json());
app.use(express.urlencoded({ extended: true })); // you need to set this so you can catch POST requests

app.all('/', (req, res) => { // changed from .get to .all so you can catch both GET and POST requests here
    console.log('post params', req.body);
    if (req.body.hasOwnProperty('btn-send')) {
        runPy()
            .then(function (pyOutputBuffer) {
                let message = 'You sent these params:\n' + JSON.stringify(req.body, null, 2) + '\n';
                message += Buffer.concat(pyOutputBuffer).toString();
                res.end(message);
            })
            .catch(console.log)
    } else {
        res.sendFile(path.join(__dirname, 'form.html')); // you need an absolute path to 'form.html'
    }
});

app.listen(4000, () => console.log('Application listening on port 4000!'));
form.html
<div>hello there</div>
<form action="/" method="post">
<input type="text" value="" name="some-text"/>
<button type="submit" value="1" name="btn-send" >Press me!</button>
</form>

Heavy load while transferring images via socket.io over websockets

I want to stream images from one client to another through a Node.js-based backend. I send an image to the backend every 40 ms, and the backend in turn emits the image to another client. My problem is that every client pair increases the load on the backend by 5%. Each image is about 20 KB.
My frontend code looks like this:
const socket = require('socket.io-client')('http://localhost:3000');
const fs = require('fs');

socket.on('connect', function () {
    console.log('connected');
    socket.emit('room', '123456');
});

socket.on('joined', () => {
    console.log('joined the room');
    const file = fs.readFileSync('./1902865_597805016961665_1536612023_n.jpg');
    setInterval(() => {
        socket.emit('frame', file);
    }, 40);
});
The backend looks like this:
const app = require('express')();
const http = require('http').Server(app);
const io = require('socket.io')(http);

http.listen(3000, function () {
    console.log('listening on *:3000');
});

io.on('connection', socket => {
    socket.on('room', function (room) {
        console.log(`got join request for ${room}`);
        socket.join(room);
        socket.myroom = room;
        socket.emit('joined');
    });

    socket.on('disconnect', function () {
        socket.to(socket.myroom).emit('disconnected');
        socket.disconnect();
    });

    socket.frameCounter = 0;
    socket.on('frame', frame => {
        console.dir(`frameCounter ${socket.frameCounter}`);
        socket.frameCounter += 1;
        socket.to(socket.myroom).emit('frame', frame);
    });
});
A working example can be found here: https://code.flyacts.com/tsc/socket-io-performance-problems/
Is this expected behavior? Or am I doing something wrong?

Testing a node.js server in the same process as the test

I am wondering if there is any disadvantage to starting a server in a process and then running tests against that server in the same process.
Obviously there are some performance concerns, but if we are testing accuracy instead of performance, are there any major concerns with code like the following?
var fs = require('fs');
var path = require('path');
var http = require('http');
var supertest = require('supertest');
var assert = require('assert');

describe('#Test - handleXml()*', function() {
    var self = this;
    var server;
    var payload = ''; // stringified XML
    var xmlPath = path.resolve('test', 'test_data', 'xml_payloads', 'IVR_OnDemandErrorCode.xml');

    before(function(done) {
        var config = self.config = require('univ-config')(module, this.test.parent.title, 'config/test-config');
        server = createServer().on('listening', function() {
            done(null);
        });
    });

    beforeEach(function(done) {
        fs.readFile(xmlPath, 'utf8', function(err, content) {
            assert(err == null);
            payload = content;
            done();
        });
    });

    it('should accept request Content-type "text/xml or application/xml"', function(done) {
        supertest(server)
            .post('/event')
            .set('Content-Type', 'application/xml')
            .send(payload)
            .expect(200, done);
    });

    it('should transform XML payload into JSON object', function(done) {
        supertest(server)
            .post('/event')
            .set('Content-type', 'application/xml')
            .send(payload)
            .expect(200)
            .end(function(err, res) {
                assert(err == null, 'Error is not null');
                var jsonifiedXml = JSON.parse(res.text);
                assert(typeof jsonifiedXml === 'object', 'jsonifiedXml not an object');
                done();
            });
    });

    describe('JSONified XML', function() {
        it('should have proper key casing', function(done) {
            supertest(server)
                .post('/event')
                .set('Content-type', 'application/xml')
                .send(payload)
                .expect(200)
                .end(function(err, res) {
                    assert(err == null);
                    var payload = JSON.parse(res.text);
                    payload = payload.events[0].data;
                    assert(payload.hasOwnProperty('ppv'), 'Bad value for ppv');
                    assert(payload.hasOwnProperty('mac'), 'Bad value for mac');
                    assert(payload.hasOwnProperty('appName'), 'Bad value for appName');
                    assert(payload.hasOwnProperty('divisionId'), 'Bad value for divisionId');
                    assert(payload.hasOwnProperty('callTime'), 'Bad value for callTime');
                    assert(payload.hasOwnProperty('callDate'), 'Bad value for callDate');
                    assert(payload.hasOwnProperty('ivrLOB'), 'Bad value for ivrLOB');
                    done();
                });
        });
    });
});

function createServer(opts) {
    //Note: this is a good pattern, definitely
    var handleXml = require(path.resolve('lib', 'handleXml'));
    var server = http.createServer(function(req, res) {
        handleXml(req, res, function(err) {
            res.statusCode = err ? (err.status || 500) : 200;
            res.end(err ? err.message : JSON.stringify(req.body));
        });
    });
    server.listen(5999); //TODO: which port should this be listening on? an unused port, surely
    return server;
}
That's the standard way of testing HTTP endpoints in a Node application. But you are not going to want a createServer() function in each test. You will have a common function that creates a server, which you can use throughout your application, including to start the production server.
You are right in noticing that having the server listen on a port doesn't actually do anything for you.
For this reason, it's common to have what I call an application factory that starts everything about a server, but does not listen to a port. That way I can access the server from a test or a script. The production app gets booted from a minimal index file:
var createServer = require('./AppFactory');
var server = createServer();
server.listen(5999);
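A minimal sketch of what such an AppFactory module could look like for this example (the file name and the handleXml wiring are illustrative, mirroring the createServer() above):

// AppFactory.js -- builds the server but never calls listen()
var http = require('http');
var path = require('path');

module.exports = function createServer() {
    var handleXml = require(path.resolve('lib', 'handleXml'));

    return http.createServer(function(req, res) {
        handleXml(req, res, function(err) {
            res.statusCode = err ? (err.status || 500) : 200;
            res.end(err ? err.message : JSON.stringify(req.body));
        });
    });
};

In the tests you can then hand the un-listened server straight to supertest(server); supertest binds it to an ephemeral port for you, so only the production index file ever calls listen().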

Simple Way to Implement Server Sent Events in Node.js?

I've looked around and it seems as if all the ways to implement SSEs in Node.js are through more complex code, but it seems like there should be an easier way to send and receive SSEs. Are there any APIs or modules that make this simpler?
Here is an express server that sends one Server-Sent Event (SSE) per second, counting down from 10 to 0:
const express = require('express')
const app = express()

app.use(express.static('public'))

app.get('/countdown', function(req, res) {
    res.writeHead(200, {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive'
    })
    countdown(res, 10)
})

function countdown(res, count) {
    res.write("data: " + count + "\n\n")
    if (count)
        setTimeout(() => countdown(res, count - 1), 1000)
    else
        res.end()
}

app.listen(3000, () => console.log('SSE app listening on port 3000!'))
Put the above code into a file (index.js) and run it: node index
Next, put the following HTML into a file (public/index.html):
<html>
<head>
    <script>
        if (!!window.EventSource) {
            var source = new EventSource('/countdown')

            source.addEventListener('message', function(e) {
                document.getElementById('data').innerHTML = e.data
            }, false)

            source.addEventListener('open', function(e) {
                document.getElementById('state').innerHTML = "Connected"
            }, false)

            source.addEventListener('error', function(e) {
                const id_state = document.getElementById('state')
                if (e.eventPhase == EventSource.CLOSED)
                    source.close()
                if (e.target.readyState == EventSource.CLOSED) {
                    id_state.innerHTML = "Disconnected"
                }
                else if (e.target.readyState == EventSource.CONNECTING) {
                    id_state.innerHTML = "Connecting..."
                }
            }, false)
        } else {
            console.log("Your browser doesn't support SSE")
        }
    </script>
</head>
<body>
    <h1>SSE: <span id="state"></span></h1>
    <h3>Data: <span id="data"></span></h3>
</body>
</html>
In your browser, open localhost:3000 and watch the SSE countdown.
I'm adding a simple implementation of SSE here. It's just one Node.js file.
You can have a look at the result here: https://glossy-ox.glitch.me/
const http = require('http');

const port = process.env.PORT || 3000;

const server = http.createServer((req, res) => {
    // Server-sent events endpoint
    if (req.url === '/events') {
        res.writeHead(200, {
            'Content-Type': 'text/event-stream',
            'Cache-Control': 'no-cache',
            ...(req.httpVersionMajor === 1 && { 'Connection': 'keep-alive' })
        });

        const refreshRate = 1000; // in milliseconds
        return setInterval(() => {
            const id = Date.now();
            const data = `Hello World ${id}`;
            const message =
                `retry: ${refreshRate}\nid:${id}\ndata: ${data}\n\n`;
            res.write(message);
        }, refreshRate);
    }

    // Client side
    res.writeHead(200, {'Content-Type': 'text/html'});
    res.end(`
        <!DOCTYPE html>
        <html lang="en" dir="ltr">
        <head>
            <meta charset="utf-8">
            <title>SSE</title>
        </head>
        <body>
            <pre id="log"></pre>
        </body>
        <script>
            var eventSource = new EventSource('/events');
            eventSource.onmessage = function(event) {
                document.getElementById('log').innerHTML += event.data + '<br>';
            };
        </script>
        </html>
    `);
});

server.listen(port);

server.on('error', (err) => {
    console.log(err);
    process.exit(1);
});

server.on('listening', () => {
    console.log(`Listening on port ${port}`);
});
If you're using Express, this is the easiest way: https://www.npmjs.com/package/express-sse
on BE:
const SSE = require('express-sse');
const sse = new SSE();
...
app.get('/sse', sse.init);
...
sse.send('message', 'event-name');
on FE:
const EventSource = require('eventsource');
const es = new EventSource('http://localhost:3000/sse');

es.addEventListener('event-name', function (message) {
    console.log('message:', message)
});
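Note that the eventsource package is only needed outside the browser (e.g. in Node scripts or tests); in an ordinary page the browser's built-in EventSource works directly, for example:

<script>
    var es = new EventSource('/sse'); // same route as registered with sse.init above
    es.addEventListener('event-name', function (message) {
        console.log('message:', message.data);
    });
</script>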
I found an SSE implementation for Node.js.
GitHub link: https://github.com/einaros/sse.js
NPM module: https://www.npmjs.com/package/sse
Do the above links help you?
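Roughly, based on that project's README, the sse package attaches to an existing HTTP server; treat the exact API below as an assumption and double-check it against the README:

var SSE = require('sse');
var http = require('http');

var server = http.createServer(function (req, res) {
    res.writeHead(200, { 'Content-Type': 'text/html' });
    res.end('okay');
});

server.listen(8080, '127.0.0.1', function () {
    var sse = new SSE(server); // clients connect to the /sse path by default
    sse.on('connection', function (client) {
        client.send('hi there!');
    });
});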
**client.js**
var eventSource = new EventSource("/route/events");
eventSource.addEventListener("ping", function (e) { log(e.data) });
// if no event name is specified, the default "message" event fires
eventSource.addEventListener("message", function (e) { log(e.data) });
**server.js**
const http = require("http");          // added: needed for createServer
const PORT = process.env.PORT || 3000; // added: pick whatever port you like

http.createServer((req, res) => {
    if (req.url.indexOf("/route/events") >= 0) {
        res.setHeader('Connection', 'keep-alive');
        res.setHeader("Cache-Control", "no-cache");
        res.setHeader("Content-Type", "text/event-stream");

        let event = "event: ping";
        let id = `id: ${Date.now()}`;
        let data = {
            message: `hello #${new Date().toString()}`
        }
        data = "data: " + JSON.stringify(data);

        // note: res.end() sends a single event and closes the stream
        res.end(`${event}\n${id}\n${data}\n\n`);
    }
}).listen(PORT)
After looking at the other answers I finally got this working, but what I ended up having to do was a little different.
[package.json] Use express-sse:
The exact version of express-sse is very important. The latest version tries to use res.flush(), but it fails and crashes the HTTP server.
"express-sse": "0.5.1",
[Terminal] Install express-sse:
npm install
[app.js] Use the router:
app.use(app.baseUri, require('./lib/server-sent-events').router);
[server-sent-events.js] Create sse library:
The call to pause() is the equivalent of flush(), which was removed from express. It ensures you'll keep getting messages as they are sent.
var express = require('express');
const SSE = require('express-sse');
const sse = new SSE();

var router = express.Router();
router.get('/sse', sse.init)

module.exports = {
    send,
    router
};

async function send(message) {
    // toProperCase() is presumably a custom String helper defined elsewhere in the author's project
    sse.send(message.toProperCase(), 'message');
    await pause();
}

function pause() {
    return new Promise((resolve, reject) => {
        setImmediate(resolve)
    })
}
[your-router.js] Use the sse library and call send:
var express = require('express');
var serverSentEvents = require('../lib/server-sent-events');

var router = express.Router();
router.get('/somepath', yourhandler);

module.exports = router;

async function yourhandler (req, res, next) {
    await serverSentEvents.send('hello sse!'); // <<<<<
}
[your-client-side.js] Receive the sse updates:
I recommend keeping the event.data.replace(/"/g,'') because express-sse tacks enclosing quotes onto the data, and we don't want those.
const eventSource = new EventSource('http://yourserver/sse');

eventSource.onmessage = function(event) {
    document.getElementById("result").innerHTML = event.data.replace(/"/g, '') + '...';
};
You should be able to do such a thing using Socket.IO. First, you will need to install it with npm install socket.io. From there, in your code you will want var io = require('socket.io');
You can see more in-depth examples in the ones given by Socket.IO.
You could use something like this on the server:
var express = require('express');
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io')(server); // the original '../..' path only works inside the socket.io examples repo
var port = process.env.PORT || 3000;

server.listen(port, function () {
    console.log('Server listening at port ' + port);
});

app.use(express.static(__dirname + '/public'));

io.on('connection', function (socket) {
    socket.emit('EVENT_NAME', { data }); // 'data' is whatever payload you want to push
});
And something like this on the client:
<script src="socket_src_file_path_here"></script>
<script>
    var socket = io('http://localhost');
    socket.on('EVENT_NAME', function (data) {
        console.log(data);
        // Do whatever you want with the data on the client
    });
</script>

Cannot save a remote image with node.js and request

I want to save an image with node.js and the request library. So far I have this simple code:
var request = require('request');
var fs = require('fs');

request('http://upload.wikimedia.org/wikipedia/commons/8/8c/JPEG_example_JPG_RIP_025.jpg', function(error, response, body) {
    // further logic that decides
    // whether or not the image will be saved
    fs.writeFile('downloaded.jpg', body, function(){});
});
But it doesn't work. The image always arrives corrupt. I assume it's an encoding error but I cannot figure out how to fix this.
var request = require('request'),
    fs = require('fs'),
    url = 'http://upload.wikimedia.org/wikipedia/commons/8/8c/JPEG_example_JPG_RIP_025.jpg';

request(url, {encoding: 'binary'}, function(error, response, body) {
    fs.writeFile('downloaded.jpg', body, 'binary', function (err) {});
});
var fs = require('fs'),
    request = require('request'),
    url = 'http://upload.wikimedia.org/wikipedia/commons/8/8c/JPEG_example_JPG_RIP_025.jpg';

request(url).pipe(fs.createWriteStream('downloaded.jpg'));
Here's how I did it using streams and pipe (I was using Express, but you may not need that):
var express = require('express');
var app = express();
var filesystem = require('fs');
var https = require('https');

var download = function(url, dest, cb) {
    var file = filesystem.createWriteStream(dest);
    var request = https.get(url, function(httpResponse) {
        httpResponse.pipe(file);
        file.on('finish', function() {
            console.log("piping to file finished");
            file.close(cb); // close() is async, call cb after close completes.
        });
    }).on('error', function(err) { // Handle errors
        filesystem.unlink(dest, function() {}); // Delete the file async. (But we don't check the result)
        if (cb) cb(err.message);
    });
};

app.get('/image', (req, res) => {
    download('https://lastfm-img2.akamaized.net/i/u/64s/15cc734fb0e045e3baac02674d2092d6.png',
        'porcupine.png',
        () => { console.log("downloaded to porcupine.png"); });
});

app.listen(3000); // listen on port 3000 so localhost:3000/image works, as described below
When I run it with node server.js and hit the URL localhost:3000/image, it downloads and saves the file to porcupine.png in the base directory.
