My Node.js server receives a stream of data from an external API.
Right now I only respond to my client after the data has been received completely, like this:
async function getFile(req, res) {
  const { id } = req.body;
  const file = await get(process.env.FILE_API_URL + id);
  res.send(file);
}
But instead of waiting to receive the whole stream, I would like to stream it to the client as soon as I have some data. Kind of like this,
function getFile(req, res) {
  const { id } = req.body;
  const stream = get(process.env.FILE_API_URL + id);
  stream.on('data', (data) => {
    res.write(data);
  });
  stream.on('end', () => res.end());
}
How can I implement this?
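In case it helps, here is a minimal sketch of one way to do it, assuming Express (since the original uses req.body and res.send) and assuming the upstream API is plain HTTPS, using Node's built-in https module in place of the unspecified get helper. The upstream response object is already a readable stream, so it can be piped straight to res:

const https = require('https');

function getFile(req, res) {
  const { id } = req.body;
  https.get(process.env.FILE_API_URL + id, (upstream) => {
    // forward the upstream status code, then pipe chunks to the client as they arrive
    res.status(upstream.statusCode);
    upstream.pipe(res);
  }).on('error', () => {
    res.status(502).end();
  });
}

If your get helper already returns a readable stream (as the second snippet assumes), then stream.pipe(res) alone is enough; piping also handles backpressure, which the manual data/write approach does not.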
I have an issue where I am writing data from an array to a JSON file every 10 seconds on an Express server, and this causes the main page to reload whenever the writeFile function is called.
The main page makes a GET request to retrieve the Entry objects in the array. However, I don't understand why it reloads when the array isn't being changed in any way; it is only being read in order to write the JSON file.
In index.js (server code):
const server = require('./app');
const { readFromFile, writeToFile } = require('./helpers/readWrite');

const port = process.env.PORT || 3000;

readFromFile();

// start the server
server.listen(port, () => {
  console.log(`Listening at http://localhost:${port}`);
  // set the server to save to file every 10 seconds
  setInterval(writeToFile, 10000);
});
In readWrite.js:
const fs = require('fs'); // needed for fs.writeFile below
const Entry = require('../models/entry'); // file containing the array that the data is written from

function writeToFile() {
  const entriesDataStringified = JSON.stringify(Entry.all); // stringify the entriesData array

  // write to the json file, overwriting any data already in the file
  fs.writeFile('./data/entries.json', entriesDataStringified, (err) => {
    // check for error when writing file
    if (err) {
      console.log(err);
    } else {
      console.log('File successfully written');
    }
  });
}
Retrieving entries on client side:
async function getPosts(e) {
  try {
    const response = await fetch(`http://localhost:3000/search/page/${pageNum}`);
    const data = await response.json();
    console.log(data);
    data.entries.forEach(post => {
      if (!postArray.includes(post)) {
        newestArray.push(post);
        postArray.push(post);
        emojiArray.push({ id: post.id, emojis: { loveCount: false, laughCount: false, likeCount: false } });
      }
    });
    console.log(emojiArray);
    Post.drawAll();
    pageNum++;
  } catch (err) {
    console.log(err);
  }
}
Thanks.
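Not a diagnosis of the reload itself, but since the array is only being read, one small change that at least avoids redundant writes is to guard writeToFile with a dirty flag. The dirty variable and markDirty helper below are assumptions added for illustration, not part of the original code:

const fs = require('fs');
const Entry = require('../models/entry');

let dirty = false; // flip this wherever Entry.all actually changes

function markDirty() {
  dirty = true;
}

function writeToFile() {
  if (!dirty) return; // nothing changed since the last write, so skip the disk I/O
  dirty = false;
  fs.writeFile('./data/entries.json', JSON.stringify(Entry.all), (err) => {
    if (err) {
      console.log(err);
    } else {
      console.log('File successfully written');
    }
  });
}

If a file watcher such as nodemon is restarting the server whenever entries.json is written, excluding the data directory from what it watches is also worth checking.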
I ended up with a server that provides pictures in base64 format. The problem is that when I want to send an array of base64 images, the response is too large.
I want to send the array to the frontend in small chunks.
I use Sequelize to retrieve the data from the database.
Does anyone have an idea how to do this?
This is how my endpoint looks so far:
router.get('/download', async (req, res, next) => {
  try {
    const getAllStoneData = await StoneData.findAll();

    const convertBlobToString = getAllStoneData.map((oneStone) => {
      const getBuffer = oneStone.dataValues.blobImage;
      const convertToString = getBuffer.toString('utf8');
      const copyOneStone = {
        ...oneStone.dataValues,
        blobImage: convertToString,
      };
      return copyOneStone;
    });

    let chunk = [];
    while (convertBlobToString.length > 0) {
      chunk = convertBlobToString.splice(0, 1);
      res.write(chunk);
    }
    res.end();
  } catch (error) {
    res.status(400).send({ error });
  }
});
When I run this I get the error:
"UnhandledPromiseRejectionWarning: Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client"
I have an azure function that sends a file to an SFTP server. It may be called multiple times so to save time connecting to the SFTP server I would like to reuse the SFTP connection.
const Client = require("ssh2-sftp-client");
const sftp = new Client();
let sftpConnected = false;
const sendToSFTP = async (data, location, context) => {
  await setConn(context);
  try {
    await sftp.put(data, location);
  } catch (err) {
    context.log.error('sftp put error: ' + err);
  }
};

const setConn = async (context) => {
  if (sftpConnected) return;
  try {
    await sftp.connect({
      host: 'myserver',
      username: 'user',
      passphrase: 'pwd',
    });
    sftpConnected = true;
  } catch (err) {
    context.log.error('sftp connect error: ' + err);
  }
};

sftp.on('close', () => {
  sftpConnected = false;
  sftp.end();
});

sftp.on('end', () => {
  sftpConnected = false;
  sftp.end();
});
exports.sendToSFTP = sendToSFTP;
This works when testing locally, but when deployed to Azure the close and end listeners do not seem to be called when the Azure Function goes idle. When I try to use the SFTP connection after a period of time, the connection is gone and the call to the SFTP server times out.
Has anyone solved this issue?
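Not a definitive fix, but one defensive approach is to stop trusting the close/end events and instead probe the connection before each use, reconnecting if the probe fails. A minimal sketch, assuming ssh2-sftp-client and using its cwd() call as a cheap health check (any inexpensive operation would do):

const setConn = async (context) => {
  if (sftpConnected) {
    try {
      await sftp.cwd();      // cheap probe of the existing connection
      return;                // still alive, reuse it
    } catch (probeErr) {
      sftpConnected = false; // the idle instance dropped the socket
    }
  }
  try {
    await sftp.connect({
      host: 'myserver',
      username: 'user',
      passphrase: 'pwd',
    });
    sftpConnected = true;
  } catch (err) {
    context.log.error('sftp connect error: ' + err);
  }
};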
I'm learning Node.js. I have a client form from which I send some data to my server, where it is processed by a module makeRequest. The module returns a promise, and I want to send its resolved value back to the client in the response.
So I thought I could do something like this:
let post = '';

const server = http.createServer(function (req, res) {
  const readHtml = fs.createReadStream(__dirname + '/view/form.html', 'utf8');
  readHtml.pipe(res);
});

server.on('request', processPost);

function processPost(request, response) {
  let body = '';
  if (request.method === 'POST') {
    request.on('data', function (data) {
      body += data;
      if (body.length > 1e6) {
        request.connection.destroy();
      }
    });
    request.on('end', function () {
      post = JSON.parse(body);
      makeRequest(post.text)
        .then(data => {
          response.end(data);
        })
        .catch(err => {
          console.log(err);
        });
    });
  }
}

server.listen(3000, 'localhost');
To me this means that as soon as I retrieve the processed data, I immediately send it back to the client with response.end(data).
But the data does not reach the client when I do it like this. It does if I call response.end(data) right after request.on('end'), but that is the synchronous way, so by the time that code runs there is no data yet anyway.
What can I do to make it work the way I want?
If, as said in the discussion below the question, you just want a default page when the URL is accessed in a browser, make it conditional:
const server = http.createServer(function (req, res) {
  if (req.method == "GET") {
    const readHtml = fs.createReadStream(__dirname + '/view/form.html', 'utf8');
    readHtml.pipe(res);
  }
});
This way both GETs and POSTs are handled correctly.
My advice would be to use a more organized approach with a framework; Express should do fine. That gives you a cleaner way to define different routes and request types.
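For illustration, a minimal Express sketch of the same flow; makeRequest and form.html are taken from the question, while the ./makeRequest require path and the routes themselves are assumptions:

const express = require('express');
const path = require('path');
const makeRequest = require('./makeRequest'); // path is an assumption

const app = express();
app.use(express.json()); // parse JSON request bodies

// GET: serve the form page
app.get('/', (req, res) => {
  res.sendFile(path.join(__dirname, 'view', 'form.html'));
});

// POST: run makeRequest and respond once its promise resolves
app.post('/', async (req, res) => {
  try {
    const data = await makeRequest(req.body.text);
    res.send(data);
  } catch (err) {
    console.log(err);
    res.status(500).end();
  }
});

app.listen(3000, () => console.log('Listening on http://localhost:3000'));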
I am currently building a web-based SSH client. I am using Node and the ssh2 module to connect to a Linux machine. The issue is identifying when the server is waiting for a response from the client, for example a "sudo" password prompt.
this.onWsMessage = function (packet) {
  log('Message Received from ');
  if (cmd != '') {
    log('----------------------------------------->' + cmd);
    source[this.getId()].sshStream.write(cmd + '\n');
  }
};

var client = clients[cid];

sshClient
  .on('ready', function () {
    console.log('CONNECTED');
    sshClient.shell({ pty: true }, function (err, stream) {
      _shellHandler(err, stream, client);
    });
  })
  .on('error', function (err) {
    console.log('ERROR', err);
  })
  .connect(serverInfo);
}

function _shellHandler(err, stream, client) {
  source[client.getId()].sshStream = stream;
  stream
    .on('close', function () {
      log('ssh connection close');
      //sshClient.end();
      this._client.end();
    })
    .on('data', function (data) {
      console.log(stream);
      var msg = data.toString().replace(msgPattern, '');
      this.sendUTF(msg, 'ssh');
    }.bind(client));
}
I have been going through the documentation and I was unable to identify any event that is triggered in this situation.
There is no easy way to do this. Basically you have to either buffer the stream output and continually search that buffer for the expected (sudo) prompts, or use a module like streamsearch to search the stream for a particular string without explicitly buffering and continually searching yourself.
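As a rough illustration of the buffering approach, here is a minimal sketch that watches the shell stream for a sudo password prompt; the watchForSudoPrompt helper and the prompt regex are assumptions and will need adjusting to the prompt your servers actually print:

// call this with the `stream` obtained from sshClient.shell()
function watchForSudoPrompt(stream, onPrompt) {
  let buffer = '';
  const promptPattern = /\[sudo\] password for .+?:\s*$/;

  stream.on('data', (data) => {
    buffer += data.toString();
    if (promptPattern.test(buffer)) {
      buffer = '';   // reset so the same prompt is not reported twice
      onPrompt();    // e.g. tell the browser that hidden input is expected
    }
    // keep the buffer from growing without bound
    if (buffer.length > 4096) {
      buffer = buffer.slice(-1024);
    }
  });
}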