To send a PDF file from a Node.js server to a client I use the following code:
const pdf = printer.createPdfKitDocument(docDefinition);
const chunks = [];
pdf.on("data", (chunk) => {
chunks.push(chunk);
});
pdf.on("end", () => {
const pdfBuffered = `data:application/pdf;base64, ${Buffer.concat(chunks).toString("base64")}`;
res.setHeader("Content-Type", "application/pdf");
res.setHeader("Content-Length", pdfBuffered.length);
res.send(pdfBuffered);
});
pdf.end();
Everything works correctly; the only issue is that the stream is consumed with a callback approach rather than async/await.
I've found a possible solution:
const { pipeline } = require("stream/promises");
async function run() {
await pipeline(
fs.createReadStream('archive.tar'),
zlib.createGzip(),
fs.createWriteStream('archive.tar.gz')
);
console.log('Pipeline succeeded.');
}
run().catch(console.error);
But I can't figure out how to adapt my initial code to use stream/promises.
You can manually wrap your PDF code in a promise like this and then use it as a function that returns a promise:
function sendPDF(docDefinition) {
return new Promise((resolve, reject) => {
const pdf = printer.createPdfKitDocument(docDefinition);
const chunks = [];
pdf.on("data", (chunk) => {
chunks.push(chunk);
});
pdf.on("end", () => {
const pdfBuffered =
`data:application/pdf;base64, ${Buffer.concat(chunks).toString("base64")}`;
resolve(pdfBuffered);
});
pdf.on("error", reject);
pdf.end();
});
}
sendPDF(docDefinition).then(pdfBuffer => {
res.setHeader("Content-Type", "application/pdf");
res.setHeader("Content-Length", pdfBuffer.length);
res.send(pdfBuffer);
}).catch(err => {
console.log(err);
res.sendStatus(500);
});
Because there are many data events, you can't promisify just the data portion. You will still have to listen for each data event and collect the data.
You can only convert a callback-API to async/await if the callback is intended to only be executed once.
The one you found online works because you're just waiting for the whole stream to finish before the callback runs once. What you've got here are callbacks that execute multiple times, on every incoming chunk of data.
There are other resources you can look at to make streams nicer to consume, like RxJS, or this upcoming ECMAScript proposal to add observables to the language. Both of these are designed to handle the scenario where a callback can execute multiple times, something that async/await cannot do.
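For what it's worth, modern Node versions also expose readable streams as async iterables, so you can consume the multi-chunk stream with for await...of and still keep sequential-looking code. A rough sketch, assuming the same printer and docDefinition as in the question:
async function buildPdfBuffer(docDefinition) {
  const pdf = printer.createPdfKitDocument(docDefinition);
  pdf.end(); // finalize the document so the stream emits all of its chunks and ends

  const chunks = [];
  // Readable streams are async iterable, so each chunk still arrives
  // separately, but the loop itself can be awaited by the caller.
  for await (const chunk of pdf) {
    chunks.push(chunk);
  }
  return Buffer.concat(chunks);
}
The res.setHeader/res.send part would then stay in the route handler, which simply awaits this function.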
I am trying to pass data out of my GET request, from inside the resp.on callbacks. I want to use var url to make a separate GET request, from which I will parse data again. I am able to console.log variables from inside the callback, but not return them (or access them from outside). This seems to be a scoping or async issue.
const https = require('https');
https.get('https://collectionapi.metmuseum.org/public/collection/v1/objects', (resp) => {
let data = '';
// A chunk of data has been received.
resp.on('data', (chunk) => {
data += chunk;
});
// The whole response has been received. Print out the result.
resp.on('end', () => {
var json_data = JSON.parse(data);
var total = json_data.total
var random_objectID = Math.floor(Math.random()*total)
var url = 'https://collectionapi.metmuseum.org/public/collection/v1/objects/' + random_objectID
console.log(url);
});
}).on("error", (err) => {
console.log("Error: " + err.message);
})
//'url' becomes unknown here. I want to pass it to another get request.
It's both an async and a scope issue!
If you declare var url; in the outermost scope, you'll be able to set it inside that callback as intended, but since that assignment happens asynchronously, you won't be able to use the value outside the callback unless you check it after the callback has completed.
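To make the timing pitfall concrete, here is a minimal sketch (not a fix): the final console.log runs before the response has arrived, so url is still undefined at that point.
const https = require('https');

var url; // declared in the outer scope

https.get('https://collectionapi.metmuseum.org/public/collection/v1/objects', (resp) => {
  let data = '';
  resp.on('data', (chunk) => { data += chunk; });
  resp.on('end', () => {
    url = 'https://collectionapi.metmuseum.org/public/collection/v1/objects/' + JSON.parse(data).total;
    console.log(url); // defined here, once the response has finished
  });
});

console.log(url); // undefined: this line runs before the 'end' event fires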
Alternatively, you could wrap the whole thing in a promise, e.g.
const https = require('https');
new Promise((resolve,reject)=>{
let targetUrl = 'https://collectionapi.metmuseum.org/public/collection/v1/objects';
https.get(targetUrl,resp=>{
// ...
resp.on('end', () => {
// ...
resolve(url);
});
});
}).then(url=>{
// do stuff with that URL
});
If your goal is to automate fetching data from web resources, I'd recommend checking out the request module, which also has a promisified variant.
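If you'd rather stay with the built-in https module, here is a rough sketch of how the second request could be chained onto the first with async/await; the getJson helper is just an illustrative name, not part of any library:
const https = require('https');

// Hypothetical helper: GET a URL and resolve with the parsed JSON body.
function getJson(targetUrl) {
  return new Promise((resolve, reject) => {
    https.get(targetUrl, (resp) => {
      let data = '';
      resp.on('data', (chunk) => { data += chunk; });
      resp.on('end', () => resolve(JSON.parse(data)));
    }).on('error', reject);
  });
}

(async () => {
  const base = 'https://collectionapi.metmuseum.org/public/collection/v1/objects';
  const list = await getJson(base);
  const randomObjectID = Math.floor(Math.random() * list.total);
  const object = await getJson(base + '/' + randomObjectID); // the second request
  console.log(object);
})().catch(console.error);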
So I started this simple project in Node.js where the client sends a POST request containing a Windows CMD command in its body.
The server receives the POST request, extracts the CMD command, runs it, and responds with the output of that command.
This worked fine when I had one request being sent out, but then I set up a system to repeatedly ask the user for a command and send the POST request with the inputted command as its body.
Here is the client-side code (server side not included, as it's irrelevant):
var http = require("http");
var readline = require("readline");
var rl = readline.createInterface(process.stdin, process.stdout);
var options = {
hostname: "localhost",
port: 3001,
path: "/",
method: "POST",
headers: {
"Content-Type": "text/plain", // all command must be string
"Content-Length": 0 // changes based on what the command is
}
};
function requestCommand(hostname, port, command) {
// Some of the options of the request need to be changed based on
// what the command is
options.hostname = hostname;
options.port = port;
options.headers["Content-Length"] = command.length;
var req = http.request(options, function(res) {
console.log(`Got response. Status code: ${res.statusCode}`);
var resData = "";
res.setEncoding("utf-8");
res.on("data", function(chunk){
resData += chunk
});
res.on("end", function(){
return resData;
})
})
req.on("error", function (e){
return "\n\n\n ---------------\nERROR OCCURED IN THE REQUEST.\nREQUEST NOT SENT\n--------------" + e.stack;
})
req.write(command);
req.end();
}
rl.setPrompt("What command would you like to request from the server?: ");
rl.prompt();
rl.on("line", function(data){
if (data === "exit") {
console.log("\n\n\Exiting appplication...\n\n");
process.exit();
} else {
console.log("processing..");
var out = requestCommand("localhost", 3001, data);
console.log(`\n\n${out}\n\n`);
rl.prompt();
}
});
If I run this without creating the server first, instead of getting the error message, I get undefined.
Currently, I think this is because the requestCommand function ends before the error is handled (before the error event is emitted and the callback that returns the error message is called). Since the callback passed to http.request is asynchronous, the function returns before the server responds or an error is emitted, and hence returns nothing (undefined).
So my question is: can I keep that function running until the asynchronous work is done?
Or, if this is not possible, is there a different approach? How would you send requests to this server upon a certain event fired by the user, such as data input?
EDIT: I am really not interested in using 3rd-party modules, as I know I could. This project is pointless and only here for me to learn, so I am using core modules only. More specifically, I am only using http for making the requests (not sync-request, etc.).
requestCommand has to return a promise that gets resolved with resData and rejected on error:
function requestCommand(hostname, port, command) {
return new Promise((resolve, reject) => { // immediately return a Promise that can be resolved/rejected later
// Some of the options of the request need to be changed based on
// what the command is
options.hostname = hostname;
options.port = port;
options.headers["Content-Length"] = command.length;
var req = http.request(options, function(res) {
console.log(`Got response. Status code: ${res.statusCode}`);
var resData = "";
res.setEncoding("utf-8");
res.on("data", function(chunk){
resData += chunk
});
res.on("end", function(){
resolve(resData); // resolve, return won't work here
});
});
req.on("error", function (e){
reject(e); // reject here, don't handle it
});
req.write(command);
req.end();
});
}
That way, you can simply make the request handler async and then await the function call:
rl.on("line", async function(data){ // make the function async so that we can work with promises more easily
if (data === "exit") {
console.log("\n\n\Exiting appplication...\n\n");
process.exit();
} else {
console.log("processing..");
try {
var out = await requestCommand("localhost", 3001, data); // "await" lets async code look like sync code, but it is still async
console.log(`\n\n${out}\n\n`);
} catch(e) { // the reject() call will end up here
console.log("invalid command " + e.stack); // handle here
}
rl.prompt();
}
});
I am new to JavaScript and just got stuck on async callbacks in Node.js.
I first set up the Facebook webhook and then handle the webhook POST request.
Here is my code:
routes.js
**To set up the Facebook webhook**
var facebook_handler = require('../controllers/botkit').handler
module.exports = function (app) {
// public pages=============================================
// root
app.get('/', function (req, res) {
res.render('home')
})
app.get('/webhook', function (req, res) {
// Check to see which webhook password (FACEBOOK_VERIFY_TOKEN) to check for, from incoming request.
if (process.env.PORT || process.env.VCAP_APP_PORT) {
FB_VERIFY_TOKEN = process.env.FACEBOOK_VERIFY_TOKEN
} else {
FB_VERIFY_TOKEN = process.env.FACEBOOK_VERIFY_TOKEN_DEV
}
// This enables subscription to the webhooks
if (req.query['hub.mode'] === 'subscribe' && req.query['hub.verify_token'] === FB_VERIFY_TOKEN) {
res.send(req.query['hub.challenge'])
}
else {
res.send('Incorrect verify token')
}
})
app.post('/webhook', function (req, res) {
console.log("\n CALL HANDLER FUNCTION ---- \n");
facebook_handler(req.body)
console.log("call handler done");
res.send('okay')
})
}
From the code above, I receive the webhook POST request, get the details of the FB message, and then process it in another file, Botkit.js.
Botkit.js
var request = require('request');
require('dotenv').load();
var handler = function (obj) {
console.log("Message received from FB \n");
if (obj.entry ) {
for (var e = 0; e < obj.entry.length; e++) {
for (var m = 0; m < obj.entry[e].messaging.length; m++) {
var facebook_message = obj.entry[e].messaging[m]
test_message = facebook_message.message.text;
translatorEnglish(test_message) // call the Watson translator API to translate the received Facebook message
}
}
}
}
The code above processes the webhook POST request and calls the translator function (the translation POST request).
Translator Function
var translationusername = "1234"
var translationpassword = "1234"
var transURL = "https://gateway.watsonplatform.net/language-translator/api/v2/translate";
translatorEnglish = function(test_message) {
console.log("this should be called when translator called:" +test_message);
var parameters = {
text: test_message,
model_id: 'es-en'
};
languageTranslator.translate(
parameters,
function(error, response, body) {
if (error) {
console.log(error)
} else {
english_message = response.translations[0].translation
console.log("The response should be:" + english_message);
translate = false
//console.log(JSON.stringify(response, null, 2));
}
}
);
};
The problem is that the translation POST request is not executed until the call handler, i.e. the webhook POST request, has completed. The translation POST request always executes after the webhook POST is completed.
Is there a way I can execute the translator POST request within the webhook POST request, before the webhook POST request completes?
Something like this
Webhook POST --> execute --> Translation POST execute and complete ---> Webhook POST complete
First of all, the languageTranslator.translate function is asynchronous, so it returns immediately and does not wait for its callback to run. If you want to be sure the callback has finished, you have to run the next commands after it finishes, by using new callbacks or a promise's .then function.
Secondly, your translatorEnglish function neither takes a callback nor returns a promise, so nothing can wait for its async call (languageTranslator.translate) to finish. You should change it to either promise or callback style (you could also use async/await, which is built on promises).
Thirdly, translatorEnglish will be an async operation either way, and you call it inside a for loop in the handler function, which means you may have more than one async call to wait for; that is hard to manage with plain callbacks. So I suggest using promises here, and to wait for all of them to complete you can use the
Promise.all function.
Don't forget: you should make all of those functions return promises and call res.send inside a facebook_handler(...).then(...) callback.
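A rough sketch of that shape, reusing the names from the question (languageTranslator is assumed to be configured as in the original code; error handling kept minimal, and this is only one way to wire it up):
// Promise-returning version of translatorEnglish
function translatorEnglish(test_message) {
  return new Promise((resolve, reject) => {
    languageTranslator.translate({ text: test_message, model_id: 'es-en' },
      (error, response) => {
        if (error) return reject(error);
        resolve(response.translations[0].translation);
      });
  });
}

// handler now returns a promise that settles once every translation is done
var handler = function (obj) {
  const translations = [];
  if (obj.entry) {
    for (const entry of obj.entry) {
      for (const messaging of entry.messaging) {
        translations.push(translatorEnglish(messaging.message.text));
      }
    }
  }
  return Promise.all(translations);
};
In routes.js the route would then wait for it: facebook_handler(req.body).then(() => res.send('okay')).catch((err) => res.sendStatus(500)).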
Extras:
If you want to convert a callback-based function to a promise without changing its implementation (promisify), you can use Node 8's util.promisify or Bluebird's Promise.promisify.
Take a look at this question and answer too.
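For example, a minimal util.promisify illustration with a standard error-first callback API (fs.readFile here, purely as a stand-in); note that util.promisify expects a callback whose success value is a single argument:
const util = require('util');
const fs = require('fs');

const readFileAsync = util.promisify(fs.readFile);

readFileAsync('config.json', 'utf8')
  .then((contents) => console.log(contents))
  .catch((err) => console.error(err));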
How could I make the 'request' module in Node.js load things in a synchronous fashion? The best advice I've seen is to somehow use the callback to keep the function from returning until it is done. I am trying to use the 'request' function inline in code (things need to be processed based on that data, and they can't be placed in callbacks).
So how could I use the callback of the 'request' module to keep it from returning until it has finished loading the resource?
What I'm doing is running a loop that downloads two values from an API, and then has to do some math based on those values. While the math could be done in callbacks... the loop would advance without the values it needs to perform the next operation. (So stopping the loop from advancing until the data is ready would solve the issue)
/* loop */ {
/* URL Generation */
request( {url: base + u_ext}, function( err, res, body ) {
var split1 = body.split("\n");
var split2 = split1[1].split(", ");
ucomp = split2[1];
});
request( {url: base + v_ext}, function( err, res, body ) {
var split1 = body.split("\n");
var split2 = split1[1].split(", ");
vcomp = split2[1];
});
/* math which needs to be after functions get variables and before loop advances */
}
The short answer is: don't. (...) You really can't. And that's a good thing
I'd like to set the record straight regarding this:
Node.js does support synchronous requests. It wasn't designed to support them out of the box, but there are a few workarounds if you are keen enough; here is an example:
var request = require('sync-request'),
res1, res2, ucomp, vcomp;
try {
res1 = request('GET', base + u_ext);
res2 = request('GET', base + v_ext);
// sync-request returns a Response object, so read the body as a string first
ucomp = res1.getBody('utf8').split('\n')[1].split(', ')[1];
vcomp = res2.getBody('utf8').split('\n')[1].split(', ')[1];
doSomething(ucomp, vcomp);
} catch (e) {
console.error(e);
}
When you pop the hood open on the sync-request library, you can see that it runs a synchronous child process in the background, and as the sync-request README explains, it should be used very judiciously: this approach locks the main thread, and that is bad for performance.
However, in some cases there is little or no advantage to be gained by writing an asynchronous solution (compared to the certain harm you are doing by writing code that is harder to read).
This is the default assumption held by many of the HTTP request libraries in other languages (Python, Java, C#, etc.), and that philosophy can also be carried over to JavaScript. A language is a tool for solving problems, after all, and sometimes you may not want to use callbacks if the benefits outweigh the disadvantages.
For JavaScript purists this may smack of heresy, but I'm a pragmatist, so I can clearly see that the simplicity of synchronous requests helps in some of the following scenarios:
Test Automation (tests are usually synchronous by nature).
Quick API mash-ups (i.e. hackathons, proof-of-concept work, etc.).
Simple examples to help beginners (before and after).
Be warned that the code above should not be used for production. If you are going to run a proper API then use callbacks, use promises, use async/await, or whatever, but avoid synchronous code unless you want to incur a significant cost for wasted CPU time on your server.
In 2018, you can program in the "usual" style using async and await in Node.js.
Below is an example that wraps the request callback in a promise and then uses await to get the resolved value.
const request = require('request');
// wrap a request in a promise
function downloadPage(url) {
return new Promise((resolve, reject) => {
request(url, (error, response, body) => {
if (error) return reject(error);
if (response.statusCode != 200) {
return reject('Invalid status code <' + response.statusCode + '>');
}
resolve(body);
});
});
}
// now to program the "usual" way
// all you need to do is use async functions and await
// for functions returning promises
async function myBackEndLogic() {
try {
const html = await downloadPage('https://microsoft.com')
console.log('SHOULD WORK:');
console.log(html);
// try downloading an invalid url
await downloadPage('http:// .com')
} catch (error) {
console.error('ERROR:');
console.error(error);
}
}
// run your async function
myBackEndLogic();
Though the asynchronous style may be the nature of Node.js, and generally you should not do this, there are times when you want to do it.
I'm writing a handy script to check an API and don't want to clutter it with callbacks.
Javascript cannot execute synchronous requests, but C libraries can.
https://github.com/dhruvbird/http-sync
Aredridel's answer is relatively good (I upvoted it), but I think it lacks the loop equivalent. This should help you:
Sync code equivalent:
while (condition) {
var data = request(url);
<math here>
}
return result;
Async code for serial execution:
function continueLoop() {
if (!condition) return cb(result);
request(url, function(err, res, body) {
<math here>
continueLoop()
})
}
continueLoop()
You should take a look at a library called async and try to use its async.series call for your problem.
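For instance, here is a small sketch of async.series with the two requests from the question (base, u_ext and v_ext are assumed to be defined as in the original loop):
const async = require('async');
const request = require('request');

async.series([
  (cb) => request({ url: base + u_ext }, (err, res, body) => cb(err, body)),
  (cb) => request({ url: base + v_ext }, (err, res, body) => cb(err, body))
], (err, results) => {
  if (err) return console.error(err);
  // results[0] and results[1] hold the two response bodies, in order
  const ucomp = results[0].split('\n')[1].split(', ')[1];
  const vcomp = results[1].split('\n')[1].split(', ')[1];
  // do the math here, once both values are available
});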
See sync-request: https://github.com/ForbesLindesay/sync-request
Example:
var request = require('sync-request');
var res = request('GET', 'http://example.com');
console.log(res.getBody());
You can use retus to make cross-platform synchronous HTTP requests. It's a library based on sync-request but with a few comfort features I added:
const retus = require("retus");
const { body } = retus("https://google.com");
//=> "<!doctype html>..."
That's it!
These days if you want to do things sequentially, you'd do something like:
for (const task of tasks) {
const response = await request(...);
}
This code (above) runs the requests in order, but is not synchronous (meaning it does not block the main thread). If you really need synchronous, then most of these libraries boil down to something like:
const child = require('child_process');
const buffer = child.execSync(`curl https://example.com/foo.json`);
const data = JSON.parse(buffer.toString('utf8'));
You can do something very similar with the request library, but this example uses the built-in const https = require('https'); (or const http = require('http');), which ships with Node.
Here is an example:
const https = require('https');
const http_get1 = {
host : 'www.googleapis.com',
port : '443',
path : '/youtube/v3/search?arg=1',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
};
const http_get2 = {
host : 'www.googleapis.com',
port : '443',
path : '/youtube/v3/search?arg=2',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
};
let data1 = '';
let data2 = '';
function master() {
if(!data1)
return;
if(!data2)
return;
console.log(data1);
console.log(data2);
}
const req1 = https.request(http_get1, (res) => {
console.log(res.headers);
res.on('data', (chunk) => {
data1 += chunk;
});
res.on('end', () => {
console.log('done');
master();
});
});
const req2 = https.request(http_get2, (res) => {
console.log(res.headers);
res.on('data', (chunk) => {
data2 += chunk;
});
res.on('end', () => {
console.log('done');
master();
});
});
req1.end();
req2.end();
The easiest solution I came up with for myself was to use Node's native child_process module and simply call exec with a simple curl command inside... all the dependencies and the asynchronicity hassle me way too much for the simple, rare cases in which I just want to "save the HTTP response to a variable in Node".
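A bare-bones sketch of that idea (the URL is just a placeholder, and execSync is used so the result can be assigned directly to a variable):
const { execSync } = require('child_process');

// Run curl in a child process and block until it finishes.
// -s silences curl's progress output so only the response body is captured.
const body = execSync('curl -s https://example.com/data.json').toString('utf8');
const data = JSON.parse(body);
console.log(data);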
As of this writing, all answers are either:
Sync but with a control flow (such as using the async library)
Sync but blocking [which means the main thread, and hence the event loop, is stopped, which is bad performance-wise], such as retus or sync-request.
Out of date, such as http-request.
Check out the non-blocking sync-request, based on deasync.
Or see some of the answers in this thread, such as this, which use deasync directly.
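For completeness, here is a rough sketch of the deasync approach mentioned above; it uses deasync.loopWhile to pump the event loop until the callback has fired (treat it as an illustration, not a drop-in solution):
const deasync = require('deasync');
const https = require('https');

function getSync(url) {
  let body, error, done = false;

  https.get(url, (res) => {
    let data = '';
    res.on('data', (chunk) => { data += chunk; });
    res.on('end', () => { body = data; done = true; });
  }).on('error', (err) => { error = err; done = true; });

  // Keep processing events until the request above has completed.
  deasync.loopWhile(() => !done);

  if (error) throw error;
  return body;
}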
The short answer is: don't. If you want code that reads linearly, use a library like seq. But just don't expect synchronous. You really can't. And that's a good thing.
There's little or nothing that can't be put in a callback. If they depend on common variables, create a closure to contain them. What's the actual task at hand?
You'd want to have a counter, and only call the callback when the data is there:
var waiting = 2;
request( {url: base + u_ext}, function( err, res, body ) {
var split1 = body.split("\n");
var split2 = split1[1].split(", ");
ucomp = split2[1];
if(--waiting == 0) callback();
});
request( {url: base + v_ext}, function( err, res, body ) {
var split1 = body.split("\n");
var split2 = split1[1].split(", ");
vcomp = split2[1];
if(--waiting == 0) callback();
});
function callback() {
// do math here.
}
Update 2018: Node.js supports the async/await keywords in recent versions, and with libraries that represent asynchronous processes as promises, you can await them. You get linear, sequential flow through your program, and other work can progress while you await. It's pretty well built and worth a try.
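In that style, the two requests from the question could be awaited together, for example (a sketch assuming a small promise wrapper around request, here called get, and the same base, u_ext and v_ext as above):
const request = require('request');

// Hypothetical helper: a promisified request that resolves with the body.
const get = (url) => new Promise((resolve, reject) =>
  request({ url }, (err, res, body) => (err ? reject(err) : resolve(body)))
);

async function main() {
  const [uBody, vBody] = await Promise.all([get(base + u_ext), get(base + v_ext)]);
  const ucomp = uBody.split('\n')[1].split(', ')[1];
  const vcomp = vBody.split('\n')[1].split(', ')[1];
  // do the math here, with both values in hand
}

main().catch(console.error);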