JS await function with empty Promise - javascript

I have a question. I somewhat understand how Promises work, but I would like to wait for a function to finish even if it doesn't return anything. I have similar code, but it doesn't wait for b to finish before starting the extra steps on a.
For example, in this case I want to create tables in a SQLite db, then load data and run some tests. The problem is that it starts running the tests before the data has finished loading, so the arrays created in the loadData method come back empty. I would actually prefer not to return those arrays at all, but to always wait for that process to complete before running the tests.
async function initialize() {
  try {
    console.log("Beginning connection");
    db = await connectToDB();
    console.log(`Database: ${db}`);
    db.serialize(function() {
      console.log("Creating tables");
      db.run(queries.__parent_company);
      db.run(queries.parent_company);
      db.run(queries.__sales_rep);
      db.run(queries.sales_rep);
      db.run(queries.advertiser);
      console.log("Tables created");
    });
    let response = await loadData();
    console.log("Response", response);
    testData();
  }
  catch (e) {
    throw e;
  }
}
function loadData() {
  return new Promise(async (resolve, reject) => {
    console.log("Beginning data insertion");
    try {
      let insertedAdvertisers = [];
      let insertedSales = [];
      let insertedCompanies = [];
      data.rows.forEach(async row => {
        let advertiser = await loadAdvertisers(row);
        let sale = await loadSalesRep(row);
        let company = await loadParentCompany(row);
        insertedAdvertisers.push(advertiser);
        insertedSales.push(sale);
        insertedCompanies.push(company);
        console.log("HERE", insertedAdvertisers, insertedSales, insertedCompanies);
      });
      resolve({ insertedAdvertisers, insertedSales, insertedCompanies });
    }
    catch (e) {
      console.log(e);
      reject(e);
    }
  });
}

Talking about Promises, I advise you to read about the event loop and how it works.
What happens in your code is that JS sees the promise, parks the work that depends on it in the callback queue, keeps executing the rest of your code, and only pushes that work back onto the stack once the promise settles.
So if you want to execute certain logic after an async function finishes, you need to provide it as a callback or use .then():
async function a() {
  b().then(() => {
    // your code that you want to be executed after b finishes
  });
}
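In the question's loadData, the root cause is that the async callback passed to forEach is never awaited, so resolve() runs before any row has finished loading. Here is a minimal sketch of one way to wait for every row, assuming loadAdvertisers, loadSalesRep and loadParentCompany each return a promise (an illustration, not the only fix):

async function loadData() {
  console.log("Beginning data insertion");
  const insertedAdvertisers = [];
  const insertedSales = [];
  const insertedCompanies = [];
  // for...of (unlike forEach) lets await pause the loop on each row
  for (const row of data.rows) {
    insertedAdvertisers.push(await loadAdvertisers(row));
    insertedSales.push(await loadSalesRep(row));
    insertedCompanies.push(await loadParentCompany(row));
  }
  return { insertedAdvertisers, insertedSales, insertedCompanies };
}

Since an async function already returns a promise, the explicit new Promise wrapper isn't needed; await loadData() inside initialize() will then genuinely wait before testData() runs.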


How to execute a variable number of async calls (coming dynamically at runtime) serially?

I am making a Chrome extension (MV3). Based on user activity, the content.js passes a message to the background.js, which then calls an async function to add data to Google Docs using the Docs API.
I want each request to execute only after the previous one has finished running. I am using chrome.runtime.sendMessage to send a message from content.js and don't see a way of calling background.js serially from there, so I need a way of executing them one by one in background.js itself. The order of these requests is also important (though if the order gets changed by one or two places, I think that would still be okay from a user perspective).
I tried something and it is working, but I am not sure if I am missing some edge cases, because I was unable to find this approach in any other answers:
Semaphore-like queue in javascript?
Run n number of async function before calling another method in nodejs
JavaScript: execute async function one by one
The approach I used is: I use a stack-like structure to store requests, use setInterval to check for any pending requests, and execute them serially.
content.js:
chrome.runtime.sendMessage({message});
background.js:
let addToDocInterval = "";
let addToDocCalls = [];
async function addToDoc(msg) {
  // Await calls to doc API
}

async function addToDocHelper() {
  if (addToDocCalls.length === 0)
    return;
  clearInterval(addToDocInterval);
  while (addToDocCalls.length > 0) {
    let msg = addToDocCalls.shift();
    await addToDoc(msg);
  }
  addToDocInterval = setInterval(addToDocHelper, 1000);
}

chrome.runtime.onMessage.addListener((msg) => {
  // Some other logic
  addToDocCalls.push(msg);
});

addToDocInterval = setInterval(addToDocHelper, 1000);
Is this approach correct? Or is there any better way to do this?
I'd suggest changing several things.
Don't use timers polling the array. Just initiate processing the array anytime you add a new item to the array.
Keep a flag indicating whether you're already processing the array so you don't start duplicate processing.
Use a class to encapsulate this functionality into an object.
Encapsulate the addToDocCalls array and the code that adds to it so your class manages it, and outside code just calls a function that adds an item and triggers the processing. Basically, callers don't have to know how the insides work; they just call helper.addMsg(msg) and the class instance does all the work.
Here's an implementation:
async function addToDoc(msg) {
  // Await calls to doc API
}

class docHelper {
  constructor() {
    this.addToDocCalls = [];
    this.loopRunning = false;
  }
  addMsg(msg) {
    // add item to the queue and initiate processing of the queue
    this.addToDocCalls.push(msg);
    this.process();
  }
  async process() {
    // don't run this loop twice if we're already running it
    if (this.loopRunning) return;
    try {
      this.loopRunning = true;
      // process all items currently in addToDocCalls
      while (this.addToDocCalls.length > 0) {
        let msg = this.addToDocCalls.shift();
        await addToDoc(msg);
      }
    } finally {
      this.loopRunning = false;
    }
  }
}

const helper = new docHelper();

chrome.runtime.onMessage.addListener((msg) => {
  // Some other logic
  helper.addMsg(msg);
});
So, process() will run until the array is empty. Any interim calls to addMsg() while process() is running will add more items to the array and call process() again, but the loopRunning flag keeps it from starting a duplicate processing loop. If addMsg() is called while process() is not running, it will start the processing loop.
P.S. You also need to decide what sort of error handling you want if addToDoc(msg) rejects. This code protects the this.loopRunning flag if it rejects, but doesn't actually handle the rejection error. In code like this that is processing a queue, often all you can really do is log the error and move on, but you need to decide what the proper course of action is on a rejection.
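For example, one minimal option (a sketch of one choice, not the only one) is to catch per message inside the loop so a single failure doesn't stop the rest of the queue:

  async process() {
    if (this.loopRunning) return;
    try {
      this.loopRunning = true;
      while (this.addToDocCalls.length > 0) {
        const msg = this.addToDocCalls.shift();
        try {
          await addToDoc(msg);
        } catch (e) {
          // log and move on so the rest of the queue still gets processed
          console.error("addToDoc failed for message", msg, e);
        }
      }
    } finally {
      this.loopRunning = false;
    }
  }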
You don't need to use setInterval, and you do not even need a while loop.
let addToDocCalls = [];
let running = false;

async function addToDoc(msg) {
  // Await calls to doc API
}

async function addToDocHelper() {
  if (running || addToDocCalls.length === 0)
    return;
  running = true;
  let msg = addToDocCalls.shift();
  await addToDoc(msg);
  running = false;
  addToDocHelper();
}

chrome.runtime.onMessage.addListener((msg) => {
  // Some other logic
  addToDocCalls.push(msg);
  addToDocHelper();
});
The code should be self-explanatory. There is no magic.
Here is a generic way to run async tasks sequentially (and add more tasks to the queue at any time).
const tasks = [];
let taskInProgress = false;

async function qTask(newTask) {
  if (newTask) tasks.push(newTask);
  if (tasks.length === 0) return;
  if (taskInProgress) return;
  const nextTask = tasks.shift();
  taskInProgress = true;
  try {
    await nextTask();
  } finally {
    taskInProgress = false;
    // use setTimeout so the call stack can't overflow
    setTimeout(qTask, 0);
  }
}

// the code below is just used to demonstrate the code above works
async function test() {
  console.log(`queuing first task`);
  qTask(async () => {
    await delay(500); // pretend this task takes 0.5 seconds
    console.log('first task started');
    throw 'demonstrate error does not ruin task queue';
    console.log('first task finished');
  });
  for (let i = 0; i < 5; i++) {
    console.log(`queuing task ${i}`);
    qTask(async () => {
      await delay(200); // pretend this task takes 0.2 seconds
      console.log(`task ${i} ran`);
    });
  }
  await delay(1000); // wait 1 second
  console.log(`queuing extra task`);
  qTask(async () => {
    console.log('extra task ran');
  });
  await delay(3000); // wait 3 seconds
  console.log(`queuing last task`);
  qTask(async () => {
    console.log('last task ran');
  });
}

test();

function delay(ms) {
  return new Promise(resolve => {
    setTimeout(resolve, ms);
  });
}

Await only valid in async function, waiting for a document to write [duplicate]

I wrote this code in lib/helper.js:
var myfunction = async function(x, y) {
  ....
  return [variableA, variableB];
}
exports.myfunction = myfunction;
Then I tried to use it in another file :
var helper = require('./helper.js');
var start = function(a, b) {
  ....
  const result = await helper.myfunction('test', 'test');
}
exports.start = start;
I got an error:
await is only valid in async function
What is the issue?
The error is not referring to myfunction but to start.
async function start() {
  ....
  const result = await helper.myfunction('test', 'test');
}

// My function
const myfunction = async function(x, y) {
  return [
    x,
    y,
  ];
};

// Start function
const start = async function(a, b) {
  const result = await myfunction('test', 'test');
  console.log(result);
};

// Call start
start();
I'll use the opportunity of this question to advise you about a known anti-pattern when using await, which is return await.
WRONG
async function myfunction() {
  console.log('Inside of myfunction');
}

// Here we wait for myfunction to finish
// and then return a promise that will be awaited as well.
// It's useless to wait for myfunction to finish before returning;
// we can simply return a promise that will be resolved later.
// useless async here
async function start() {
  // useless await here
  return await myfunction();
}

// Call start
(async () => {
  console.log('before start');
  await start();
  console.log('after start');
})();
CORRECT
async function myfunction() {
  console.log('Inside of myfunction');
}

// Here we simply return the promise produced by myfunction;
// whoever awaits start() will wait for it to resolve.
// Also note that we don't use the async keyword on the function because
// we can simply return the promise returned by myfunction.
function start() {
  return myfunction();
}

// Call start
(async () => {
  console.log('before start');
  await start();
  console.log('after start');
})();
Also, know that there is a special case where return await is correct and important: when using try/catch.
Are there performance concerns with `return await`?
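To make that special case concrete, here is a small sketch (mightFail is a hypothetical failing function, not from the question). With return await, the rejection surfaces inside the function, so the local try/catch handles it; with a plain return, the promise is handed straight back and the local catch never runs:

async function mightFail() {
  throw new Error('boom');
}

async function withAwait() {
  try {
    return await mightFail(); // rejection happens "inside" this function
  } catch (e) {
    return 'recovered';       // so this catch handles it
  }
}

async function withoutAwait() {
  try {
    return mightFail();       // promise is returned as-is
  } catch (e) {
    return 'recovered';       // never reached; the caller gets the rejection
  }
}

withAwait().then(console.log);                      // "recovered"
withoutAwait().catch(e => console.log(e.message));  // "boom"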
To use await, its executing context needs to be async in nature.
As said above, you need to define the nature of the executing context where you want to await a task, before anything else.
Just put async before the function declaration in which your async task will execute.
var start = async function(a, b) {
  // Your async task will execute with await
  await foo();
  console.log('I will execute after foo get either resolved/rejected');
}
Explanation:
In your question, you are importing a method which is asynchronous in nature and will execute asynchronously. But the place where you are trying to await that async method is a different execution context, which you need to mark async in order to use await.
var helper = require('./helper.js');
var start = async function(a, b) {
  ....
  const result = await helper.myfunction('test', 'test');
}
exports.start = start;
Wondering what's going on under the hood?
await consumes promise/future/task-returning methods or functions, and async marks a method or function as capable of using await.
Also, if you are familiar with promises, await is effectively doing the same thing as a promise chain: it creates a chain and executes your next task in the resolve callback.
For more info you can refer to MDN DOCS.
When I got this error, it turned out I had a call to the map function inside my "async" function, so the error message was actually referring to the arrow function passed to map not being marked "async". I got around the issue by taking the "await" call out of the map function and finding some other way of getting the expected behavior.
var myfunction = async function(x, y) {
  ....
  someArray.map(someVariable => { // <- This was the function giving the error
    return await someFunction(someVariable);
  });
}
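If you do want the awaited values back from map, one common pattern (a sketch, assuming someFunction returns a promise) is to mark the callback itself async and wait for all of the resulting promises:

var myfunction = async function(x, y) {
  // map now returns an array of promises, one per element
  const results = await Promise.all(
    someArray.map(async someVariable => {
      return await someFunction(someVariable);
    })
  );
  return results;
};

Note that the calls run in parallel this way; use a plain for...of loop instead if they must run one at a time.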
I had the same problem and the following block of code was giving the same error message:
repositories.forEach( repo => {
  const commits = await getCommits(repo);
  displayCommit(commits);
});
The problem is that I was using await inside the callback passed to forEach, but that callback was not marked async. So I had to add the word async to it, like this: async (repo), and it started working:
repositories.forEach( async (repo) => {
  const commits = await getCommits(repo);
  displayCommit(commits);
});
If you are writing a Chrome Extension and you get this error for your code at root, you can fix it using the following "workaround":
async function run() {
  // Your async code here
  const beers = await fetch("https://api.punkapi.com/v2/beers");
}
run();
Basically you have to wrap your async code in an async function and then call the function without awaiting it.
The current implementation of async/await only supports the await keyword inside of async functions. Change your start function signature so you can use await inside start.
var start = async function(a, b) {
}
For those interested, the proposal for top-level await is currently in Stage 2: https://github.com/tc39/proposal-top-level-await
async/await is a mechanism for handling promises. There are two ways we can do it:
functionWhichReturnsPromise()
  .then(result => {
    console.log(result);
  })
  .catch(err => {
    console.log(err);
  });
Or we can use await to wait for the promise to settle first, which means it is either rejected or resolved.
Now if we want to use await (waiting for a promise to fulfil) inside a function, it's mandatory that the containing function be an async function, because we are waiting for a promise to be fulfilled asynchronously. Makes sense, right?
async function getRecipesAw() {
  const IDs = await getIds;                // returns promise
  const recipe = await getRecipe(IDs[2]);  // returns promise
  return recipe;                           // returning a promise
}

getRecipesAw().then(result => {
  console.log(result);
}).catch(error => {
  console.log(error);
});
If you have called an async function inside forEach, update it to a for loop.
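A minimal sketch of what that conversion looks like (items and doAsyncWork are hypothetical names used only for illustration):

// instead of: items.forEach(async item => { await doAsyncWork(item); })
async function processAll(items) {
  for (const item of items) {
    await doAsyncWork(item); // each iteration waits before the next starts
  }
}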
Found the code below in this nice article: HTTP requests in Node using Axios
const axios = require('axios')

const getBreeds = async () => {
  try {
    return await axios.get('https://dog.ceo/api/breeds/list/all')
  } catch (error) {
    console.error(error)
  }
}

const countBreeds = async () => {
  const breeds = await getBreeds()
  if (breeds.data.message) {
    console.log(`Got ${Object.entries(breeds.data.message).length} breeds`)
  }
}

countBreeds()
Or using Promise:
const axios = require('axios')

const getBreeds = () => {
  try {
    return axios.get('https://dog.ceo/api/breeds/list/all')
  } catch (error) {
    console.error(error)
  }
}

const countBreeds = async () => {
  const breeds = getBreeds()
    .then(response => {
      if (response.data.message) {
        console.log(
          `Got ${Object.entries(response.data.message).length} breeds`
        )
      }
    })
    .catch(error => {
      console.log(error)
    })
}

countBreeds()
In later Node.js versions (>= 14), top-level await is allowed with { "type": "module" } specified in package.json, or with the file extension .mjs.
https://www.stefanjudis.com/today-i-learned/top-level-await-is-available-in-node-js-modules/
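A minimal sketch of that setup (assuming the file is named index.mjs, or is a .js file in a package with "type": "module"):

// index.mjs — with ES modules, await is allowed at the top level
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

console.log('waiting...');
await delay(500);           // no surrounding async function required
console.log('done');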
This works within one file.
It looks like await is only applied within the local function, which has to be async.
I am also struggling now with a more complex structure spread between different files. That's why I made this small test code.
Edit: I forgot to say that I'm working with Node.js. Sorry, I don't have a clear question; I just thought it could be helpful for the discussion.
function helper(callback) {
  function doA() {
    var array = ["a ", "b ", "c "];
    var alphabet = "";
    return new Promise(function(resolve, reject) {
      array.forEach(function(key, index) {
        alphabet += key;
        if (index == array.length - 1) {
          resolve(alphabet);
        }
      });
    });
  }

  function doB() {
    var a = "well done!";
    return a;
  }

  async function make() {
    var alphabet = await doA();
    var appreciate = doB();
    callback(alphabet + appreciate);
  }

  make();
}

helper(function(message) {
  console.log(message);
});
A common problem in Express:
The warning can refer to the function, or where you call it.
Express items tend to look like this:
app.post('/foo', ensureLoggedIn("/join"), (req, res) => {
  const facts = await db.lookup(something)
  res.redirect('/')
})
Notice the => arrow function syntax for the function.
The problem is NOT actually in the db.lookup call, but right here in the Express item.
Needs to be:
app.post('/foo', ensureLoggedIn("/join"), async function (req, res) {
  const facts = await db.lookup(something)
  res.redirect('/')
})
Basically, nix the => and add async function.
"await is only valid in async function"
But why? 'await' explicitly turns an async call into a synchronous call, and therefore the caller cannot be async (or asyncable) - at least, not because of the call being made at 'await'.
Yes, await / async was a great concept, but the implementation is completely broken.
For whatever reason, the await keyword has been implemented such that it can only be used within an async method. This is in fact a bug, though you will not see it referred to as such anywhere but right here. The fix for this bug would be to implement the await keyword such that it can only be used TO CALL an async function, regardless of whether the calling function is itself synchronous or asynchronous.
Due to this bug, if you use await to call a real asynchronous function somewhere in your code, then ALL of your functions must be marked as async and ALL of your function calls must use await.
This essentially means that you must add the overhead of promises to all of the functions in your entire application, most of which are not and never will be asynchronous.
If you actually think about it, using await in a function should require the function containing the await keyword TO NOT BE ASYNC - this is because the await keyword is going to pause processing in the function where the await keyword is found. If processing in that function is paused, then it is definitely NOT asynchronous.
So, to the developers of javascript and ECMAScript - please fix the await/async implementation as follows...
await can only be used to CALL async functions.
await can appear in any kind of function, synchronous or asynchronous.
Change the error message from "await is only valid in async function" to "await can only be used to call async functions".

JavaScript event handler working with an async data store API causing race condition

I need to update some data every time a certain browser event fires (for example, when a browser tab closes):
chrome.tabs.onRemoved.addListener(async (tabId) => {
  let data = await getData(); // async operation
  ... // modify data
  await setData(data); // async operation
});
The problem is that when multiple such events trigger in quick succession, the async getData() can return a stale result in a later invocation of the event handler before setData() has had a chance to finish in an earlier one, leading to inconsistent results.
If the event handler could execute synchronously then this problem wouldn't occur, but getData() and setData() are both async operations.
Is this a race condition? What's the recommended pattern for handling this type of logic?
--- Update ---
To provide more context, getData() and setData() are simply promisified versions of some Chrome storage API:
async function getData() {
  return new Promise(resolve => {
    chrome.storage.local.get(key, function(data) {
      // callback
      resolve(data);
    });
  });
}

async function setData(value) {
  return new Promise(resolve => {
    chrome.storage.local.set({ key: value }, function() {
      // callback
      resolve();
    });
  });
}
I wrapped the API call in a Promise for readability purposes, but I think it's an async op either way?
You have a fairly classic race condition for a data store with an asynchronous API, and the race condition is even worse if you use asynchronous operations in the processing of the data (between getData() and setData()). The asynchronous operations allow another event to run in the middle of your processing, ruining the atomicity of your sequence of operations.
Here's an idea for how to put the incoming tabId in a queue and make sure you're only processing one of these events at a time:
const queue = [];

chrome.tabs.onRemoved.addListener(async (newTabId) => {
  queue.push(newTabId);
  if (queue.length > 1) {
    // already in the middle of processing one of these events
    // just leave the id in the queue, it will get processed later
    return;
  }

  async function run() {
    // we will only ever have one of these "in-flight" at the same time
    try {
      let tabId = queue[0];
      let data = await getData(); // async operation
      ... // modify data
      await setData(data); // async operation
    } finally {
      queue.shift(); // remove this one from the queue
    }
  }

  while (queue.length) {
    try {
      await run();
    } catch (e) {
      console.log(e);
      // decide what to do if you get an error
    }
  }
});
This could be made more generic so it can be reused in more than one place (each with its own queue), like this:
function enqueue(fn) {
  const queue = [];
  return async function(...args) {
    queue.push(args); // add to end of queue
    if (queue.length > 1) {
      // already processing an item in the queue,
      // leave this new one for later
      return;
    }

    async function run() {
      try {
        const nextArgs = queue[0]; // get oldest item from the queue
        await fn(...nextArgs);     // process this queued item
      } finally {
        queue.shift(); // remove the one we just processed from the queue
      }
    }

    // process all items in the queue
    while (queue.length) {
      try {
        await run();
      } catch (e) {
        console.log(e);
        // decide what to do if you get an error
      }
    }
  }
}

chrome.tabs.onRemoved.addListener(enqueue(async function(tabId) {
  let data = await getData(); // async operation
  ... // modify data
  await setData(data); // async operation
}));
JS async/await does not really turn JS code synchronous.
What you could do is block on getData using Promise.all.
So,
chrome.tabs.onRemoved.addListener(async (tabId) => {
  ... // turns into a composition
  await setData(Promise.all([getData])[0]); // async composition
});
You could do an async composition with a block on the event loop: when the event is triggered, the VM will have a list of events, and a block on the awaited getData.
In fact there is no true async composition; it is just a trick with the VM to block the event loop and await the result of an operation, because the VM processes this as a list and lists don't wait.
Be careful that your code stays readable when using compositions.

Await for file to load, then run a function on each 'line' but wait for the return on each line

I have been trying to understand Promises and I'm hitting a brick wall.
==Order I want the code to run==
I need a .txt file to load each line into an array.
WAIT for this to happen.
Run a function on each entry that returns an array.
WAIT for each index of the array to be processed before doing the next.
==My Functions==
Call this function to start the program.
async function start() {
  var data = await getData();
  console.log(data);
  for (var i = 0; i < data.length; i++) {
    console.log(await searchGoogle(data[i]));
  }
}
'await' for the data from getData
async function getData() {
  return new Promise(function(resolve, reject) {
    fs.readFile('./thingsToGoogle.txt', function(err, data) {
      if (err) throw err;
      var array = data.toString().split("\n");
      resolve(array);
    });
  });
}
Then call searchGoogle on each index in the array.
async function searchGoogle(toSearch) {
  (async () => {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();
    await page.goto('https://www.google.com/');
    await page.type('input[name=q]', toSearch);

    try {
      console.log('Setting Search' + toSearch);
      await page.evaluate(() => {
        let elements = document.getElementsByClassName('gNO89b');
        for (let element of elements)
          element.click();
      });
      await page.waitForNavigation();
    } catch (err) {
      console.log(err)
    }

    try {
      console.log("Collecting Data");
      const [response] = await Promise.all([
        page.waitForNavigation(),
        await page.click('.rINcab'),
      ]);
    } catch (err) {
      console.log("Error2: " + err)
    }

    let test = await page.$$('.LC20lb');
    // console.log(test);
    allresults = [];
    for (const t of test) {
      const label = await page.evaluate(el => el.innerText, t);
      if (label != "") {
        allresults.push(label);
      }
    }
    await browser.close();
    resolve(allresults);
  })();
}
The problem is that this does not work. It does not wait for the file to load.
(Screenshot of the Node.js output.)
Hopefully the screenshot has uploaded, but you can see it stacking the searchGoogle function console.logs:
console.log('Setting..')
console.log('Setting..')
console.log('Collecting..')
console.log('Collecting..')
When it should be
console.log('Setting..')
console.log('Collecting..')
console.log('Setting..')
console.log('Collecting..')
This is the first time I'm really dealing with promises. I have done a lot of reading up on them and written bits of code to understand them; however, when I try to apply this knowledge I struggle. Hope someone can help.
-Peachman-
Queue with concurrency limit (using p-queue)
You need a queue with a concurrency limit. You will read every single line and add it to the queue. We will be using the readline and p-queue modules for this.
First, create a queue with concurrency of 1.
const {default: PQueue} = require('p-queue');
const queue = new PQueue({concurrency: 1});
Then, create our reader instance.
const fs = require('fs');
const readline = require('readline');
const rl = readline.createInterface({
  input: fs.createReadStream('your-input-file.txt')
});
For every line of the file, add an entry to the queue.
rl.on('line', (line) => {
  console.log(`Line from file: ${line}`);
  queue.add(() => searchGoogle(line));
});
That's it! If you want to process 10 lines at once, just change the concurrency value. It will still read one line at a time, but the queue will limit how many searchGoogle calls run at once.
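If you also need to run something once every queued search has finished, p-queue exposes an onIdle() promise you can await. A small sketch against the same rl and queue as above (an addition for illustration, not part of the original answer):

rl.on('close', async () => {
  // resolves once the queue is empty and all pending searches are done
  await queue.onIdle();
  console.log('All lines processed');
});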
Optional Fixes: Async Await
Your code has the following structure,
async function yourFunction() {
  (async () => {
    const browser = await puppeteer.launch();
    // ... rest of the code
  })()
}
While this might run as intended, you will have a hard time debugging because you create an anonymous function every time you run yourFunction, and the outer function settles before the inner work finishes since the inner IIFE's promise is never awaited or returned.
The following is enough.
async function yourFunction() {
  const browser = await puppeteer.launch();
  // ... rest of the code
}
Here's a way to process them that lets you process N URLs at a time where you can adjust the value of N. My guess is that you want it set to a value of between 5 and 20 in order to keep your CPU busy, but not use too many server resources.
Here's an outline of how it works:
It uses the line-by-line module to read a file line by line; unlike the built-in readline interface, this module pauses line events when you call .pause(), which is important in this implementation.
It maintains a numInFlight counter that tells you how many lines are in the midst of processing.
You set a maxInFlight constant to the maximum number of lines you want to be processed in parallel.
It maintains a resultCntr that helps you keep results in the proper order.
It creates the readline interface and establishes a listener for the line event. This will start the stream flowing with line events.
On each line event, we increment our numInFlight counter. If we have reached the maximum number allowed in flight, we pause the readline stream so it won't produce any more line events. If we haven't reached the max in flight yet, then more line events will flow until we do reach the max.
We pass that line off to your existing searchGoogle() function.
When that line is done processing, we save the result in the appropriate spot in the array, decrement the numInFlight counter and resume the stream (in case it was previously paused).
We check if we're all done (by checking if numInFlight is 0 and if we've reached the end of our file). If we are done, resolve the master promise with the results.
If we're not all done, then there will either be more line events coming or more searchGoogle() functions in flight that will finish, both of which will check again to see if we're done.
Note that the way this is designed to work is that errors on any given URL are just put into the result array (the error object is in the array) and processing continues on the rest of the URLs with an eventual resolved promise. Errors while reading the input file will terminate processing and reject the return promise.
Here's the code:
const fs = require('fs');
const Readline = require('line-by-line');

function searchAll(file) {
  return new Promise(function(resolve, reject) {
    const rl = new Readline(file);
    // set maxInFlight to something between 5 and 20 to optimize performance by
    // running multiple requests in flight at the same time without
    // overusing memory and other system resources.
    const maxInFlight = 1;
    let numInFlight = 0;
    let resultCntr = 0;
    let results = [];
    let doneReading = false;

    function checkDone(e) {
      if (e) {
        reject(e);
      } else if (doneReading && numInFlight === 0) {
        resolve(results);
      }
    }

    rl.on('line', async (url) => {
      if (url) {
        let resultIndex = resultCntr++;
        try {
          ++numInFlight;
          if (numInFlight >= maxInFlight) {
            // stop flowing line events when we hit maxInFlight
            rl.pause();
          }
          let result = await searchGoogle(url);
          // store results in order
          results[resultIndex] = result;
        } catch (e) {
          // store error object as result
          results[resultIndex] = e;
        } finally {
          --numInFlight;
          rl.resume();
          checkDone();
        }
      }
    }).on('end', () => {
      // all done reading here, may still be some processing in flight
      doneReading = true;
      checkDone();
    }).on('error', (e) => {
      doneReading = true;
      checkDone(e);
    });
  });
}
FYI, you can set maxInFlight to a value of 1 and it will process the URLs one at a time, but the whole point of writing this type of function is so that you can likely get better performance by setting it to a value higher than 1 (I'm guessing 5-20).

Why doesn't the code after await run right away? Isn't it supposed to be non-blocking?

I have a hard time understanding how async and await work behind the scenes. I know we have promises, which make our code non-blocking: using the then function we can place all the work we need to do after the promise resolves, and the work we want to run in parallel with the promise we simply write outside our then function. Hence the code becomes non-blocking. However, I don't understand how async/await makes code non-blocking.
async function myAsyncFunction() {
  try {
    let data = await myAPICall('https://jsonplaceholder.typicode.com/posts/1');
    // It will not run this line until it resolves await.
    let result = 2 + 2;
    return data;
  } catch (ex) {
    return ex;
  }
}
See the code above. I cannot move forward until the API call is resolved. If it makes my code blocking, how is it any better than promises? Or is there something I missed about async and await? Where do I put the code that does not depend on the await call, so it can keep working without the await holding up execution?
I am adding a Promise example that I would like to replicate with async/await:
function myPromiseAPI() {
  myAPICall('https://jsonplaceholder.typicode.com/posts/1')
    .then(function (data) {
      // data
    });

  // runs parallel
  let result = 2 + 2;
}
Just as its name implies, the await keyword will cause the function to "wait" until the corresponding promise resolves before executing the next line. The whole point of await is to provide a way to wait for an asynchronous operation to complete before continuing.
The difference between this and blocking code is that the world outside the function can continue executing while the function is waiting for the asynchronous operations to finish.
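As a small illustration (not from the original answer; delay is a hypothetical helper), code outside the async function keeps running while the function is paused at await:

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

async function fetchSomething() {
  console.log('inside: before await');
  await delay(1000);                  // this function pauses here...
  console.log('inside: after await'); // ...and resumes a second later
}

fetchSomething();
console.log('outside: still running'); // logs immediately, before "after await"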
async and await are just syntactic sugar on top of promises. They allow you to write code that looks a lot like ordinary synchronous code even though it uses promises under the covers. If we translated your example there to something that explicitly worked with the promises, it would look something like:
function myAsyncFunction() {
  return myAPICall('https://jsonplaceholder.typicode.com/posts/1')
    .then(function (data) {
      let result = 2 + 2;
      return data;
    })
    .catch(function (ex) {
      return ex;
    });
}
As we can see here, the let result = 2 + 2; line is inside a .then() handler, which means it's not going to execute until myAPICall() has resolved. It's the same when you use await. await just abstracts away the .then() for you.
One thing to bear in mind (and I think the point you're looking for) is that you don't have to use await right away. If you wrote your function like this, then you could execute your let result = 2 + 2; line right away:
const timeout =
  seconds => new Promise(res => setTimeout(res, seconds * 1000));

function myAPICall() {
  // simulate 1 second wait time
  return timeout(1).then(() => 'success');
}

async function myAsyncFunction() {
  try {
    console.log('starting');
    // just starting the API call and storing the promise for now. not waiting yet
    let dataP = myAPICall('https://jsonplaceholder.typicode.com/posts/1');
    let result = 2 + 2;
    // Executes right away
    console.log('result', result);
    // wait now
    let data = await dataP;
    // Executes after one second
    console.log('data', data);
    return data;
  } catch (ex) {
    return ex;
  }
}

myAsyncFunction();
After some clarification, I can see that what you really wanted to know about is how to avoid having to wait for two async operations one by one and instead have them execute in parallel. Indeed, if you use one await after the other, the second won't start executing until the first has finished:
const timeout =
  seconds => new Promise(res => setTimeout(res, seconds * 1000));

function myAPICall() {
  // simulate 1 second wait time
  return timeout(1).then(() => 'success');
}

async function myAsyncFunction() {
  try {
    console.log('starting');
    let data1 = await myAPICall('https://jsonplaceholder.typicode.com/posts/1');
    // logs after one second
    console.log('data1', data1);
    let data2 = await myAPICall('https://jsonplaceholder.typicode.com/posts/2');
    // logs after one more second
    console.log('data2', data2);
  } catch (ex) {
    return ex;
  }
}

myAsyncFunction();
To avoid this, what you can do is start both async operations by executing them without awaiting them, assigning their promises to some variables. Then you can await both promises:
const timeout =
  seconds => new Promise(res => setTimeout(res, seconds * 1000));

function myAPICall() {
  // simulate 1 second wait time
  return timeout(1).then(() => 'success');
}

async function myAsyncFunction() {
  try {
    console.log('starting');
    // both lines execute right away
    let dataP1 = myAPICall('https://jsonplaceholder.typicode.com/posts/1');
    let dataP2 = myAPICall('https://jsonplaceholder.typicode.com/posts/2');
    let data1 = await dataP1;
    let data2 = await dataP2;
    // logs after one second
    console.log('data1', data1);
    console.log('data2', data2);
  } catch (ex) {
    return ex;
  }
}

myAsyncFunction();
One alternative way to do this is to use Promise.all() with array destructuring:
const timeout =
  seconds => new Promise(res => setTimeout(res, seconds * 1000));

function myAPICall() {
  // simulate 1 second wait time
  return timeout(1).then(() => 'success');
}

async function myAsyncFunction() {
  try {
    console.log('starting');
    // both myAPICall invocations execute right away
    const [data1, data2] = await Promise.all([
      myAPICall('https://jsonplaceholder.typicode.com/posts/1'),
      myAPICall('https://jsonplaceholder.typicode.com/posts/2'),
    ]);
    // logs after one second
    console.log('data1', data1);
    console.log('data2', data2);
  } catch (ex) {
    return ex;
  }
}

myAsyncFunction();
