Second async/await function is never called - javascript

I am trying to unzip a file first and then wait for the unzipping to complete before I loop through each extracted file and upload it to an S3 bucket. The first function, unzipPromise, runs fine and everything is unzipped into the proper directory, but uploadS3Promise never runs at all. I get no errors through this process; it just unzips the file and never touches the uploadS3Promise function.
function unzipInput(file, client, project_number, oldpath, newpath) {
  path = `zips/${client}/${project_number}/${file}/`;

  function unzipPromise() {
    return new Promise((resolve, reject) => {
      fse.mkdirsSync(path);
      fs.rename(oldpath, newpath, err => {
        if (err) {
          throw err;
        }
      });
      fs.createReadStream(newpath).pipe(unzip.Extract({ path }));
    });
  }

  function uploadS3Promise() {
    console.log("running");
    return new Promise((resolve, reject) => {
      // fs.unlinkSync(newpath);
      fs.readdirSync(newpath).forEach(file => {
        uploadToS3(file, client, project_number, path);
        console.log(file, "test");
      });
      if (err) reject(err);
      else resolve("success");
    });
  }

  // New code with async:
  (async () => {
    try {
      await unzipPromise();
      await uploadS3Promise();
    } catch (e) {
      console.error(e);
    }
  })();
}

uploadS3Promise doesn't run because the code is still awaiting unzipPromise. Execution will not continue past the await until that promise is resolved or rejected.
So in your code ...
function unzipPromise() {
  ...
  resolve(...)
  ...
}
On an unrelated note, I think it would be more readable not to end function names in "Promise"; just call them unzip and uploadS3. We don't usually name functions after their return types, right? We never write intIndexOf, and so on.

You're supposed to call resolve once unzipping is done, or reject if an error occurred.
And since streams are EventEmitters, you can listen to their events and interact with them:
const stream = fs.createReadStream(newpath).pipe(unzip.Extract({ path }))
stream
  .on('error', (err) => reject(err))
  .on('finish', () => resolve())
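Putting that together, here is a minimal sketch of unzipPromise that settles when the extraction stream finishes, assuming the same fse, fs, and unzip modules used in the question:
function unzipPromise() {
  return new Promise((resolve, reject) => {
    fse.mkdirsSync(path);
    fs.rename(oldpath, newpath, err => {
      if (err) return reject(err);
      // start extracting only after the rename has completed
      fs.createReadStream(newpath)
        .pipe(unzip.Extract({ path }))
        .on('error', reject)
        .on('finish', () => resolve());
    });
  });
}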

Personally, I'd use a .then to break the process down a bit.
unzipPromise().then(res => {
  console.log('resolved promise 1');
  uploadS3Promise();
}, rej => {
  console.log('rejected promise 1, sorry!');
});
Also, the promise inside unzipPromise is never resolved or rejected.
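For completeness, uploadS3Promise has a related problem: nothing ever defines err, so the if (err) check would throw a ReferenceError. A minimal sketch of one way to settle it, keeping the synchronous readdir loop from the question and assuming uploadToS3 handles its own errors:
function uploadS3Promise() {
  console.log("running");
  return new Promise((resolve, reject) => {
    try {
      fs.readdirSync(newpath).forEach(file => {
        uploadToS3(file, client, project_number, path);
        console.log(file, "test");
      });
      resolve("success");
    } catch (err) {
      reject(err);
    }
  });
}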

Related

Retry getting a file after waiting a set timeframe in Node

I am trying to download a report that is generated daily on the first request to the report's endpoint.
When the report is being created, the endpoint returns a HTTP 202.
I have the following code to handle some errors and redirects, as well as to "sleep" for 60 seconds before trying the endpoint again. Unfortunately, the second console log telling me the download completed is never called, even though the file does download successfully and the file stream closes.
// Main function
run()

async function run() {
  await getReport()
  await processReport()
}

async function getReport() {
  console.log(`Downloading ${reportFileName}`)
  await downloadFile(url, reportFileName)
  console.log(`Downloaded ${reportFileName} successfully.`) // This is never called?
}

async function downloadFile (url, targetFile) {
  return await new Promise((resolve, reject) => {
    https.get(url, async response => {
      const code = response.statusCode ?? 0

      if (code >= 400) {
        return reject(new Error(response.statusMessage))
      }

      // handle redirects
      if (code > 300 && code < 400 && !!response.headers.location) {
        resolve(downloadFile(response.headers.location, targetFile))
        return
      }

      // handle file creation pending
      if (code == 202) {
        console.log(`Report: ${reportFileName} is still being generated, trying again in ${timeToSleepMs/1000} seconds...`)
        await sleep(timeToSleepMs)
        resolve(downloadFile(url, targetFile))
        return
      }

      // make download directory regardless of if it exists
      fs.mkdirSync(outputPath, { recursive: true }, (err) => {
        if (error) throw error;
      });

      // save the file to disk
      const fileWriter = fs
        .createWriteStream(`${outputPath}/${targetFile}`)
        .on('finish', () => {
          resolve({})
        })

      response.pipe(fileWriter)
    }).on('error', error => {
      reject(error)
    })
  })
}
Finally my sleep function:
let timeToSleepMs = (60 * 1000)

function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
I'm pretty sure this has to do with some sort of async issue, because that always seems to be my issue with Node, but I'm not sure how to handle it. I just want to fetch a file and download it locally, retrying if I get an HTTP 202. If there's a better way, please let me know!
tl;dr - How do I properly handle waiting for an HTTP 202 response to turn into an HTTP 200 when the file is generated, then continue executing code after the file is downloaded?
await downloadFile(url, reportFileName) invokes a recursive function and waits for the promise from the outermost invocation to resolve. But if the function calls itself recursively in
await sleep(timeToSleepMs)
return downloadFile(url, targetFile)
this outermost promise is never resolved.
Replace the two lines above with
await sleep(timeToSleepMs)
resolve(downloadFile(url, targetFile))
return
Then the resolution of the outermost promise becomes the resolution of the recursively invoked second-outermost promise, and so on.
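In other words, resolving a promise with another promise makes the outer promise adopt the inner one's eventual result. A minimal, self-contained illustration:
const inner = new Promise(resolve => setTimeout(() => resolve('inner value'), 100));
// Resolving with a promise does not settle `outer` immediately;
// it settles with whatever `inner` eventually settles with.
const outer = new Promise(resolve => resolve(inner));
outer.then(value => console.log(value)); // logs "inner value" after ~100 ms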
Heiko already identified the problem: when you return from the callback, you never resolve the promise. To avoid such mistakes in general, it is recommended not to mix the promisification of a function with the business logic. Use async/await only in the latter, and do not pass async functions as callbacks. In your case, that would be:
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}

function httpsGet(url) {
  return new Promise((resolve, reject) => {
    https.get(url, resolve).on('error', reject);
  });
}

function writeFile(path, readableStream) {
  return new Promise((resolve, reject) => {
    const fileWriter = fs
      .createWriteStream(path)
      .on('finish', resolve)
      .on('error', reject);
    readableStream.pipe(fileWriter);
  });
}
Then you can easily write a straightforward function without any callbacks:
async function downloadFile (url, targetFile) {
  const response = await httpsGet(url);
  const code = response.statusCode ?? 0;

  if (code >= 400) {
    throw new Error(response.statusMessage);
  }

  // handle redirects
  if (code > 300 && code < 400 && !!response.headers.location) {
    return downloadFile(response.headers.location, targetFile);
  }

  // handle file creation pending
  if (code == 202) {
    console.log(`Report: ${reportFileName} is still being generated, trying again in ${timeToSleepMs/1000} seconds...`);
    await sleep(timeToSleepMs);
    return downloadFile(url, targetFile);
  }

  // make download directory regardless of if it exists
  fs.mkdirSync(outputPath, { recursive: true });

  // save the file to disk
  await writeFile(`${outputPath}/${targetFile}`, response);
}

Piping got.stream to a file

I am refactoring some code that was using the http module in Node to use got instead. I tried the following:
function get(url, filePath) {
  return new Promise((resolve, reject) => {
    got.stream(url).on("response", response => {
      const newFile = fs.createWriteStream(filePath);
      response.pipe(newFile);
      newFile.on("finish", () => {
        newFile.close(resolve());
      });
      newFile.on("error", err => {
        reject(err);
      });
    }).on("error", err => {
      reject(err);
    });
  });
}
The finish event never fired. The file (filePath) is created with 0 bytes.
The block of code using newFile was something that worked when I was using the Node http module.
What is the proper way to pipe got.stream to a file?
Per the got() documentation, you want to pipe the stream directly to your file and if you use pipeline() to do it, it will collect errors and report completion.
const stream = require('stream');
const { promisify } = require('util');
const fs = require('fs');
const got = require('got');

const pipeline = promisify(stream.pipeline);
const fsp = require('fs').promises;

function get(url, filePath) {
  return pipeline(
    got.stream(url),
    fs.createWriteStream(filePath)
  );
}
// usage
get(...).then(() => {
  console.log("all done");
}).catch(err => {
  console.log(err);
});
FYI, the point of got.stream() is to return a stream that you can use directly as a stream, and since you want it to go to a file, you can pipe that stream to the file. I use pipeline() instead of .pipe() because pipeline() has much more complete error handling than .pipe(), though in non-error conditions .pipe() would also work.
Here's a version that cleans up the output file if there's an error:
function get(url, filePath) {
  return pipeline(
    got.stream(url),
    fs.createWriteStream(filePath)
  ).catch(err => {
    fsp.unlink(filePath).catch(err => {
      if (err.code !== 'ENOENT') {
        // trying to delete output file upon error
        console.log('error trying to delete output file', err);
      }
    });
    throw err;
  });
}
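As an aside (this is an assumption about newer runtimes, not part of the original answer): on Node 15 and later you can skip the promisify step and import a promise-based pipeline directly from 'stream/promises':
const { pipeline } = require('stream/promises');
const fs = require('fs');
const got = require('got');

function get(url, filePath) {
  // resolves when the download finishes, rejects on any stream error
  return pipeline(got.stream(url), fs.createWriteStream(filePath));
}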

My mocha/chai assert runs before the actual code

I am trying to test a function, but my assert statement always runs before my actual code. I'm testing the function createAppointment.
My test:
describe("Appointments", function() {
it("should be able to create a new appointment", function(err) {
let newAppts = [];
testRequests.forEach(request => {
createAppointment(testDb, request, function(err, id) {
if (err) return err;
newAppts.push(id);
return id;
});
});
assert.equal(newAppts.length, 5);
});
});
I am expecting the length of newAppts to be 5, but it is 0 every time, since the assert runs before the forEach callbacks are done. What is the best way to go about adding a callback?
Here's a test that works for me:
describe("Appointments", function () {
it("should be able to create 5 new appointments", function (done) {
// promisify createAppointment, build an array, pass to Promise.all()
Promise.all(testRequests.map(request => new Promise((resolve, reject) => {
createAppointment(testDb, request, function (err, id) {
if (err) reject(err);
resolve(id);
});
})))
.then(newAppts => {
// if all Promises resolve, check result
assert.equal(newAppts.length, 5);
// and tell mocha we're done
done();
})
.catch(err => {
// if any one of the Promises fails, we're also done
done(err);
});
});
});
If you simply want to test async code, it's enough to return the Promise. This will simply check if it resolves.
If your test is supposed to check the result though, don't return the Promise; instead use the it callback's done parameter to tell mocha that the async test has finished, after calling assert to check the result.
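For illustration, a minimal sketch of that first variant (just returning the Promise so mocha checks that it resolves), assuming the same testRequests, testDb, and createAppointment as above:
describe("Appointments", function () {
  it("creates appointments without errors", function () {
    // returning the Promise lets mocha wait for it and fail the test if it rejects
    return Promise.all(testRequests.map(request => new Promise((resolve, reject) => {
      createAppointment(testDb, request, (err, id) => (err ? reject(err) : resolve(id)));
    })));
  });
});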

Refactoring Complicated Nested Node.js Function

I have the following snippet of code below. It currently works, but I'm hoping to optimize/refactor it a bit.
Basically, it fetches JSON data, extracts the urls for a number of PDFs from the response, and then downloads those PDFs into a folder.
I'm hoping to refactor this code in order to process the PDFs once they are all downloaded. Currently, I'm not sure how to do that. There are a lot of nested asynchronous functions going on.
How might I refactor this to allow me to tack on another .then call before my error handler, so that I can then process the PDFs that are downloaded?
const axios = require("axios");
const moment = require("moment");
const fs = require("fs");
const download = require("download");
const mkdirp = require("mkdirp"); // Makes nested files...
const getDirName = require("path").dirname; // Current directory name...
const today = moment().format("YYYY-MM-DD");

function writeFile(path, contents, cb) {
  mkdirp(getDirName(path), function(err) {
    if (err) return cb(err)
    fs.writeFile(path, contents, cb)
  })
};

axios.get(`http://federalregister.gov/api/v1/public-inspection-documents.json?conditions%5Bavailable_on%5D=${today}`)
  .then((res) => {
    res.data.results.forEach((item) => {
      download(item.pdf_url).then((data) => {
        writeFile(`${__dirname}/${today}/${item.pdf_file_name}`, data, (err) => {
          if (err) {
            console.log(err);
          } else {
            console.log("FILE WRITTEN: ", item.pdf_file_name);
          }
        })
      })
    })
  })
  .catch((err) => {
    console.log("COULD NOT DOWNLOAD FILES: \n", err);
  })
Thanks for any help you all can provide.
P.S. When I simply tack on the .then call right now, it fires immediately. Does this mean my forEach loop is non-blocking? I thought forEach loops were blocking.
The current forEach will run synchronously, and will not wait for the asynchronous operations to complete. You should use .map instead of forEach so you can map each item to its Promise from download. Then, you can use Promise.all on the resulting array, which will resolve once all downloads are complete:
axios.get(`http://federalregister.gov/api/v1/public-inspection-documents.json?conditions%5Bavailable_on%5D=${today}`)
  .then(processResults)
  .catch((err) => {
    console.log("COULD NOT DOWNLOAD FILES: \n", err);
  });

function processResults(res) {
  const downloadPromises = res.data.results.map((item) => (
    download(item.pdf_url).then(data => new Promise((resolve, reject) => {
      writeFile(`${__dirname}/${today}/${item.pdf_file_name}`, data, (err) => {
        if (err) reject(err);
        else resolve(console.log("FILE WRITTEN: ", item.pdf_file_name));
      });
    }))
  ));

  return Promise.all(downloadPromises)
    .then(() => {
      console.log('all done');
    });
}
If you wanted to essentially block the function on each iteration, you would want to use an async function in combination with await instead.
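A minimal sketch of that sequential approach, assuming the same download and writeFile helpers as above (the name processResultsSequentially is just for illustration):
async function processResultsSequentially(res) {
  for (const item of res.data.results) {
    // each download is awaited before the next one starts
    const data = await download(item.pdf_url);
    await new Promise((resolve, reject) => {
      writeFile(`${__dirname}/${today}/${item.pdf_file_name}`, data, (err) =>
        err ? reject(err) : resolve()
      );
    });
    console.log("FILE WRITTEN: ", item.pdf_file_name);
  }
  console.log("all done");
}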

Javascript Promises : Can I know which part of promise chain caused error?

(Please excuse my English)
I am learning about JavaScript promises now.
The sample code below is a simple JavaScript program for Node.js (my Node.js version is v10.0.0) that asynchronously reads and parses a JSON file using a promise chain.
const fs = require("fs");

function readFileAsync(filename) {
  return new Promise((resolve, reject) => {
    fs.readFile(filename, 'utf8', (error, result) => {
      if (error)
        reject(error);
      else
        resolve(result);
    });
  });
}

readFileAsync('test.json')
  .then(res => JSON.parse(res))
  .then(res => { console.log('JSON=', res); })
  .catch(err => { console.log(err.message); });
I found that this sample code generates different formats of error messages.
For example, if it cannot find 'test.json', the error message is...
ENOENT: no such file or directory, open '/home/node/test.json'
If it cannot parse 'test.json', the error message is...
Unexpected token / in JSON at position 31
I want to modify the sample code so that it always generates the same format of error message, containing the JSON file name.
To do so, I first need to know which part of the promise chain caused the error. How can I tell?
There are two ways to achieve what you want.
Promise's .then method takes two arguments; see the code below for how the second one is used:
readFileAsync('test.json')
  .then(res => JSON.parse(res))
  .then(res => { console.log('JSON=', res); }, error => {
    // here you can catch errors from the previous then callback
  });
Another way is to modify the readFileAsync function:
function readFileAsync(filename) {
  return new Promise(resolve => {
    fs.readFile(filename, (error, result) => {
      if (error)
        resolve(null); // you can resolve whatever you want
      else
        resolve(result);
    });
  });
}
With this second approach, .catch() will never receive any error from readFileAsync.
The sample code below is my solution. Thank you, Bergi and Stephen.
I chose this solution because I want to know exactly where in the chain the error occurred and what the error is.
const fs = require("fs");

function readFileAsync(filename) {
  return new Promise((resolve, reject) => {
    fs.readFile(filename, 'utf8', (error, result) => {
      if (error)
        reject(error);
      else
        resolve(result);
    });
  });
}

function readJsonAsync(filename, fnCallBack) {
  function fnMessage(n, str) {
    console.log(`[${n}:${filename}]`, str);
  }
  readFileAsync(filename)
    .then(
      res => JSON.parse(res),
      err => { fnMessage(-1, err.message); }
    ).then(
      res => {
        // if some errors occurred at the previous step, res === undefined
        if (res !== undefined)
          fnCallBack(filename, res);
      },
      err => { fnMessage(-2, err.message); }
    );
}

function printJSON(filename, json) {
  console.log(`JSON[${filename}]:`, json);
}

readJsonAsync('test.json', printJSON);
My solution has a prerequisite. The prerequisite is...
There is no simple way to break a promise chain even if some errors occurred at previous steps of the chain.
Is this prerequisite right?
