I'm trying to download several files from a server. I ran into memory-leak errors with for, forEach and map, so I'm using this callback function instead, and it works:
.then(files => {
const downloadFile = callback => {
if (files.length > 0) {
let file = files.shift();
client.download(`/${file}`, `./${file}`, () => {
console.log(`Downloaded: ${file}`);
downloadFile(callback);
});
}
else {
callback();
}
};
downloadFile(() => {
console.log('All done');
})
})
I'd like to convert it into a Promise-based function, but I'm stuck. I've tried new Promise((resolve, reject) => {}) and Promise.all(), but it only returns the first file.
You can use map() on your files array to produce one promise per file. Then you can call Promise.all() to know when they have all been downloaded.
Since we don't know anything about your client.download() method, we're assuming it does what it should. It's strange that its callback doesn't take a parameter with the actual file data or an error.
let promises = files.map(file => {
    return new Promise((resolve, reject) => {
        client.download(`/${file}`, `./${file}`, () => {
            // should this callback receive the file data or an error to check?
            console.log(`Downloaded: ${file}`);
            resolve(file); // resolve here, otherwise Promise.all() never settles
        });
    });
});
Promise.all(promises)
.then(() => console.log("all done"))
Since this is happening inside another then() you could just return Promise.all(promises) to handle it downstream.
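For example, a minimal sketch of that shape, reusing the promises array built above inside your existing .then(files => ...) block:
.then(files => {
    const promises = files.map(/* as above */);
    return Promise.all(promises); // hand the combined promise downstream
})
.then(() => console.log('All done'))
.catch(err => console.error(err));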
I am confused because I cannot seem to extract a value from an asynchronous operation. Basically I have to parse a csv file and the operation is asynchronous as shown below.
const csv = require('csv-parser')
const fs = require('fs')
const results = [];
fs.createReadStream('courses.csv')
.pipe(csv())
.on('data', (data) => results.push(data))
.on('end', () => {
console.log(results);
});
I am unable to completely extract and isolate the results variable from that stream. I have tried doing this by wrapping it in a promise, but it shows as pending. Here is what I am doing.
const getData = () => {
const prom = new Promise((res, rej) => {
fs.createReadStream('courses.csv')
.pipe(csv())
.on('data', (data) => results.push(data))
.on('end', () => {
res(results);
});
})
return prom;
}
async function GIVEMEMYVALUE() {
var result = await getData();
return result;
};
let data = GIVEMEMYVALUE();
console.log(data);
I have read other questions relating to promises but I still don't see what I am doing wrong. I can do whatever I want with the results variable inside the 'end' callback but cannot seem to extract it (for whatever reason I want to extract it).
Is it wrong to want to extract that value outside the scope of the 'end' callback ?
Can everything I possibly want to do with the results be done inside the callback ?
I have already gone through How do I return the response from an asynchronous call? but don't quite get it as it doesn't mention anything about pending promises.
GIVEMEMYVALUE also returns a promise. However, you could shorten your code a lot:
const getData = () =>
    new Promise((res, rej) => {
        const results = []; // collect rows locally instead of relying on an outer variable
        fs.createReadStream("courses.csv")
            .pipe(csv())
            .on("data", data => results.push(data))
            .on("error", rej) // reject if the stream errors, so the caller can catch it
            .on("end", () => {
                res(results);
            });
    });
getData().then(data => {
console.log(data);
});
async/await does not make your code run synchronously. As soon as you put async in front of your function, it automatically returns a promise and behaves like one.
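So GIVEMEMYVALUE() hands you a promise as well; to get at the rows you still have to consume it, either with .then() as above or by awaiting it inside another async function. A minimal sketch:
async function main() {
    const data = await getData(); // resolves when the stream's 'end' event fires
    console.log(data);            // the parsed rows, not Promise { <pending> }
}
main();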
I am executing multiple promises with the following snippet:
await Promise.all([promise1, promise2, promise3]);
What I would like to achieve is to roll back the effects of the successful promises in the case of a failure from Promise.all().
In more specific terms, this means that the above will do some file encryptions, but if one fails, I would like to delete the other two (or one) files that were encrypted successfully so as to have consistent and clean file groups.
From what I've read this means that I would need two steps:
1. Catching the errors for each promise so that Promise.all() won't throw an error.
2. The puzzling part: Having another Promise.all() sort of:
await Promise.all([rollbackPromise1, rollbackPromise2, rollbackPromise3]);
This one seems to be the tricky part: should I execute all the rollbacks independently of which promise failed? That means I should add another catch for every rollback so that the second Promise.all() waits for every rollback to finish.
Is this the best way to do it? I find it pretty inefficient and ugly in terms of code.
You could create your own function that runs the asynchronous calls and performs a rollback where required.
// Function that'll perform a Promise.all and roll back if required
async function allWithRollback(promises) {
    // map wraps each [promise, rollback] pair so a rejection triggers its own rollback
    return Promise.all(promises.map(async ([func, rollbackFunc], xi) => {
        try {
            await func;
            console.log('One promise succeeded', xi);
        } catch (err) {
            console.log('One promise failed, rolling back', xi);
            await rollbackFunc();
        }
    }));
}
// Call the custom Promise.all
allWithRollback([
[
// First param is the promise
okPromise(),
// Second param is the rollback function to execute
() => {},
],
[okPromise(), () => {}],
[errPromise(), rollback1],
[errPromise(), rollback2],
[okPromise(), () => {}],
]);
// ---------
async function okPromise() {
return true;
}
async function errPromise() {
throw new Error('no one read this');
}
async function rollback1() {
console.log('Performed the rollback1');
}
async function rollback2() {
console.log('Performed the rollback2');
}
You can create a naive solution as follows:
const errorHandlers = []
function enc1 () {
return new Promise((resolve, reject) => {
setTimeout(() => {
resolve('str')
}, 1000)
errorHandlers.push(() => {
console.log('handler 1')
})
})
}
function enc2 () {
return new Promise((resolve, reject) => {
setTimeout(() => {
resolve('str')
}, 2000)
errorHandlers.push(() => {
console.log('handler 2')
})
})
}
function enc3 () {
return new Promise((resolve, reject) => {
setTimeout(() => {
reject('str')
}, 3000)
errorHandlers.push(() => {
console.log('handler 3')
})
})
}
Promise.all([enc1(), enc2(), enc3()]).then(() => {
console.log('all resolved')
}).catch((e) => {
errorHandlers.forEach(handler => handler(e))
})
This gives you a way to handle the 'global' error in each promise. Before creating the next Promise.all, you can reset errorHandlers to prevent the handlers from executing more than once.
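For example, a minimal sketch of that reset, reusing the errorHandlers array from above:
errorHandlers.length = 0; // drop handlers registered by any previous run
Promise.all([enc1(), enc2(), enc3()]).then(() => {
    console.log('all resolved')
}).catch((e) => {
    errorHandlers.forEach(handler => handler(e))
})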
What I want to do is this:
Loop over a collection of data; for each data element, make a call to an API, wait for the promise to resolve or reject, pause for 30 seconds, then do the same for the next data element until there is nothing left to iterate over in the collection, and finally display a 'done' message.
So far this is the code I wrote, gathering ideas from other SO questions, and it is not working the way I'd like.
populateDB();
// these 2 helper functions were found on SO
function timeout(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
async function sleep(fn, ...args) {
await timeout(30000);
return fn(...args);
}
// this is the main function that makes the api calls
function populateDB() {
for (let stock of universe.universe) {
sleep(() => {
// actual API call
return alpha.data
.daily(stock)
.then(data => {
// write data to the db when promise resolves
db.get("stocks")
.push({ [stock]: polishData(data) })
.write();
})
.catch(err => console.log(err));
});
}
console.log("Done!");
}
All the promises are still chaining one after another and there is no pause. I don't think I understand promises well enough to debug this... what would be the code that works the way I'd like it to?
Use async/await in your populateDB function:
async function populateDB() {
for (let stock of universe.universe) {
await sleep(() => {
// actual API call
return alpha.data
.daily(stock)
.then(data => {
// write data to the db when promise resolves
db.get("stocks")
.push({ [stock]: polishData(data) })
.write();
})
.catch(err => console.log(err));
});
}
console.log("Done!");
}
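Since populateDB is now an async function it returns a promise itself, so the top-level call can chain on it if you want something to run after the last stock has been processed, e.g.:
populateDB().then(() => console.log("populateDB finished"));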
I'm working with a module called findit, which recursively finds files within a target folder and emits the events "file", "error" and "end":
const finder = findit("folder/");
finder.on("file", (file) => {});
finder.on("error", ((error) => {});
finder.on("end", () => {});
The end event will be called when the finder has found all the files... but inside the file event I'm doing an async operation that keeps only the files that have something inside:
const finder = findit("folder/");
let neededFiles = [];
finder.on("file", (file) => {
// async operation here to store only the files that i want
// neededFiles = [...neededFiles, file];
});
finder.on("error", ((error) => {});
finder.on("end", () => {
console.log(neededFiles); // empty array
});
neededFiles will be empty because the async operation has not finished yet. My question is: what changes do I need to make so that the end event waits for the async operation?
Thanks.
Since you've provided only a sketch of your app, I tried to build around it and show how you could wrap it in a promise to handle the async part.
function find(folder){
return new Promise((resolve, reject) => {
const finder = findit(folder);
const files = [];
finder.on("file", (file) => {
//do stuff like
//files.push(valueOrPromise);
//or
//files.push( find(anotherPath) );
});
finder.on("error", reject);
finder.on("end", () => {
//this line finally "returns" the result.
//to this point you can modify files as you wish
resolve( Promise.all(files) );
//hint, if you're working with recursive stuff, you may have nested Arrays, so you should flatten the result
//resolve( Promise.all(files).then(values => values.reduce((a,b) => a.concat(b), [])) )
});
})
}
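A quick usage sketch (neededFiles here is simply the array the promise resolves with):
find("folder/")
    .then(neededFiles => {
        console.log(neededFiles); // populated once 'end' has fired and all file promises have settled
    })
    .catch(err => console.error(err));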
Usually people ask at this point: why do I need promises? Because they implement state management of async tasks; so why would you want to implement it yourself?
And why not use the 'end' inside the 'file'? Something like:
finder.on("file", (file) => {
// async operation here to store only the files that i want
// neededFiles = [...neededFiles, file]; // GENERATE A PROMISE SO U CAN CHAIN THEN FUNCTION
neededFilesPromise.then(function(neededFiles){
finder.on("end", () => {
console.log(neededFiles); // empty array
});
}).catch(function() {
finder.on("error", ((error) => {});;
})
});
I would say this is the perfect use case for Promise.all(). So, something like this. You might want to 'promisify' your file operations so it is even cleaner (if you are using something like bluebird to replace the native promises). I am not sure if your lib returns the content of the file or a filename.
const fs = require('fs');
require('bluebird').promisifyAll(fs);
const finder = findit("folder/");
let neededFiles = [];
finder.on("file", (file) => {
// if file is the file content
neededFiles.push(/* result of your op above */);
// if file is a file name
neededFiles.push(fs.readFile(/* file name */));
});
finder.on("error", ((error) => {});
finder.on("end", () => {
Promise.all(neededFiles)
.then((nf) => {
console.log(nf); // you should now have something here...
});
});
I'm trying to extend some existing code with additional promises, but they are a new topic for me at the moment and I'm obviously missing something. This is running as part of a build script for npm.
All I am currently trying to make happen is for the final then to be called after the pack operation has happened for each architecture. I have tried wrapping it in a
return new Promise
But at the moment I am not returning anything from that function, so I'm not sure what I should include in the resolve call at the end. If I just call resolve with true, nothing happens, and wrapping it in a promise seems to cause the function to not actually run, with no errors caught anywhere.
I'm guessing I am going about this completely wrong; all I want to achieve is to run another function once the previous one has completed.
Here's the code as it stands with the additional .then that I can't get to be called.
function build(cfg) {
return new Promise((resolve, reject) => {
webpack(cfg, (err, stats) => {
if (err) return reject(err);
resolve(stats);
});
});
}
function startPack() {
console.log('start pack...');
build(electronCfg)
.then(() => build(cfg))
.then(() => del('release'))
.then(paths => {
if (shouldBuildAll) {
// build for all platforms
const archs = ['ia32', 'x64'];
const platforms = ['linux', 'win32', 'darwin'];
platforms.forEach(plat => {
archs.forEach(arch => {
pack(plat, arch, log(plat, arch));
});
});
} else {
// build for current platform only
pack(os.platform(), os.arch(), log(os.platform(), os.arch()));
}
})
.then(() => {
console.log('then!');
})
.catch(err => {
console.error(err);
});
}
function pack(plat, arch, cb) {
// there is no darwin ia32 electron
if (plat === 'darwin' && arch === 'ia32') return;
const iconObj = {
icon: DEFAULT_OPTS.icon + (() => {
let extension = '.png';
if (plat === 'darwin') {
extension = '.icns';
} else if (plat === 'win32') {
extension = '.ico';
}
return extension;
})()
};
const opts = Object.assign({}, DEFAULT_OPTS, iconObj, {
platform: plat,
arch,
prune: true,
'app-version': pkg.version || DEFAULT_OPTS.version,
out: `release/${plat}-${arch}`,
'osx-sign': true
});
packager(opts, cb);
}
You didn't say what log is, but if it's a plain logging function, then it looks like you're passing in undefined (the result from calling log(...)) as the cb argument to pack. Perhaps you meant:
pack(plat, arch, () => log(plat, arch));
In any case, this won't do anything to wait for packing to finish. I don't know why you're not seeing any console output, but if you're looking for this output to happen after all the packing has finished, then you need to wrap packager in a promise. Something like:
var pack = (plat, arch) => new Promise(resolve => {
    // ...
    // note: if you keep the early return for the darwin/ia32 case, call resolve() there too,
    // otherwise that promise never settles and the Promise.all() below never finishes
    packager(opts, resolve);
});
And then use Promise.all instead of forEach to do all the packaging (in parallel if that's OK):
.then(paths => {
if (!shouldBuildAll) {
return pack(os.platform(), os.arch());
}
return Promise.all(['linux', 'win32', 'darwin'].map(plat =>
Promise.all(['ia32', 'x64'].map(arch => pack(plat, arch)))));
})
.then(() => console.log('then!'))
.catch(err => console.error(err));