Async promise recursion with an unknown supply of async values - javascript

I'm banging my head against async promise recursion. I have a bunch of promises that resolve when async data is downloaded (combined with Promise.all). But sometimes the data I just downloaded contains a link to more data that must also be downloaded (recursion). The best explanation is probably to show the code; the comments are inline.
(I have tried various combinations to no avail.)
var urls = ['http://czyprzy.vdl.pl/file1.txt', 'http://czyprzy.vdl.pl/file2.txt', 'http://czyprzy.vdl.pl/file3.txt'];
var urlsPromise = [];
var secondPart = [];
var thirdPart = [];
function urlContent(url, number) {
    return new Promise(function (resolve) {
        var dl = request(url, function (err, resp, content) {
            if (err || resp.statusCode >= 400) {
                return resolve({number : number, url : url, error : 'err'});
            }
            if (!err && resp.statusCode == 200) {
                if (content.indexOf('file') !== -1) // if 'file' appears in the content, we need to (would like to :) download that new file via recursion
                {
                    content = content.slice(content.indexOf('file') + 4);
                    content = +content; // the number to pass later on, so we know which file we are working on
                    url = 'http://czyprzy.vdl.pl/file' + content + '.txt'; // we build the new address
                    //urlsPromise.push(urlContent(url, content)); // this would only run AFTER Promise.all(urlsPromise), so we simply can't recurse (like that) here
                    secondPart.push(urlContent(url, content));
                    // if we use another promise array that collects the resolved items, everything works just fine - but only the first time;
                    // then we would need to add another (thirdPart) array and another Promise.all(thirdPart)... and so on and so on...
                    // The problem is I don't know how many files there will be, so I have no idea how many 'parts' for Promise.all I need to create.
                    // Some kind of asynchronous loop/recursion would save me here, but I don't know how to do that properly so the code runs in the proper order.
                }
                return resolve({number : number, url : url}); // this goes to the 'urlsPromise' array
            }
        });
    });
}
if (urls.length !== 0) {
for (var i = 0; i < urls.length; i++)
{urlsPromise.push(urlContent(urls[i], i + 1));}
}
Promise.all(urlsPromise).then(function(urlsPromise) {
console.log('=======================================');
console.log('urlsPromise:\n');
console.log(urlsPromise); // some code/calculations here
}).then(function() {
return Promise.all(secondPart).then(function(secondPart) {
console.log('=======================================');
console.log('secondPart:\n');
console.log(secondPart); // some code/calculations here
secondPart.forEach(function(item)
{
thirdPart.push(urlContent(item.url, item.number + 3));
});
});
}).then(function() {
return Promise.all(thirdPart).then(function(thirdPart) {
console.log('=======================================');
console.log('thirdPart:\n');
console.log(thirdPart); // some code/calculations here
});
}).then(function()
{
console.log();
console.log('and so on and so on...');
});
//// files LINKING (those files do exist on live server - just for testing purposes):
// file1->file4->file7->file10 /-/ file1 content: file4 /-/ file4 content: file7 /-/ file7 content: file10
// file2->file5->file8->file11 /-/ file2 content: file5 /-/ file5 content: file8 /-/ file8 content: file11
// file3->file6->file9->file12 /-/ file3 content: file6 /-/ file6 content: file9 /-/ file9 content: file12
//// the console.log output looks like this:
// =======================================
// urlsPromise:
// [ { number: 1, url: 'http://czyprzy.vdl.pl/file4.txt' },
// { number: 2, url: 'http://czyprzy.vdl.pl/file5.txt' },
// { number: 3, url: 'http://czyprzy.vdl.pl/file6.txt' } ]
// =======================================
// secondPart:
// [ { number: 4, url: 'http://czyprzy.vdl.pl/file7.txt' },
// { number: 5, url: 'http://czyprzy.vdl.pl/file8.txt' },
// { number: 6, url: 'http://czyprzy.vdl.pl/file9.txt' } ]
// =======================================
// thirdPart:
// [ { number: 7, url: 'http://czyprzy.vdl.pl/file10.txt' },
// { number: 8, url: 'http://czyprzy.vdl.pl/file11.txt' },
// { number: 9, url: 'http://czyprzy.vdl.pl/file12.txt' } ]
// and so on and so on...

The await keyword can massively simplify this. You won't need a self-recursive function. This demo fakes the server call with a randomly sized array.
https://jsfiddle.net/mvwahq19/1/
// setup: create a list with a random number of options.
var sourceList = [];
var numItems = 10 + Math.floor(Math.random() * 20);
for (var i = 0; i < numItems; i++)
{
sourceList.push(i);
}
sourceList.push(100);
var currentIndex = 0;
// a function which returns a promise. Imagine it is asking a server.
function getNextItem() {
var item = sourceList[currentIndex];
currentIndex++;
return new Promise(function(resolve) {
setTimeout(function() {
resolve(item);
}, 100);
});
}
async function poll() {
var collection = [];
var done = false;
while(!done) {
var item = await getNextItem();
collection.push(item);
console.log("Got another item", item);
if (item >= 100) {
done = true;
}
}
console.log("Got all items", collection);
}
poll();
You can write a normal loop where the body simply uses await.
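Applied to the original question, the same idea might look roughly like this (my sketch, not part of the answer; it assumes a promisified requestPromise(url) helper that resolves with a file's content, and the same fileN.txt link format):
async function followChain(startUrl) {
    var results = [];
    var url = startUrl;
    while (url) {
        var content = await requestPromise(url); // wait for this file before deciding what to fetch next
        results.push({ url: url, content: content });
        var idx = content.indexOf('file');
        // if the content references another file, build its URL; otherwise stop
        url = idx !== -1 ? 'http://czyprzy.vdl.pl/file' + (+content.slice(idx + 4)) + '.txt' : null;
    }
    return results;
}
// the three starting chains can still run in parallel:
// Promise.all(urls.map(followChain)).then(console.log);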

This answer was provided thanks to trincot - https://stackoverflow.com/users/5459839/trincot
When I asked him this question directly, he supported me with his time and knowledge and gave this excellent answer.
CODE:
//// files LINKING (those files do exist on live server - just for testing purposes):
// file1->file4(AND file101)->file7->file10 /-/ file1 content: file4 /-/ file4 content: file7 /-/ file7 content: file10 /-/ file10 content: EMPTY /-/ file101 content: EMPTY
// file2->file5(AND file102)->file8->file11 /-/ file2 content: file5 /-/ file5 content: file8 /-/ file8 content: file11 /-/ file11 content: EMPTY /-/ file102 content: EMPTY
// file3->file6(AND file103)->file9->file12 /-/ file3 content: file6 /-/ file6 content: file9 /-/ file9 content: file12 /-/ file12 content: EMPTY /-/ file103 content: EMPTY
var urls = ['http://czyprzy.vdl.pl/file1.txt', 'http://czyprzy.vdl.pl/file2.txt', 'http://czyprzy.vdl.pl/file3.txt'];
var urlsPromise = [];
function requestPromise(url) {
return new Promise(function(resolve, reject) {
request(url, function (err, resp, content) {
if (err || resp.statusCode != 200) reject(err || resp.statusCode);
else resolve(content);
});
});
}
async function urlContent(url, number) {
var arr = [];
let content = await requestPromise(url);
while (content.indexOf(';') !== -1)
{
var semiColon = content.indexOf(';');
var fileLink = content.slice(content.indexOf('file'), semiColon + 1);
content = content.replace(fileLink, ''); // we need to remove the file link so we won't iterate over it again, we will add to the array only new links
var fileLinkNumber = fileLink.replace('file', '');
fileLinkNumber = fileLinkNumber.replace(';', '');
fileLinkNumber = +fileLinkNumber;
url = 'http://czyprzy.vdl.pl/file' + fileLinkNumber + '.txt'; // we build new address
arr.push({url, fileLinkNumber});
}
if (content.indexOf('file') !== -1)
{
var fileLinkNumber = content.slice(content.indexOf('file') + 4);
fileLinkNumber = +fileLinkNumber;
url = 'http://czyprzy.vdl.pl/file' + fileLinkNumber + '.txt';
arr.push({url, fileLinkNumber});
}
var newArr = arr.map(function(item)
{
return urlContent(item.url, item.fileLinkNumber); // return IS important here
});
return [].concat(arr, ...await Promise.all(newArr));
}
async function doing() {
let urlsPromise = [];
for (let i = 0; i < urls.length; i++) {
urlsPromise.push(urlContent(urls[i], i + 1));
}
let results = [].concat(...await Promise.all(urlsPromise)); // flatten the array of arrays
console.log(results);
}
//// this is only to show Promise.all chaining - so you can run an async loop and then wait for some other async data, in the proper order.
var test_a = ['http://czyprzy.vdl.pl/css/1.css', 'http://czyprzy.vdl.pl/css/2.css', 'http://czyprzy.vdl.pl/css/cssa/1a.css', 'http://czyprzy.vdl.pl/css/cssa/2a.css'];
var promisesTest_a = [];
function requestStyle(url)
{
return new Promise(function(resolve, reject)
{
request(url, function(error, response, content)
{
if (response.statusCode === 200 && !error)
{resolve(content);}
else
{reject(error);}
});
});
}
for (var i = 0; i < test_a.length; i++)
{promisesTest_a.push(requestStyle(test_a[i]));}
Promise.all(promisesTest_a).then(function(promisesTest_a)
{
console.log(promisesTest_a);
}).then(function()
{
console.log('\nNow we start with #imports...\n');
}).then(function()
{
return doing();
}).then(function()
{
console.log('ALL DONE!');
});
COMMENT:
First, an explanation of what ... (the spread syntax) does, just in case you don't know it.
var arr = [];
var array1 = ['one', 'two', 'three']
var array2 = [['four', 'five', ['six', 'seven']], 'eight', 'nine', 'ten'];
arr = array1.concat(array2);
console.log(arr); // it does not flatten the arrays - it just concatenates them (joins them together)
console.log('---');
// however
arr = array1.concat(...array2);
console.log(arr); // notice the ... - as you can see it flattens the array one level - 'four' and 'five' are pulled out of their array - think of it as a level up :) remember that it pulls up the WHOLE array that is deeper - so 'six' and 'seven' are now 1 level deep (up from 2 levels deep, but still inside another array).
console.log('---');
// so
arr = [].concat(...arr);
console.log(arr); // hurray, our array is flat (a single array without nested elements)
console.log();
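As a side note (my addition, not part of the original answer): on modern Node versions (11+) the built-in Array.prototype.flat does the same flattening:
var nested = [['four', 'five', ['six', 'seven']], 'eight', 'nine', 'ten'];
console.log(nested.flat());         // one level, same as [].concat(...nested)
console.log(nested.flat(Infinity)); // flattens completely, no matter how deep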
All files (links) that are ready to be downloaded (the 3 starting ones in the urls array) are requested almost immediately (a synchronous loop over the array that contains them - one after the other, very fast, because we simply iterate over them synchronously).
Then, when we have their contents (because we await until the content is downloaded - so we have the resolved promise data here), we start to look for information about other possible urls (files) related to the one we already got, so we can download them too (via async recursion).
When we have found all the info about possible additional urls/files (collected in an array of matches), we push it to the data array (named arr in our code) and download them (by building the new url).
We download them by returning calls to the async urlContent function, which needs to await the requestPromise promise (so we have the resolved/rejected data inside urlContent and, if needed, can work with it - building the proper url to get the next file/content).
And so on and so on, until we have "iterated" over (downloaded) all the files. Every time urlContent is called, it builds an array of promises (the newArr variable) that are initially pending. When we await Promise.all(newArr), execution only resumes at that spot once ALL of those promises have resolved. At that moment we have the value of each of those promises, and each of those values is an array. We use one big concat to knit all those arrays together into one big array, also including the elements of arr (remember that there can be more than one file to download from a file we have already downloaded - that is why we store values in the data array, named arr in the code, which holds the requestPromise function's resolved/rejected values). This "big" array is the value with which a promise is resolved. Recall that this promise is the one that was already returned by the current function invocation, at the time the first await was executed.
This is the important part: urlContent returns a single promise, and that (returned) promise is resolved with an array as its value. Note that an async function returns its promise to the caller immediately, when the first await is encountered. The return statement in an async function determines the value with which that returned promise is resolved. In our case that is an array.
So on every call urlContent returns a promise that eventually resolves to an array. These are collected by our async doing function (since 3 urls were fired at the start, each one has its own urlContent... path), which awaits all those arrays via Promise.all(urlsPromise); when they are resolved, it 'returns' your data (the results variable). To be precise, doing returns a promise (because it is async). But the way we call doing shows we are not interested in what that promise resolves to, and in fact, since doing does not have a return statement, that promise resolves to undefined (!). Anyway, we don't use it - we merely output the results to the console.
One thing that can be confusing with async functions is that the return statement is not executed at the moment the function returns (what's in a name, right!? ;). The function has already returned by the time it executed its first await. When it eventually executes the return statement, it does not really return a value; it resolves "its own" promise, the one it had returned earlier. If we really wanted to separate output from logic, we should not do console.log(results) there but return results, and then, where we call doing, we could do doing().then(console.log); now we do use the promise returned by doing!
I would reserve the verb "to return" for what the caller of a function gets back from it synchronously.
I would use "to resolve" for the action that sets a promise to a resolved state with a value, a value that can be accessed with await or .then().
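To make that last point concrete, here is a minimal sketch (my addition) of separating output from logic, reusing the urls and urlContent from the code above:
async function doing() {
    let promises = urls.map((url, i) => urlContent(url, i + 1));
    return [].concat(...await Promise.all(promises)); // this resolves doing()'s own promise with the flattened array
}
// the caller now decides what to do with the resolved value
doing().then(results => console.log(results));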

Related

Pushing mongoDB results to an array, but the array remains empty

This is my first question on stack overflow, so bear with me.
So I have a controller function that renders my Google Maps API page, and I am trying to loop through the results from MongoDB and push them into an array so that I can pass it to a script tag in the EJS. I am able to console.log(myArr) within the for loop and get results, but not outside of it (just above the res.render). I am assuming that my problem is that result within res.render is receiving an empty array.
Please help me, I have been stuck on this problem for days now. Thank you, Andrew
function showMap(req, res) {
let myArr = [];
db.collection("poppies").find().toArray(function (err, result) {
for (let i = 0; i < result.length; i++) {
myArr.push(result[i].Address);
};
});
res.render('map', {
result: JSON.stringify(myArr)
})
};
Asynchronous JavaScript lets other code keep executing while an operation (such as a database query) completes in the background.
Imagine the data from your DB only arrives after 3 seconds - you have to "wait" for that value before running the next line of code.
In your case you use myArr "outside" without await, promises, or callbacks - so the value is still an empty array.
IMPORTANT: Asynchronous JavaScript is a topic for a whole course
(there is no way to cover it fully in a Stack Overflow answer).
Not working
length is: undefined
function helloWorld() {
let items = db.collection("newspapers").find({}).toArray();
return (items);
};
const result = helloWorld();
console.log("length is: " + result.length); /* length is: undefined */
Working
Await the value from the helloWorld() function.
function helloWorld() {
let items = db.collection("newspapers").find({}).toArray();
return (items);
};
const result = await helloWorld();
console.log("length is: " + result.length); /* length is: 337 */
In practice
Promise chaining for example:
For this database:
[
{
"Newspaper": "The New York Times"
},
{
"Newspaper": "Washington Post"
}
]
const dbName = 'my_database';
await client.connect();
const db = client.db(dbName);
myArr = [];
db.collection("newspapers").find({}).toArray()
.then(
res => {
for (let i = 0; i < res.length; i++) {
myArr.push(res[i].Newspaper);
};
console.log(`The length is ${res.length} documents`) /* The length is 2 documents */
console.log(myArr); /* [ 'The New York Times', 'Washington Post' ] */
},
err => console.error(`Something went wrong: ${err}`),
);
try/catch Async/await example (Readable code pattern):
const helloWorld = (async () => {
try {
let items = await db.collection("newspapers").find({}).limit(2).toArray();
return (items);
} catch (e) {
console.error(
`Unable to establish a collection: ${e}`,
)
}
})
const result = await helloWorld();
console.log("length is: " + result.length); /* length is: 2 */
More examples here:
https://docs.mongodb.com/drivers/node/fundamentals/promises
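Applied to the showMap controller from the question, the fix might look roughly like this (a sketch, assuming an Express handler and a driver version where toArray() with no callback returns a promise):
async function showMap(req, res) {
    try {
        // wait for the query to finish before touching myArr
        const result = await db.collection("poppies").find().toArray();
        const myArr = result.map(doc => doc.Address);
        res.render('map', { result: JSON.stringify(myArr) });
    } catch (err) {
        console.error(`Something went wrong: ${err}`);
        res.status(500).send('Database error');
    }
}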

Push inside forEach with query not working properly

I'm working with MongoDB Stitch/Realm and I'm trying to modify objects inside an array with a forEach while also pushing ids into a new array.
For each object that I'm modifying, I first do a query; after the document is found I start modifying the object and then push its id into another array so I can use both arrays later.
The code is something like this:
exports = function(orgLoc_id, data){
var HttpStatus = require('http-status-codes');
// Access DB
const db_name = context.values.get("database").name;
const db = context.services.get("mongodb-atlas").db(db_name);
const orgLocPickupPointCollection = db.collection("organizations.pickup_points");
const orgLocStreamsCollection = db.collection("organizations.streams");
const streamsCollection = db.collection("streams");
let stream_ids = [];
data.forEach(function(stream) {
return streamsCollection.findOne({_id: stream.stream_id}, {type: 1, sizes: 1}).then(res => { //if I comment this query it will push without any problem
if(res) {
let newId = new BSON.ObjectId();
stream._id = newId;
stream.location_id = orgLoc_id;
stream.stream_type = res.type;
stream.unit_price = res.sizes[0].unit_price_dropoff;
stream._created = new Date();
stream._modified = new Date();
stream._active = true;
stream_ids.push(newId);
}
})
})
console.log('stream ids: ' + stream_ids);
//TODO
};
But when I try to log 'stream_ids' it's empty and nothing is shown. Properties stream_type and unit_price are not assigned.
I've tried promises but I haven't had success
It's an asynchronous issue. You're populating the value of the array inside a callback. But because of the nature of the event loop, it's impossible that any of the callbacks will have been called by the time the console.log is executed.
You mentioned a solution involving promises, and that's probably the right tack. For example something like the following:
exports = function(orgLoc_id, data) {
// ...
let stream_ids = [];
const promises = data.map(function(stream) {
return streamsCollection.findOne({ _id: stream.stream_id }, { type: 1, sizes: 1 })
.then(res => { //if I comment this query it will push without any problem
if (res) {
let newId = new BSON.ObjectId();
// ...
stream_ids.push(newId);
}
})
})
Promise.all(promises).then(function() {
console.log('stream ids: ' + stream_ids);
//TODO
// any code that needs access to stream_ids should be in here...
});
};
Note the change from forEach to map... that way you're getting an array of all the Promises (I'm assuming your findOne returns a promise, because of the .then).
Then you use a Promise.all to wait for all the promises to resolve, and then you should have your array.
Side note: A more elegant solution would involve returning newId inside your .then. In that case Promise.all will actually resolve with an array of the results of all the promises, which would be the values of newId.
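That side note could look roughly like this (a sketch built on the snippet above, reusing the names from the question):
const promises = data.map(function(stream) {
    return streamsCollection.findOne({ _id: stream.stream_id }, { type: 1, sizes: 1 })
        .then(res => {
            if (!res) return null;
            const newId = new BSON.ObjectId();
            stream._id = newId;
            stream.location_id = orgLoc_id;
            stream.stream_type = res.type;
            stream.unit_price = res.sizes[0].unit_price_dropoff;
            return newId; // resolve this promise with the new id itself
        });
});
Promise.all(promises).then(function(stream_ids) {
    // stream_ids now holds the newId values (null where no document was found)
    console.log('stream ids: ' + stream_ids.filter(Boolean));
});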

How can I return different values from a function depending on code inside an Axios promise? NodeJS

I have a block of code that calls an API and saves the results depending on whether there are differences or not. I would like to return different values for DATA, as laid out in the code. But this is obviously not working, since it's returning undefined.
let compare = (term) => {
let DATA;
//declare empty array where we will push every thinkpad computer for sale.
let arrayToStore = [];
//declare page variable, that will be the amount of pages based on the primary results
let pages;
//this is the Initial get request to calculate amount of iterations depending on result quantities.
axios.get('https://api.mercadolibre.com/sites/MLA/search?q='+ term +'&condition=used&category=MLA1652&offset=' + 0)
.then(function (response) {
//begin calculation of pages
let amount = response.data.paging.primary_results;
//since we only care about the primary results, this is fine. Since there are 50 items per page, we divide
//amount by 50, and round it up, since the last page can contain less than 50 items
pages = Math.ceil(amount / 50);
//here we begin the for loop.
for(i = 0; i < pages; i++) {
// So for each page we will do an axios request in order to get results
//Since each page is 50 as offset, then i should be multiplied by 50.
axios.get('https://api.mercadolibre.com/sites/MLA/search?q='+ term +'&condition=used&category=MLA1652&offset=' + i * 50)
.then((response) => {
const cleanUp = response.data.results.map((result) => {
let image = result.thumbnail.replace("I.jpg", "O.jpg");
return importante = {
id: result.id,
title: result.title,
price: result.price,
link: result.permalink,
image: image,
state: result.address.state_name,
city: result.address.city_name
}
});
arrayToStore.push(cleanUp);
console.log(pages, i)
if (i === pages) {
let path = ('./compare/yesterday-' + term +'.json');
if (fs.existsSync(path)) {
console.log("Loop Finished. Reading data from Yesterday")
fs.readFile('./compare/yesterday-' + term +'.json', (err, data) => {
if (err) throw err;
let rawDataFromYesterday = JSON.parse(data);
// test
//first convert both items to check to JSON strings in order to check them.
if(JSON.stringify(rawDataFromYesterday) !== JSON.stringify(arrayToStore)) {
//Then Check difference using id, otherwise it did not work. Using lodash to help.
let difference = _.differenceBy(arrayToStore[0], rawDataFromYesterday[0],'id');
fs.writeFileSync('./compare/New'+ term + '.json', JSON.stringify(difference));
//if they are different save the new file.
//Then send it via mail
console.log("different entries, wrote difference to JSON");
let newMail = mail(difference, term);
fs.writeFileSync('./compare/yesterday-' + term +'.json', JSON.stringify(arrayToStore));
DATA = {
content: difference,
message: "These were the differences, items could be new or deleted.",
info: "an email was sent, details are the following:"
}
return DATA;
} else {
console.log("no new entries, cleaning up JSON");
fs.writeFileSync('./compare/New'+ term + '.json', []);
DATA = {
content: null,
message: "There were no difference from last consultation",
info: "The file" + './compare/New'+ term + '.json' + ' was cleaned'
}
return DATA;
}
});
} else {
console.error("error");
console.log("file did not exist, writing new file");
fs.writeFileSync('./compare/yesterday-' + term +'.json', JSON.stringify(arrayToStore));
DATA = {
content: arrayToStore,
message: "There were no registries of the consultation",
info: "Writing new file to ' " + path + "'"
}
return DATA;
}
}
})
}
}).catch(err => console.log(err));
}
module.exports = compare
So I export this compare function, which I call in my app.js.
What I want is to make this compare function return the DATA object, so I can display the actual messages on the front end.
My hope would be to put this compare(term) function inside a route in app.js, like so:
app.get("/api/compare/:term", (req, res) => {
let {term} = req.params
let data = compare(term);
res.send(data);
})
But as I said, it's returning undefined. I tried async/await, and returning the whole first axios call, but I'm always getting undefined.
Thank you
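(Not an answer that was posted here, just a sketch of the usual fix: have compare return a promise that resolves with DATA, and await it in the route. It assumes axios and the Express app are in scope; the URL and fields come from the question, and the real comparison logic is elided.)
let compare = async (term) => {
    const response = await axios.get('https://api.mercadolibre.com/sites/MLA/search?q=' + term +
        '&condition=used&category=MLA1652&offset=0');
    // ...the real implementation would page through the results and diff against yesterday's file here...
    return {
        content: response.data.paging.primary_results,
        message: "sketch only",
        info: "compare now resolves with DATA instead of assigning it inside a nested callback"
    };
};
// in app.js the route awaits the resolved DATA before sending it
app.get("/api/compare/:term", async (req, res) => {
    let { term } = req.params;
    let data = await compare(term); // wait for DATA before sending it
    res.send(data);
});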

Node.js Readline, get the current line number

I have the following implementation where everything works but this line:
lineNumber: line.lineNumber
This line returns undefined. I am adding the full fragment of code below. My question is: does Readline provide a standard way to get the current line number? Or do I have to implement my own counter to keep track of the line number, which would be simple, but I don't want to do that if there's a standard way of doing it?
/**
* Search for occurrences of the specified pattern in the received list of files.
* @param filesToSearch - the list of files to search for the pattern
* @returns {Promise} - resolves with the information about the encountered matches for the pattern specified.
*/
const findPattern = (filesToSearch) => {
console.log(filesToSearch);
return new Promise((resolve, reject) => {
var results = [ ];
// iterate over the files
for(let theFile of filesToSearch){
let dataStream = fs.createReadStream(theFile);
let lineReader = readLine.createInterface({
input: dataStream
});
let count = 0; // this would do the trick but I'd rather use standard approach if there's one
// iterates over each line of the current file
lineReader.on('line',(line) => {
count++;
if(line.indexOf(searchPattern) > 0) {
let currLine = line.toString();
currLine = currLine.replace(/{/g, '');//cleanup { from string if present
results.push({
fileName: theFile,
value: currLine,
lineNumber: line.lineNumber //HERE: this results undefined
//lineNumber: count // this does the trick but I'd rather use standard approach.
});
}
});
// resolve the promise once the file scan is finished.
lineReader.on('close', () => resolve(results));
}
});
};
Unfortunately there isn't a way to get the line number from the readline node module. However, using ES6 it isn't difficult to roll your own counter in one line of code.
const line_counter = ((i = 0) => () => ++i)();
When we create the callback function we simply default the second parameter to the line_counter function, and we can act as though the line and the line number are both being passed when a line event occurs.
rl.on("line", (line, lineno = line_counter()) => {
console.log(lineno); //1...2...3...10...100...etc
});
Simply put, an incrementing variable together with foo(data, ++i) will pass a running count to the function each time new data arrives.
let i = 0
const stream = fs.createReadStream(yourFileName)
// note: 'data' events deliver chunks of the file, not individual lines
stream.on("data", (data) => foo(data, ++i))
const foo = (data, line) => {
    console.log("Data: ", data)
    console.log("Line number:", line)
}
You need to include the lineno param if you are using node linereader
lineReader.on('line', function (lineno, line) {
if(line.indexOf(searchPattern) > 0) {
let currLine = line.toString();
currLine = currLine.replace(/{/g, '');//cleanup { from string if present
results.push({
fileName: theFile,
value: currLine,
lineNumber: lineno
});
}
});

Asynchronous for loop: how do I make the for loop wait for function to finish?

I'm racking my brain trying to figure out how to sequence this / place callbacks to get what I need.
I have a loop that checks for the existence of files. I'd like it to call back when it's done, but the for loop and the callback finish before the fs.open calls finish... a typical asynchronous problem.
I am using node v0.10.29, express v3.14.0, and am looking at using the "async" library, but again, just can't figure out the logic that I need...
Here is what I have:
Input
function checkForAllFiles(callback)
{
var requiredFiles = [];
requiredFiles[requiredFiles.length] = "../Client/database/one.js";
requiredFiles[requiredFiles.length] = "../Client/database/two.dat";
requiredFiles[requiredFiles.length] = "../Client/database/three.xml";
requiredFiles[requiredFiles.length] = "../Client/database/four.dat";
requiredFiles[requiredFiles.length] = "../Client/database/five.dat";
requiredFiles[requiredFiles.length] = "../Client/database/six.xml";
var missingFiles = [];
for(var r=0; r<requiredFiles.length; r++)
{
fs.open(requiredFiles[r], 'r', function(err, fd){
if(err)
{
missingFiles[missingFiles.length] = err.path;
console.log("found missing file = ", err.path);
}
});
console.log("r = ", r);
}
console.log("sending callback: ", missingFiles);
callback(missingFiles);
}
Output
0
1
2
3
4
5
sending callback: []
found missing file: ../Client/database/three.xml
Desired Output
0
1
found missing file: ../Client/database/three.xml
2
3
4
5
sending callback: ["../Client/database/three.xml"]
I would use the reject method in the async module (which I see you've already found). What it will do is return an array in its callback that contains any elements that don't match a specified check function. For the check function, I'd recommend just using fs.exists instead of watching for an error on fs.open.
Using those functions you can actually reduce the whole check to one line. Something like this:
function checkForAllFiles(callback)
{
var requiredFiles = [];
requiredFiles[requiredFiles.length] = "../Client/database/one.js";
requiredFiles[requiredFiles.length] = "../Client/database/two.dat";
requiredFiles[requiredFiles.length] = "../Client/database/three.xml";
requiredFiles[requiredFiles.length] = "../Client/database/four.dat";
requiredFiles[requiredFiles.length] = "../Client/database/five.dat";
requiredFiles[requiredFiles.length] = "../Client/database/six.xml";
async.reject(requiredFiles, fs.exists, callback);
}
callback will get called with an array that contains just the files that don't exist.
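(An aside, my addition rather than part of the original answer: fs.exists is deprecated in current Node, so on newer versions the same check can be written without the async library using promises. A minimal sketch, which returns a promise instead of taking a callback:)
const fs = require('fs');
// resolves with just the files that cannot be accessed (i.e. the missing ones)
async function checkForAllFiles(requiredFiles) {
    const checks = requiredFiles.map(file =>
        fs.promises.access(file).then(() => null).catch(() => file)
    );
    return (await Promise.all(checks)).filter(Boolean);
}
// usage: checkForAllFiles(requiredFiles).then(missing => console.log(missing));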
Use the async library and the eachSeries method. Example:
async.eachSeries(array,
function(element, next) {
// do something with element
next();
}
);
It will sequentially go through the array and process each element. Calling next goes to the next element. Series makes sure it does it in the order of the array, otherwise the order of going through the array is not guaranteed. If you have other async functions called within it, just pass the next function around and call it when done with all the needed functions and the next array element will be processed.
Maybe something like this:
var missingFiles = [];
async.eachSeries(requiredFiles, function(file, nextFile){
    fs.open(file, 'r', function(err, fd){
        if(err)
        {
            missingFiles[missingFiles.length] = err.path;
            console.log("found missing file = ", err.path);
        }
        nextFile();
    });
    console.log("r = ", file);
}, function(){
    // this final callback runs only after every file has been processed
    console.log("sending callback: ", missingFiles);
    callback(missingFiles);
});
