In a Chrome extension I'm using the HTML5 FileSystem API.
I'm retrieving a list of entries in a folder.
var entries = [];
var metadata = [];

listFiles(folder);

function listFiles(fs) {
    var dirReader = fs.createReader();
    entries = [];

    // Call reader.readEntries() until no more results are returned.
    var readEntries = function () {
        dirReader.readEntries(function (results) {
            if (!results.length) {
                addMeta(entries);
            } else {
                console.log(results);
                entries = entries.concat(toArray(results));
                readEntries();
            }
        });
    };
    readEntries(); // Start reading dirs.
}
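Here toArray() is presumably the usual helper that copies the array-like results list into a real Array, something like:

function toArray(list) {
    return Array.prototype.slice.call(list || [], 0);
}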
The FileEntry object does not contain metadata, and I need the last modified date. I'm able to retrieve a metadata object:
function addMeta(entries) {
    for (var i = 0; i < entries.length; i++) {
        entries[i].getMetadata(function (metadata) {
            console.log(entries);
            console.log(metadata);
        });
    }
}
The problem is that I get the metadata in a callback.
How can I join the two objects, making sure the right match is made?
The simplified result I'm looking for is:
[
    ["fileName1", "modifyDate1"],
    ["fileName2", "modifyDate2"],
]
To get lastModifiedDate, you don't need getMetadata: as per the description of this question, you can read lastModifiedDate from the File object that entry.file() hands you, though note that file() takes yet another callback.
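A minimal sketch of that approach (assuming entry is a FileEntry; directory entries have no file() method):

entry.file(function (file) {
    // `file` is a File object; lastModifiedDate is available on it
    console.log(file.name, file.lastModifiedDate);
}, function (err) {
    console.error(err);
});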
To "join the two object making sure the right match is made", because of Closures, you could use the following code to get the right results. (Assuming the data structure is [[entry, metadata]] as you mentioned)
var ans = [];
function addMeta(entries) {
    for (var i = 0; i < entries.length; i++) {
        (function (entry) {
            entry.getMetadata(function (metadata) {
                ans.push([entry, metadata]);
            });
        })(entries[i]);
    }
}
If what you want is to wait for all the asynchronous callbacks to finish, see this answer for more details: basically you can adjust your code to use Promises, or fall back to other approaches such as setInterval or a counter variable that tracks how many callbacks remain.
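For example, a Promise-based sketch of addMeta (assuming a standard Promise implementation is available; modificationTime is the date field on the FileSystem API's Metadata object):

function addMeta(entries) {
    return Promise.all(entries.map(function (entry) {
        return new Promise(function (resolve, reject) {
            entry.getMetadata(function (metadata) {
                resolve([entry.name, metadata.modificationTime]);
            }, reject);
        });
    }));
}

addMeta(entries).then(function (pairs) {
    console.log(pairs); // [["fileName1", modifyDate1], ["fileName2", modifyDate2], ...]
});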
I suggest having a look at the Promise-based bro-fs implementation of the HTML Filesystem API.
To read all entries with metadata you can do something like this:
fs.readdir('dir')
  .then(entries => {
    const tasks = entries.map(entry => fs.stat(entry.fullPath));
    return Promise.all(tasks);
  })
  .then(results => console.log(results));
Related
I have a for loop like so:
var currentLargest;
for (var site in sites) {
    // Each site is a website (www.mysite1.com, www.mysite2.com, etc.)
    $.getJSON(site).then(function(data) {
        if (data > currentLargest) {
            currentLargest = data;
        }
    });
}
// Here is where I want to use "currentLargest"
It checks multiple different websites to get the largest value. However, I can't extract the data I need after the for loop. I can only interact with currentLargest from within the promise, but there it is only relevant for ONE of the sites (the requests haven't all finished until after the for loop).
How can I access the currentLargest value after the for loop containing the getJSON promises?
// Your code:
/*
var currentLargest
for (var site in sites) {
    // Each site is a website (www.mysite1.com, www.mysite2.com, etc.)
    $.getJSON(site).then(function(data) {
        if (data > currentLargest) {
            currentLargest = data
        }
    });
}
*/

// Updated:
const sitePromisesArr = sites.map((site) => $.getJSON(site));

// $.when() expects each promise as a separate argument, so spread the array
$.when.apply($, sitePromisesArr)
    .then(function () {
        // For $.getJSON promises, each argument is a [data, statusText, jqXHR] array
        const valArr = Array.prototype.slice.call(arguments).map((args) => args[0]);
        // The largest value across all responses.
        const largest = valArr.reduce((current, next) => next > current ? next : current, 0);
        console.log(largest);
        // The site that produced the largest value (valArr preserves input order).
        console.log(sites[valArr.indexOf(largest)]);
    });
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
You need to use an async loop and then execute a callback once all the async getJSON requests have finished. You could use http://caolan.github.io/async/docs.html#each to help with this.
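A rough sketch with that library's each() (this assumes sites is an array and the async library is loaded on the page; failed requests are surfaced as errors):

var currentLargest = -Infinity;

async.each(sites, function (site, done) {
    $.getJSON(site)
        .done(function (data) {
            if (data > currentLargest) {
                currentLargest = data;
            }
            done();
        })
        .fail(function () {
            done(new Error('Request to ' + site + ' failed'));
        });
}, function (err) {
    if (err) return console.error(err);
    // All requests have finished; currentLargest is safe to use here
    console.log(currentLargest);
});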
Specifically, given a list of data, I want to loop over that list and do a fetch for each element before combining it all afterward. The problem is that, as written, the code iterates through the entire list immediately, starting all the operations at once; then, even though the fetch operations are still running, the then call I have after all of that runs before the data can have been processed.
I read something about putting all the Promises in an array and passing that array to a Promise.all() call, followed by a then that has access to all the processed data as intended, but I'm not sure exactly how to go about it in this case, since I have nested Promises in this for loop.
for (var i in repoData) {
    var repoName = repoData[i].name;
    var repoUrl = repoData[i].url;
    (function(name, url) {
        Promise.all([fetch(`https://api.github.com/repos/${username}/${repoData[i].name}/commits`),
                     fetch(`https://api.github.com/repos/${username}/${repoData[i].name}/pulls`)])
            .then(function(results) {
                Promise.all([results[0].json(), results[1].json()])
                    .then(function(json) {
                        //console.log(json[0]);
                        var commits = json[0];
                        var pulls = json[1];
                        var repo = {};
                        repo.name = name;
                        repo.url = url;
                        repo.commitCount = commits.length;
                        repo.pullRequestCount = pulls.length;
                        console.log(repo);
                        user.repositories.push(repo);
                    });
            });
    })(repoName, repoUrl);
}
}).then(function() {
    var payload = new Object();
    payload.user = user;
    //console.log(payload);
    //console.log(repoData[0]);
    res.send(payload);
});
Generally when you need to run asynchronous operations for all of the items in an array, the answer is to use Promise.all(arr.map(...)) and this case appears to be no exception.
Also remember that you need to return values in your then callbacks in order to pass values on to the next then (or to the Promise.all aggregating everything).
When faced with a complex situation, it helps to break it down into smaller pieces. In this case, you can isolate the code to query data for a single repo into its own function. Once you've done that, the code to query data for all of them boils down to:
Promise.all(repoData.map(function (repoItem) {
    return getDataForRepo(username, repoItem);
}))
Please try the following:
// function to query details for a single repo
function getDataForRepo(username, repoInfo) {
    return Promise
        .all([
            fetch(`https://api.github.com/repos/${username}/${repoInfo.name}/commits`),
            fetch(`https://api.github.com/repos/${username}/${repoInfo.name}/pulls`)
        ])
        .then(function (results) {
            return Promise.all([results[0].json(), results[1].json()]);
        })
        .then(function (json) {
            var commits = json[0];
            var pulls = json[1];
            var repo = {
                name: repoInfo.name,
                url: repoInfo.url,
                commitCount: commits.length,
                pullRequestCount: pulls.length
            };
            console.log(repo);
            return repo;
        });
}
Promise.all(repoData.map(function (repoItem) {
    return getDataForRepo(username, repoItem);
})).then(function (retrievedRepoData) {
    console.log(retrievedRepoData);
    var payload = new Object();
    payload.user = user;
    // attach the aggregated repo data before sending the response
    payload.user.repositories = retrievedRepoData;
    res.send(payload);
});
I'm a bit confused about the use of LocalForage.
I just want to save and retrieve an image from LocalForage; here is how I do it.
function preload() {
    localforage.setDriver(localforage.LOCALSTORAGE).then(function() {
        lcl_images[0] = new Object();
        lcl_images[0].key = 'lclstorage_1';
        lcl_images[0].value = 'http://www.superga.com/tcnimg/S/02/S009TE0/XBS009TE0___949______.jpg';
        lcl_images[1] = new Object();
        lcl_images[1].key = 'lclstorage_2';
        lcl_images[1].value = 'https://mir-s3-cdn-cf.behance.net/project_modules/max_1200/2150fb35419617.56f6327b44e47.gif';
        for (var i = lcl_images.length - 1; i >= 0; i--) {
            var valore = lcl_images[i].value;
            localforage.setItem(lcl_images[i].key, lcl_images[i].value, function() {
                console.log('Saved: ' + valore);
            });
        }
    });
}

function use_preloaded_image() {
    for (var i = lcl_images.length - 1; i >= 0; i--) {
        var key = lcl_images[i].key;
        localforage.getItem(lcl_images[i].key, function(err, readValue) {
            console.log('Read: ', readValue);
            document.getElementById(key).src = readValue;
        });
    }
}
The point is, I don't know if this is the right way to handle images: I store them as strings, and localForage should take care of parse and stringify.
This example works fine (when I go offline and refresh the page, the images in localStorage are visible), but I have seen people using Image objects or Blobs (which I had never heard of, and a quick search hasn't told me much), while others do an XHR request to download the image and then save it.
I can see the point of an XHR request if you need to check whether the image has actually been downloaded. Is that right?
Secondly: some say that storing Blobs is better. Why?
Please help me, thanks.
EDIT (Multiple Promises)
When I retrieve the images I must bind each of them to an <img>, but I don't know how to do that with promises: when using Promise.all() I can't use a for loop, so I have no index to bind each image src to its <img> id.
var promises = [];
for (var i = lcl_images.length - 1; i >= 0; i--) {
    promises.push(localforage.getItem(lcl_images[i].key));
}

Promise
    .all(promises)
    .then(function(value) {
        console.log(value);
    })
    .catch(function(error) {
        console.log(error);
    });

for (var i = lcl_images.length - 1; i >= 0; i--) {
    document.getElementById(lcl_images[i].key).src = value;
}

Promise
    .all(promises)
    .then(function(value) {
        console.log(value);
        document.getElementById(lcl_images[i].key).src = value;
        // I can't get the index i, no way to set <img> src for id attribute
    })
    .catch(function(error) {
        console.log(error);
    });
FIX: Promise.all().then()

Promise
    .all(promises)
    .then(function(value) {
        console.log(value);
        for (var i = lcl_images.length - 1; i >= 0; i--) {
            document.getElementById(lcl_images[i].key).src = value[i];
        }
    });
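Note: this works because Promise.all() resolves with the results in the same order as the input array, so value[i] lines up with promises[i] and therefore with lcl_images[i], regardless of which getItem call finishes first.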
Blobs are better because IndexedDB can optimize their storage on disk. In essence it stores them directly as file handles. (In cases where Blobs aren't supported or IDB isn't supported, LocalForage will convert to base64.)
To download an image as a Blob, the easiest way is to use the fetch() API or a fetch polyfill and call response.blob() (see MDN docs). Another way is to use XMLHttpRequest and set xhr.responseType = 'blob'. Example:
fetch('http://example.com/myimage.gif').then(function (response) {
    return response.blob();
}).then(function (blob) {
    console.log("yay I have a blob", blob);
}).catch(console.log.bind(console));
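Once you have the Blob, storing and reading it back with localForage is straightforward; a minimal sketch (the key name and the <img id="photo"> element are illustrative):

fetch('http://example.com/myimage.gif')
    .then(function (response) { return response.blob(); })
    .then(function (blob) { return localforage.setItem('myimage', blob); })
    .then(function () { return localforage.getItem('myimage'); })
    .then(function (storedBlob) {
        // turn the stored Blob back into something an <img> can display
        document.getElementById('photo').src = URL.createObjectURL(storedBlob);
    })
    .catch(console.log.bind(console));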
Another thing you should know: it looks like you are making a classic Promise mistake in your code, which is starting multiple Promises inside a forEach()/for loop without collecting them. You probably want Promise.all(). Please read We have a problem with promises for more details.
For working with Blobs, you can check out blob-util, which has a tutorial, or you can read the PouchDB guide to attachments. (PouchDB uses Blobs similarly to LocalForage, so the advice is similar.)
This is not a question about how to post a multipart form in Node.js, but about how to express this kind of logic (first an n-times loop (async), then a single function call (async)) in callback style.
For example, a client will post a multipart form with normal form fields:
req.files[n]: contains n images, needs to save to server's local filesystem
req.body: contains post.title, post.content, post.user
In the normal, synchronous way (PHP, Java, ...), sample code would be:
array savedPath = [];

// save images to local filesystem
foreach image in files
    savedPath.push(saveImageToLocal(image))

// append saved images path to post
var post = req.body;
post.images = savedPath;
Posts.insert(post)
But in Node.js, with callbacks, how can I write it?
var savedPath = [];
saveImageToLocal(files[0], function(path) {
    savedPath.push(path);
    saveImageToLocal(files[1], function(path) {
        savedPath.push(path);
        //.... it's n elements, how can I write it??
        var post = req.body;
        post.images = savedPath;
        Posts.insert(post, function(err, result) {
            res.send(err, result);
        });
    });
});
Or:

var savedPath = [];
for (var i = 0; i < n; i++) {
    saveImageToLocal(files[i], function(path) {
        savedPath.push(path);
    });
}

waitSaveToFinished() // ??

var post = req.body;
post.images = savedPath;
Posts.insert(post, function(err, result) {
    res.send(err, result);
});
How to do these kind of things in the way of nodejs/callback?
The best way to coordinate multiple asynchronous operations is to use promises. So, if this were my code, I would change or wrap saveImageToLocal() and Posts.insert() to return promises and then use promise features to coordinate them. If you're going to be writing much Node.js code, I'd suggest investing now in learning how promises work and using them for all async behavior.
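A sketch of the promise approach (assuming files is the array of uploaded images and saveImageToLocal(file, cb) calls back with the saved path, as in the question):

function saveImage(file) {
    return new Promise(function (resolve) {
        saveImageToLocal(file, function (path) {
            resolve(path);
        });
    });
}

Promise.all(files.map(saveImage))
    .then(function (savedPath) {
        var post = req.body;
        post.images = savedPath; // Promise.all keeps the original order
        return new Promise(function (resolve, reject) {
            Posts.insert(post, function (err, result) {
                if (err) reject(err);
                else resolve(result);
            });
        });
    })
    .then(function (result) {
        res.send(result);
    })
    .catch(function (err) {
        res.status(500).send(err);
    });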
To solve your issue without promises, you'd have to implement a counter and keep track of when all the async operations are done:
var savedPath = [];
var doneCnt = 0;
for (var i = 0; i < n; i++) {
    saveImageToLocal(files[i], function(path) {
        ++doneCnt;
        savedPath.push(path);
        // if all the requests have finished now, then do the next steps
        if (doneCnt === n) {
            var post = req.body;
            post.images = savedPath;
            Posts.insert(post, function(err, result) {
                res.send(err, result);
            });
        }
    });
}
This code looks like it is missing error handling, since most async operations can fail and report an error. Note also that savedPath is filled in completion order, which is not necessarily the order of files.
I'm running a script on an Apache webserver on a Linux box. Based on a parameter, I want to change the name of a variable (or set it).
The idea is that humDev (assigned inside the success callback below) is named humDev21, for example, where devId is the number 21 in this example.
My script looks like this:
function getHumDev(devId) {
    $.ajax({
        async: false,
        url: "/url" + devId,
        success: function(result) {
            var array = result["Device_Num_" + devId].states;
            function objectFindByKey(array, key, value) {
                for (var i = 0; i < array.length; i++) {
                    if (array[i][key] === value) {
                        humDev = array[i].value;
                    }
                }
                return humDev;
            }
            objectFindByKey(array, 'service', 'some');
        }
    });
}
If I'm looking in the wrong direction, please do let me know; maybe what I'm trying is bad practice. The reason I want the object to have a unique name is that this function is called several times by another function, based on the contents of an array. When the humDev object is named without the number suffix to make it unique, its content gets mixed up between the different calls.
I may be off base, but I am making some assumptions based on what I understand of what you are trying to do.
First, you need to understand how to do file I/O in Node.js, so let's start there:
var pathToFile,            // set with file path string
    fs = require('fs'),    // require the file I/O module API
    bunchOfHumDevs = {},
    fileContents;          // we'll cache those here for repeated use

fs.readFile(pathToFile, function(err, result) {
    if (err) {
        throw new Error(); // or however you want to handle errors
    } else {
        fileContents = JSON.parse(result); // assumes data stored as JSON
    }
});
function getHumDev(devId) {
    // first make sure we have fileContents; if not, try again in 500ms
    if (!fileContents) {
        setTimeout(function() {
            getHumDev(devId);
        }, 500);
    } else {
        var array = fileContents["Device_Num_" + devId].states,
            i = array.length;
        // if 'service' and 'some' are variable, make them params of getHumDev()
        while (i--) {
            if (array[i]['service'] === 'some') {
                // store uniquely named humDev entry
                bunchOfHumDevs['humDev' + devId.toString()] = array[i].value;
                break; // exit loop once a match is found
            }
        }
    }
    return null;
}
getHumDev(21);
Assuming a match is found for the devId 21, bunchOfHumDevs will now have a property 'humDev21' that is the object (value?) in question. Also, fileContents is now cached in the program, so you don't have to reopen the file every time you call the function.
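Hypothetical usage, once the file has loaded and getHumDev(21) has completed:

console.log(bunchOfHumDevs['humDev21']); // the matched value for device 21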