node.js large number of requests throwing error - javascript

I'm having trouble with my image downloader script. I have an array of image names (around 5000 elements), I loop over it, and on every iteration I download an image with the request module.
Everything works fine as long as the array has no more than roughly 500 elements.
If I run the script with 5000+ elements, I see many errors spammed from the request module (an err or an undefined response object), and eventually the whole application can fail with an empty-file error. I think there is an async problem because Node.js can't handle that many operations at once.
Maybe I can solve it by splitting my 5000-element array into chunks of 300 items and not iterating over (and not calling fetchImage() on) the next chunk before the previous chunk has finished. Or maybe there is a nicer way to solve my problem?
products.map(function (product) {
    fetchImage(product.imageUrl, './static/' + product.filename + '_big.', 0, 0);
    return;
});

function fetchImage(url, localPath, index, iteration) {
    var extensions = ['jpg', 'png', 'jpeg', 'bmp', ''];
    if (iteration > 2 || index === extensions.length) { // try another extension
        iteration++;
        if (iteration < 3) {
            setTimeout(function () {
                fetchImage(url, localPath, 0, iteration);
            }, 3000);
        } else {
            console.log('Fetching ' + url + ' failed or no image exists ');
            return;
        }
        return;
    }
    var fileExtension;
    if (extensions[index] === '') {
        fileExtension = extensions[0];
    } else {
        fileExtension = extensions[index];
    }
    request.get(url + extensions[index], function (err, response, body) {
        if (err || undefined === response) { // if err, try again after a 3 sec timeout
            setTimeout(function () {
                console.log('Error URL : ' + url + extensions[index]);
                fetchImage(url, localPath, index, iteration);
            }, 3000);
            return;
        } else {
            if (response.statusCode === 200) {
                request(url + extensions[index])
                    .on('error', function (err) {
                        console.log("ERRRRRROR " + url + extensions[index] + " " + err);
                        setTimeout(function () {
                            console.log('Error URL : ' + url + extensions[index]);
                            fetchImage(url, localPath, index, iteration);
                        }, 3000);
                        return;
                    })
                    .pipe(fs.createWriteStream(localPath + fileExtension)); // write image to file
                console.log('Successfully downloaded file ' + localPath + fileExtension);
                return;
            } else {
                fetchImage(url, localPath, index + 1, iteration);
            }
        }
    });
};

Fixed by using setTimeout between requests:
setTimeout(
    function () {
        fetchImage(imageUrl, './static/' + filename + '_big.', 0, 0);
    },
    300 * (i + 1) // each call waits 300 ms longer than the previous one
);
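An alternative to fixed per-item delays is to cap how many downloads run at once. Below is a minimal sketch of such a concurrency limiter; downloadAllLimited, downloadOne and the limit of 5 are illustrative names and values, not part of the original script.
// A minimal sketch: run at most `limit` downloads at a time instead of
// firing all 5000 requests in one loop. `downloadOne` is a hypothetical
// callback-style downloader (url, done) standing in for fetchImage.
function downloadAllLimited(urls, limit, downloadOne, onFinished) {
    var next = 0;      // index of the next URL to start
    var active = 0;    // downloads currently in flight
    var finished = 0;  // downloads that have completed

    function startNext() {
        // Start new downloads while we are under the limit and URLs remain.
        while (active < limit && next < urls.length) {
            var url = urls[next++];
            active++;
            downloadOne(url, function () {
                active--;
                finished++;
                if (finished === urls.length) {
                    onFinished();
                } else {
                    startNext();
                }
            });
        }
    }

    startNext();
}

// Usage sketch: never more than 5 requests in flight at once.
// downloadAllLimited(imageUrls, 5, downloadOne, function () {
//     console.log('All downloads attempted');
// });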

Related

node.js callback value trouble

My issue is downloading images with an unknown extension (it may be 'png', 'jpg', 'bmp', etc.), and I'm having trouble with the value returned by the checkHead function:
var fs = require('fs'),
    request = require('request');

var processImg = function (uri, filename) {
    if (checkHead(uri + 'png') > 2000) {
        download(uri + 'png', filename + '.png', function () {
            console.log(uri + 'png' + " - downloaded")
        })
    } else if (checkHead(uri + 'jpg') > 2000) {
        download(uri + 'jpg', filename + '.jpg', function () {
            console.log(uri + 'jpg' + " - downloaded")
        })
    } else if (checkHead(uri + 'bmp') > 2000) {
        download(uri + 'bmp', filename + '.bmp', function () {
            console.log(uri + 'bmp' + " - downloaded")
        })
    }
}

var checkHead = function (uri) {
    var length;
    request.head(uri, function (err, res, body) {
        if (err) return console.log("Error");
        length = res.headers['content-length'];
        console.log(length);
    });
    return length;
}

var download = function (uri, filename, callback) {
    request(uri).pipe(fs.createWriteStream('./static/' + filename).on('close', callback));
};
So return length; in the checkHead function always returns undefined, but console.log prints a valid number. Why?
Node.js executes your code asynchronously using callbacks. Your return can happen before the callback has completed (in this case it almost certainly always does), so at the moment of the return the variable length is still undefined, because it hasn't been assigned a value yet.
You could use promises to chain the functions, or structure your code differently.
For promise or async flow-control libraries, see e.g. async or q.
var checkHead = function (uri) {
    var length;
    // The callback is probably invoked after the return
    request.head(uri, function (err, res, body) {
        if (err) return console.log("Error");
        length = res.headers['content-length'];
        console.log(length);
    });
    // This gets executed immediately
    return length;
}
You have to use a callback so that it works the way you want:
var checkHead = function (uri, callback) {
    request.head(uri, function (err, res, body) {
        if (err) return console.log("Error");
        var length = res.headers['content-length'];
        console.log(length);
        callback(length);
    });
};
Unfortunately, because of your if-else logic I currently see no way to use promises (e.g. jQuery deferreds) here instead of callbacks, so you end up with nested callbacks, which can lead to callback hell and is a rather bad pattern; sorry for that:
checkHead(uri + 'png', function (length) {
    if (length > 2000) {
        download(uri + 'png', filename + '.png', function () {
            console.log(uri + 'png' + " - downloaded")
        });
    } else {
        checkHead(uri + 'jpg', function (length) {
            if (length > 2000) {
                download(uri + 'jpg', filename + '.jpg', function () {
                    console.log(uri + 'jpg' + " - downloaded")
                });
            } else {
                checkHead(uri + 'bmp', function (length) {
                    if (length > 2000) {
                        // download the bmp variant here, not the jpg again
                        download(uri + 'bmp', filename + '.bmp', function () {
                            console.log(uri + 'bmp' + " - downloaded")
                        });
                    }
                });
            }
        });
    }
});
BUT ECMAScript 6 takes care of that. This is a good article about generator functions, and the main idea is to use yield for asynchronous methods or functions like request.head:
var checkHead = function* (uri) {
    var length = yield request.head(uri);
};
And use next to get length:
checkHead.next();//{value: 123, done: true}
This is only a concept and I haven't tested it, but generator function notation works this way :)
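For completeness, here is a minimal sketch of the same extension probing written with promises and async/await on a modern Node version; headContentLength is an illustrative helper that wraps the callback-based request.head by hand, not part of the original code.
// A minimal sketch (not the original author's code): wrap request.head in a
// promise and probe the extensions in a plain loop with async/await.
var fs = require('fs');
var request = require('request');

function headContentLength(uri) {
    return new Promise(function (resolve, reject) {
        request.head(uri, function (err, res) {
            if (err) return reject(err);
            resolve(Number(res.headers['content-length'] || 0));
        });
    });
}

async function processImg(uri, filename) {
    var extensions = ['png', 'jpg', 'bmp'];
    for (var i = 0; i < extensions.length; i++) {
        var ext = extensions[i];
        // Treat a failed HEAD request the same as a tiny/missing image.
        var length = await headContentLength(uri + ext).catch(function () { return 0; });
        if (length > 2000) {
            // Stream the download in the background, as in the original download().
            request(uri + ext).pipe(fs.createWriteStream('./static/' + filename + '.' + ext));
            console.log(uri + ext + ' - downloaded');
            return;
        }
    }
    console.log('No image found for ' + uri);
}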

callbacks in browser after socket.io emit

I am developing a way to run callbacks in the browser page following an emit to the Socket.IO server.
server code:
/** exec php file and retrieve the stdout */
socket.on('php', function (func, uid, data) {
    console.log('php'.green + ' act:' + func + ' uid:' + uid);
    runner.exec("php " + php_[func].exec,
        function (err, stdout, stderr) {
            if (err == null) {
                socket.emit('php', {uid: uid, o: stdout});
                console.log('emitted');
            } else {
                console.log('err '.red + stdout + ' ' + stderr);
            }
        });
});
This code executes a PHP script and retrieves the output to display or parse in the browser. It receives an id that is echoed back to the page, so I know which callback to execute.
Browser code to execute the callbacks:
function log(text) {
    $('.out').append(text + '<br />');
}

window.callbacks = [];
function setcb(c) {
    var max = 0;
    $.each(window.callbacks, function (index) { max = (index > max ? index : max); });
    window.callbacks[max + 1] = c;
    return max + 1;
};

function C(k) { return (document.cookie.match('(^|; )' + k + '=([^;]*)') || 0)[2] }

var s = io.connect("http://" + window.location.host + ":8088");
//s.emit('debug', 'here');
s.on('getid', function () {
    console.log('getid cookieis: ' + C('igr'));
    s.emit('setid', C('igr'));
});

s.emit('php', 'test',
    setcb(
        function () {
            var data = JSON.parse(this);
            log('callback passed' + this);
        }
    ), null
);

s.on('php', function (data) {
    //window.callbacks[j.uid].call(j.o);
    log('rec' + JSON.stringify(data));
    //var jsn = JSON.parse(data);
    console.log(data);
    console.log(window.callbacks[data.uid]);
    window.callbacks[data.uid].call(data.o);
    delete window.callbacks[data.uid];
    window.callbacks.splice(data.uid, 1);
    console.log(window.callbacks);
});
This works, but when I try to make two requests at the same time it doesn't behave as expected, leaving one callback unexecuted and stuck in the callbacks array.
test code:
s.emit('php', 'test',
    setcb(
        function (data) { log('callback passed' + this); }
    ), null
);
s.emit('php', 'test',
    setcb(
        function (data) { log('callback passed' + this); }
    ), null
);
I want to eliminate this error and, for each event received, execute the callback I defined.
This turned out to be much simpler than I imagined: you can pass the callback itself as an argument of the emit (a Socket.IO acknowledgement).
server side code:
/** exec php file and retrieve the stdout */
socket.on('php', function (func, data, callback) {
    console.log('php'.green + ' act:' + func);
    runner.exec("php " + php_[func].exec,
        function (err, stdout, stderr) {
            if (err == null) {
                callback(stdout);
                //socket.emit('php', {uid: uid, o: stdout});
                console.log('emitted');
            } else {
                console.log('err '.red + stdout + ' ' + stderr);
            }
        });
});
client side code:
function log(text) {
    $('.out').append(text + '<br />');
}

function C(k) { return (document.cookie.match('(^|; )' + k + '=([^;]*)') || 0)[2] }

var s = io.connect("http://" + window.location.host + ":8088");
//s.emit('debug', 'here');
s.on('getid', function () {
    console.log('getid cookieis: ' + C('igr'));
    s.emit('setid', C('igr'));
});

s.emit('php', 'test', null,
    function (data) { log('d: ' + JSON.stringify(data)); }
);
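Because each emit carries its own acknowledgement callback, Socket.IO matches every server response to the callback of the emit that triggered it, so the original two-requests-at-once problem goes away. A minimal sketch:
// Two simultaneous requests, each with its own acknowledgement callback.
// Socket.IO routes each response to the callback of the matching emit,
// so no shared window.callbacks array is needed.
s.emit('php', 'test', null, function (data) {
    log('first response: ' + JSON.stringify(data));
});
s.emit('php', 'test', null, function (data) {
    log('second response: ' + JSON.stringify(data));
});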

Downloading multiple files simultaneously in blackberry using webworks and cordova 2.9

I'm working on an app which needs to download some files from a web server and store them on the device so they can be used offline. The code I'm using works fine on Android and iOS, but I'm having difficulty downloading more than one file on BlackBerry 10.
From looking at the web console it seems that the success callback executes for the first file, but not for any of the subsequent files. The fail callback is not called for the subsequent files either.
I'm using cordova 2.9, and I've included all the required webworks plugins. I have set up access to my domain in config.xml and have set the access_shared permission.
I've also edited the cordova FileTransfer.download function to call the webworks download api "blackberry.io.filetransfer.download".
Below is the JavaScript code that I've written.
// Wait for device API libraries to load
//
document.addEventListener("deviceready", onDeviceReady, false);

// device APIs are available
//
function onDeviceReady() {
    //replace list with your files you wish to download
    var files = ["files/file1.txt", "files/file2.txt", "files/dir1/file3.txt"];
    initiateDownload(files);
    //getFSRoot(files);
}

var filesToDownload = 0;
var fileList = new Array();
var failedFileList;
var numFailedFiles = 0;
var currentFileIndex;
var retryCount = 0;
var root;

function isOnline() {
    var bIsOnline = false;
    if (navigator.connection.type != Connection.NONE && navigator.connection.type != Connection.UNKNOWN) {
        bIsOnline = true;
    }
    return bIsOnline;
}

function initiateDownload(files) {
    alert("initiate download");
    failedFileList = new Array();
    filesToDownload = files.length;
    blackberry.io.sandbox = false;
    for (i = 0; i < files.length; i++) {
        currentFileIndex = i;
        console.log("initiate download of file index " + i + " File Name: " + files[i]);
        getFile(files[i]);
    }
}

function getFile(filePath) {
    //TODO: Do we need to make this function recursive to serialize asynchronous downloads?
    console.log("START of function getFile() for " + filePath);
    window.requestFileSystem(
        LocalFileSystem.PERSISTENT, 0,
        function onFileSystemSuccess(fileSystem) {
            console.log("Success getting filesystem for filePath: " + filePath);
            createDirs(fileSystem.root, filePath, -1);
        },
        function (error) {
            console.log("Failed to get the filesystem for filePath: " + filePath);
        }
    );
}

function createDirs(parentDir, filePath, index) {
    console.log("createDirs params ===> parentDir=" + parentDir.toURL() + " filePath=" + filePath + " index=" + index);
    arrDirs = filePath.split("/");
    if (index >= (arrDirs.length - 1)) {
        createFile(arrDirs[index], parentDir, filePath);
    } else {
        dirName = "myapp";
        if (index >= 0) {
            dirName = arrDirs[index];
        }
        //if device is Blackberry, build up a full directory path as we are trying to install outside of sandbox
        var path, dirToCreate = "";
        if (device.platform == "BlackBerry") { // note: '==' is needed here; a single '=' would assign and always be truthy
path = "myapp/";
console.log("JHPaths ======> arrDirs = " + arrDirs + " index = " +index);
for (i = 0; i <= index; i++){
path += arrDirs[i] + "/";
console.log("path = " + path + " i = " + i + " index = " + index);
}
dirToCreate = blackberry.io.home + "/" + path;
dirToCreate = dirToCreate.substring(0, dirToCreate.length - 1);
console.log("JHPaths Trying to create " + dirToCreate);
dirName = dirToCreate;
}
parentDir.getDirectory(dirName, {create: true, exclusive: false},
function (directoryEntry) {
console.log("Got directory " + directoryEntry.fullPath);
createDirs(directoryEntry, filePath, index + 1);
},
function (error) {console.log("Failed to get directory " + dirName + " Error code : " + error.code);});
}
}
function createFile(fileName, parentDir, filePath)
{
parentDir.getFile(
fileName, {create: true, exclusive: false},
function gotFileEntry(fileEntry)
{
localPath = fileEntry.fullPath;
if (isOnline())
{
console.log("Before remove");
fileEntry.remove(
function(){
console.log("FileEntry remove() was successful");
},
function(){
console.log("FileEntry remove() was failed");
}
);
console.log("After remove");
var fileTransfer = new FileTransfer();
//replace URL with the URL code you wish to download from
var baseURL = "http://<ip-address>/WebFolder/";
var uri = encodeURI(baseURL + filePath);
fullFilePath = blackberry.io.home + "/myapp/" + filePath;
fileTransfer.download(
uri,
/*localPath,*/
fullFilePath,
function(entry) {
console.log("download complete: " + entry.fullPath);
/*if(currentFileIndex < fileList.length){
currentFileIndex++;
}*/
if (device.platform == "iOS")
{
console.log("Setting file metadata");
parentDir.getFile(
fileName, {create:false, exclusive: false},
function gotFileEntry(fileEntry)
{
fileEntry.setMetaData(
function(){console.log("Set metadata for " + fileEntry.fullPath)},
function(){console.log("Set metadata failed for " + fileEntry.fullPath)},
{"com.apple.MobileBackup": 1}
);
},
function getFileFailed(fileError)
{
console.log("Get file failed:" + fileError.code);
}
);
}
filesToDownload--;
if (filesToDownload == 0 && failedFileList.length == 0)
{
//Call to retry failed Files
}
else if(failedFileList.length > 0){
if(retryCount < 1){
retryFailedFiles();
}
else{
}
}
},
function(error) {
console.log("Cache file download error source " + error.source);
console.log("Cache file download error target " + error.target);
console.log("Cache file download error code " + error.code);
//failedFileList[numFailedFiles++] = filePath;
failedFileList[numFailedFiles++] = filePath;
filesToDownload--;
if (filesToDownload == 0 && failedFileList.length == 0)
{
}
else if(failedFileList.length > 0){
if(retryCount < 1){
retryFailedFiles();
}
else{
}
}
}
);
}
},
function getFileFailed(fileError)
{
console.log("Create file failed:" + fileError.code);
filesToDownload--;
/*if (filesToDownload == 0)
{
callbackDeferred.resolve();
}*/
}
);
}
function retryFailedFiles(){
console.log("Retrying failed files");
retryCount++;
initiateDownload(failedFileList);
}
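The TODO comment in getFile() above asks whether the downloads should be serialized. A minimal sketch of that idea follows, assuming getFile were refactored to accept a completion callback; downloadSequentially and getFileWithCallback are hypothetical names, not part of the original app.
// A minimal sketch of serializing the downloads: start the next file only
// after the current one has finished (successfully or not), so only one
// FileTransfer download is ever in flight at a time.
function downloadSequentially(files, index, onAllDone) {
    if (index >= files.length) {
        onAllDone();
        return;
    }
    // Assumption: getFileWithCallback behaves like getFile but calls `done`
    // once the transfer for files[index] has succeeded or failed.
    getFileWithCallback(files[index], function done() {
        downloadSequentially(files, index + 1, onAllDone);
    });
}

// Usage sketch:
// downloadSequentially(files, 0, function () { console.log("All files processed"); });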

Simple if statement not working in NodeJS

I wrote a Node.js app that uses the eBay API to get listings from eBay. I'm having an issue where certain items pass through even though they are supposed to be filtered out by a simple if statement.
The app receives post data from the front end as JSON, executes each search and then filters items out based on certain params. Here is the offending code:
if ( items[i].listingInfo.listingType != 'Auction' ) {
    //console.log( items[i].listingInfo.listingType );
    if ( items[i].primaryCategory.categoryId == '9355' ) {
        //console.log( items[i].primaryCategory.categoryId );
        if ( price < maxPrice && price > 40 ) {
            //console.log( price, maxPrice );
            file = path +
                items[i].itemId + '-' +
                price + '-' + maxPrice + '-' +
                items[i].primaryCategory.categoryId + '-' +
                items[i].listingInfo.listingType;
            if ( !fs.existsSync( file ) ) {
                console.log(
                    'File ' + file + ' does not exist.',
                    !fs.existsSync( file ),
                    items[i].listingInfo.listingType,
                    price < maxPrice,
                    items[i].itemId
                );
                fs.writeFile( file, ' ', function(err) {
                    if (err) {
                        if (debug)
                            console.log('Writing ' + file + ' failed.');
                    }
                    else {
                        if (debug)
                            console.log('Writing ' + file + ' worked.');
                        returnData.success = true;
                        returnData.results[ result.itemId ] = result;
                        console.log( price, maxPrice, !fs.existsSync( file ) );
                        console.log('success');
                    }
                })
            }
            else {
                returnData.discard.file[ result.itemId ] = result;
                delete returnData.results[ result.itemId ];
            }
        }
        else {
            returnData.discard.price[ result.itemId ] = result;
            if (debug)
                console.log('FAILED (price): ' + items[i].itemId + ' is ' + ( price - maxPrice ) + ' greater than maxPrice.');
        }
    }
    else {
        returnData.discard.cat[ result.itemId ] = result;
        if (debug)
            console.log('FAILED (categoryId): ' + items[i].itemId + ' is ' + items[i].primaryCategory.categoryId);
    }
}
else {
    returnData.discard.type[ result.itemId ] = result;
    if (debug)
        console.log('FAILED (listingType): ' + items[i].itemId + ' is a ' + items[i].listingInfo.listingType);
}
As you can see, the line if ( price < maxPrice && price > 40 ) should filter out any items priced above maxPrice or below 40. However, it does not do this. I have no idea why this is happening or what is going on here; it seems very simple and straightforward, but it isn't. Here is the returned object, where you can see that it's not working properly:
111004318957:
listingType: "FixedPrice"
maxPrice: 170
price: 349
I'm also using node clusters, so my server.js file has this:
function start(route, handle) {
    if ( cluster.isMaster ) {
        for ( var i = 0; i < numCPUs; i++ ) {
            cluster.fork();
        }
        cluster.on('exit', function( worker, code, signal) {
            console.log( 'worker ' + worker.process.pid + ' died' );
        })
    }
    else {
        function onRequest(request, response) {
            var postData = "";
            var pathname = url.parse(request.url).pathname;
            request.setEncoding("utf8");
            request.addListener("data", function(postDataChunk) {
                postData += postDataChunk;
            });
            request.addListener("end", function() {
                //console.log('Request ended.');
                if ( postData != '' ) {
                    postData = JSON.parse(postData);
                }
                //console.log(postData.search.searches[0]);
                route(handle, pathname, response, postData);
            });
        }
        http.createServer(onRequest).listen(8888);
        console.log("Server has started.");
    }
}
Any help here is appreciated, thanks.
EDIT: I should have explained that the 111004318957 is the itemId that is returned by eBay. The result object looks like this:
results: {
    itemId1: {
        listingType: '',
        maxPrice: '',
        price: ''
    },
    itemId2: {
        listingType: '',
        maxPrice: '',
        price: ''
    }
}
EDIT 2: price is set before this code snippet. It's returned in eBay's response, and its location depends on items[i].listingInfo.listingType, so there's a simple if/else to set it.
if ( items[i].listingInfo.listingType == 'AuctionWithBIN' ) {
    price = parseInt( items[i].listingInfo.buyItNowPrice.USD );
}
else {
    price = parseInt( items[i].sellingStatus.currentPrice.USD );
}
JSON returns listingType, maxPrice, price.
Try if (items[i].price < maxPrice && items[i].price > 40)
The author will almost certainly not be able to clarify whether my statement is true, since the question was asked six years ago.
However, it is fairly certain that the problem has to do with the following part of the code:
fs.writeFile( file, ' ', function(err) {
    if (err) {
        if (debug)
            console.log('Writing ' + file + ' failed.');
    }
    else {
        if (debug)
            console.log('Writing ' + file + ' worked.');
        returnData.success = true;
        returnData.results[ result.itemId ] = result;
        console.log( price, maxPrice, !fs.existsSync( file ) );
        console.log('success');
    }
})
fs.writeFile is asynchronous, and if the OP is looping over a list of results, then the result in returnData.results[ result.itemId ] = result will always refer to the last element of that loop by the time the callback runs, whether or not that element matches the condition if ( price < maxPrice && price > 40 ).
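A minimal sketch of that failure mode, using a hypothetical loop rather than the OP's actual surrounding code: every var-scoped result is shared by all the callbacks, so capturing it per iteration fixes the mismatch.
var fs = require('fs');

// Hypothetical stand-ins for the OP's surrounding loop variables.
var items = [{ itemId: 'a' }, { itemId: 'b' }];
var returnData = { results: {} };

// Sketch of the bug: every fs.writeFile callback closes over the same
// var-scoped `result`, which holds the LAST item by the time callbacks fire.
for (var i = 0; i < items.length; i++) {
    var result = items[i];
    fs.writeFile('/tmp/' + result.itemId, ' ', function (err) {
        if (!err) {
            returnData.results[result.itemId] = result; // always the last item
        }
    });
}

// One fix: capture the current value per iteration with an IIFE
// (or simply use `let` in modern Node), so each callback sees its own result.
for (var j = 0; j < items.length; j++) {
    (function (result) {
        fs.writeFile('/tmp/' + result.itemId, ' ', function (err) {
            if (!err) {
                returnData.results[result.itemId] = result; // correct item
            }
        });
    })(items[j]);
}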

How to avoid timeout while reading rows from a query (Phonegap + Javascript)

I am trying to insert around 58,000 rows from a query into a string, but at around row 8,000 I get a timeout error.
I've already tried to use setTimeout, but it was of no use.
Here is the code that I am working on:
function onQuerySuccess(tx, results) {
    console.log("Entering onQuerySuccess");
    if (results.rows) {
        console.log("Rows: " + results.rows.length);
        var len = results.rows.length;
        if (len > 0) {
            store_html(results, 0);
            console.log("Finished Reading Rows: " + len);
            saveNotes();
            console.log("Finished Saving Notes");
        } else {
            //This should never happen
            console.log("No rows.");
        }
    } else {
        alert("No records match selection criteria.");
    }
    console.log("Leaving openView");

    function store_html(results, rows_complete) {
        rows_complete = store_html_input(results, rows_complete);
        console.log("Returning row:" + rows_complete);
        if (rows_complete < results.rows.length) {
            setTimeout(store_html(results, rows_complete), 50);
        }
    }

    function store_html_input(results, rows_complete) {
        for (var i = rows_complete; i < rows_complete + 100; i++) {
            gpsTextFile = gpsTextFile + results.rows.item(i).section + ' ' + results.rows.item(i).timestamp + ' ' + results.rows.item(i).latitude + ' ' +
                results.rows.item(i).longitude + ' ' + results.rows.item(i).acx + ' ' + results.rows.item(i).acy + ' ' +
                results.rows.item(i).acz + ' ' + results.rows.item(i).speed;
            gpsTextFile = gpsTextFile + "\n\r";
        }
        return i;
    }
}
So I get the error "JavaScript execution exceeded timeout".
Thank you for any help! Best regards.
You need to change your setTimeout() to NOT execute the function immediately. Change from this:
setTimeout(store_html(results, rows_complete), 50);
to this:
setTimeout(function() {store_html(results, rows_complete)}, 50);
As you had it before, it was immediately executing store_html(results, rows_complete) and passing its return value to setTimeout(), which was not delaying anything. This is a common mistake (the second one of these problems I've answered today).
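For reference, a minimal sketch of chunked processing built on that corrected setTimeout pattern, with the loop bound also clamped to the row count; storeHtmlChunked and the chunk size of 100 are illustrative and not part of the original code, while gpsTextFile is the same global string the question builds up.
// Process the result set in chunks of 100 rows, yielding back to the event
// loop between chunks so the WebView's script watchdog doesn't time out.
function storeHtmlChunked(results, start, onDone) {
    var end = Math.min(start + 100, results.rows.length); // clamp to the row count
    for (var i = start; i < end; i++) {
        var row = results.rows.item(i);
        gpsTextFile += row.section + ' ' + row.timestamp + ' ' + row.latitude + ' ' +
            row.longitude + ' ' + row.acx + ' ' + row.acy + ' ' +
            row.acz + ' ' + row.speed + '\n\r';
    }
    if (end < results.rows.length) {
        // Pass a function, not the result of calling it, so the next chunk
        // really does run 50 ms later.
        setTimeout(function () { storeHtmlChunked(results, end, onDone); }, 50);
    } else {
        onDone();
    }
}

// Usage sketch:
// storeHtmlChunked(results, 0, function () { saveNotes(); });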
