Node.js warning when calling a function - javascript

I have the following code, which works:
if (fs.existsSync(dirName)) {
  __writeFile();
  return;
}

function __writeFile() {
  writeIntoFile(readStream, destFileName, reject);
  numOfFiles++;
}
But after executing it I'm getting this warning:
(node:3547) [DEP0013] DeprecationWarning: Calling an asynchronous
function without callback is deprecated.
Any idea how to avoid this?
Update: this is the full code:
yauzl.open(filePath, (err, zipFile) => {
  if (err) {
    __rejectAndLog(err);
    return;
  }
  zipFile.on('entry', __processEntry.bind(null, zipFile))
    .once('error', __rejectAndLog)
    .once('close', () => {
      resolve();
    });
});

function __processEntry(zipFile, entry) {
  if (/\/$/.test(entry.fileName)) {
    return;
  }
  zipFile.openReadStream(entry, (err, readStream) => {
    if (err) {
      __rejectAndLog(err);
      return;
    }
    // Path to drop the files
    let destFileName = '/' + entry.fileName;
    let dirName = path.join(res);
    if (fs.existsSync(dirName)) {
      __writeFile();
      return;
    }
    mkdirp(dirName, (err) => {
      if (err) {
        __rejectAndLog(err);
      } else {
        __writeFile();
      }
    });

    function __writeFile() {
      writeIntoFile(readStream, destFileName, reject);
      numOfFiles++;
    }
  });
}
As requested, this is the content of writeIntoFile:
function writeIntoFile(readStream, filePath, onError) {
  if (fs.existsSync(filePath)) {
    fs.chmod(filePath, '777');
  }
  var ws = fs.createWriteStream(filePath);
  ws.on('error', function (error) {
    onError(error);
  });
  readStream.pipe(ws);
}
Update 2:
I tried to change it according to the answer below and it's not working (and where should I put the mode 777?):
function writeIntoFile(readStream, filePath, onError) {
  if (fs.existsSync(filePath)) {
    fs.chmod(filePath, 0o777, function (err) {
      var ws = fs.createWriteStream(filePath);
      ws.on('error', function (error) {
        onError(error);
      });
      readStream.pipe(ws);
    });
  }
}
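For context, DEP0013 is emitted when an asynchronous fs function such as fs.chmod is called without a callback, which is exactly what the original writeIntoFile does. A minimal sketch of one way to restructure it (an assumption about the intent, not a confirmed fix): give fs.chmod a callback and make sure the write still happens when the file does not exist yet, which the update 2 version silently skips:

function writeIntoFile(readStream, filePath, onError) {
  function pipeToFile() {
    var ws = fs.createWriteStream(filePath);
    ws.on('error', onError);
    readStream.pipe(ws);
  }

  if (fs.existsSync(filePath)) {
    // Passing a callback to fs.chmod avoids the DEP0013 warning.
    fs.chmod(filePath, 0o777, function (err) {
      if (err) {
        return onError(err);
      }
      pipeToFile();
    });
  } else {
    // In the update 2 version this branch did nothing, so new files were never written.
    pipeToFile();
  }
}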

Related

NodeJS + mongoose await for subquery

I have two models, ModelA and ModelB, and I am trying to copy a tags array field from ModelB into ModelA, like this:
function run() {
  ModelA.find({}).limit(500).cursor()
    .on('data', function (doc) {
      let refID = doc.ref_id;
      ModelB.findOne({_id: refID}).exec(function (err, modelb) {
        if (err) {
          console.log(`[Error]: Getting modelb ${refID}`);
        }
        if (modelb) {
          if (modelb.tags.length > 0) {
            doc.tags = modelb.tags;
          }
        }
      });
      doc.processed = true;
      doc.save(function (err, hackAlert) {
        if (err) {
          console.log('[Error]: Saving ModelA: ' + err);
        }
        console.log(`Saved: ${doc._id}`);
      });
    })
    .on('error', function (err) {
      console.log('[Error]');
    })
    .on('end', function () {
      console.log('Done');
    });
}
The script obviously saves all ModelA instances with processed = true but given the asynchronous nature of Node, ModelA documents end up without tags.
I'm new to this, if you can't tell. What is the best, modern way to "await" the ModelB.findOne() query finishing before saving ModelA?
You can simply move the save into the callback of your first query:
function run() {
  ModelA.find({}).limit(500).cursor()
    .on('data', function (doc) {
      let refID = doc.ref_id;
      ModelB.findOne({_id: refID}).exec(function (err, modelb) {
        if (err) {
          console.log(`[Error]: Getting modelb ${refID}`);
        }
        if (modelb) {
          if (modelb.tags.length > 0) {
            doc.tags = modelb.tags;
          }
        }
        doc.processed = true;
        doc.save(function (err, hackAlert) {
          if (err) {
            console.log('[Error]: Saving ModelA: ' + err);
          }
          console.log(`Saved: ${doc._id}`);
        });
      });
    })
    .on('error', function (err) {
      console.log('[Error]');
    })
    .on('end', function () {
      console.log('Done');
    });
}
For a more up-to-date approach using async/await and Promises:
async function run() {
  ModelA.find({}).limit(500).cursor()
    .on('data', async function (doc) {
      let refID = doc.ref_id;
      const modelb = await ModelB.findOne({_id: refID}).exec()
        .catch(() => console.log(`[Error]: Getting modelb ${refID}`));
      if (modelb) {
        if (modelb.tags.length > 0) {
          doc.tags = modelb.tags;
        }
      }
      doc.processed = true;
      // doc.save() already returns a promise, so there is no .exec() to chain on it
      await doc.save()
        .catch(err => console.log('[Error]: Saving ModelA: ' + err));
      console.log(`Saved: ${doc._id}`);
    })
    .on('error', function (err) {
      console.log('[Error]');
    })
    .on('end', function () {
      console.log('Done');
    });
}
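Note that an async 'data' handler does not make the cursor wait for each document, so the saves can still overlap and 'end' may fire before they finish. A sketch of an alternative, assuming a Mongoose/Node version where query cursors are async-iterable, that processes the documents strictly one at a time:

async function run() {
  const cursor = ModelA.find({}).limit(500).cursor();

  // for await pulls the next document only after the previous iteration finishes
  for await (const doc of cursor) {
    const modelb = await ModelB.findOne({ _id: doc.ref_id }).exec()
      .catch(() => console.log(`[Error]: Getting modelb ${doc.ref_id}`));

    if (modelb && modelb.tags.length > 0) {
      doc.tags = modelb.tags;
    }

    doc.processed = true;
    await doc.save()
      .catch(err => console.log('[Error]: Saving ModelA: ' + err));
    console.log(`Saved: ${doc._id}`);
  }

  console.log('Done');
}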

Callback isn't called

I use the following code and it seems that the callback (the one that starts with "im HERE") is never called. Any idea why?
console.log("im starting");
process.start(function() {
//this line doesnt called
console.log("im HERE");
server.listen(app.get('port'), function(err) {
if (err) {
console.error(err);
} else {
console.log(' listen to: ' + app.get('port'));
}
});
});
The start method is called and finishes... any idea what it can be?
Before I added process.start, the code looked like the following, and it worked OK. Now I need to add this process.start and, when it finishes, do the server.listen:
module.exports = (function () {
  server.listen(app.get('port'), function (err) {
    if (err) {
      console.error(err);
    } else {
      console.log('listen ' + app.get('port'));
    }
  });
}());
UPDATE
This is the code of process.start:
exports.start = function () {
  Validator.validateJson(function (err) {
    console.log(err);
    process.exit(1);
  });
  plugin.parse().then(function (conf) {
    require.cache.pe.configObj = conf;
  }, function (err) {
    console.log(err);
  });
  envHandler.eventE.on('AppP', function () {
    console.log('User port ' + require.cache.per);
  });
  var run = function () {
    return Promise.all([
      childPro.create(path.join(value)),
      childPro.findAndUpdateUser()
    ]).spread(function (cmd, updatedAppEnv) {
      return Promise.all([childProc.executeChildProcess('exec', cmd, updatedAppEnv), Promise.delay(50).then(function (results) {
        return inter.ProcessRun(val);
      })]);
    });
  }();
};
I use a promise library (Bluebird), if it matters in this case.
It's a bit unclear where you want the callback to be called. In short, change the start function to accept a callback parameter and call callback() when you are done (or pass it as an argument to then at the end):
exports.start = function (callback) {
  Validator.validateJson(function (err) {
    console.log(err);
    process.exit(1);
  });
  plugin.parse().then(function (configObj) {
    if (typeof require.cache.persist === 'undefined') {
      require.cache.persist = {};
    }
    require.cache.persist.configObj = configObj;
  }, function (err) {
    console.log(err);
  });
  envHandler.eventEmitterIns.on('AppPortDef', function () {
    console.log('User port ' + require.cache.persist.port);
  });
  var run = function () {
    return Promise.all([
      childPro.create(path.join(value)),
      childPro.findAndUpdateUser()
    ]).spread(function (cmd, updatedAppEnv) {
      return Promise.all([childProc.executeChildProcess('exec', cmd, updatedAppEnv), Promise.delay(50).then(function (results) {
        return inter.ProcessRun(val);
      })]);
    });
  }();
  run.then(callback);
};
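If the caller would rather consume a promise than pass a callback, a variant of the same idea (a sketch based on the code above, with the unrelated setup elided) is to return run and chain on it from the caller:

exports.start = function () {
  // ...same validateJson / plugin.parse / event wiring as above...
  var run = Promise.all([
    childPro.create(path.join(value)),
    childPro.findAndUpdateUser()
  ]).spread(function (cmd, updatedAppEnv) {
    return Promise.all([
      childProc.executeChildProcess('exec', cmd, updatedAppEnv),
      Promise.delay(50).then(function (results) {
        return inter.ProcessRun(val);
      })
    ]);
  });
  return run; // let the caller decide what happens once start has finished
};

// caller side
process.start().then(function () {
  console.log("im HERE");
  server.listen(app.get('port'), function (err) {
    if (err) {
      console.error(err);
    } else {
      console.log(' listen to: ' + app.get('port'));
    }
  });
});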

Error while using async.parallel

I am getting a binding error while using async in Node.js. The code in question:
var async = require('async');
var fs = require('fs');
var path = require('path');

function ignoreWhiteSpaceJudge(outDesired, outGenerated) {
  var contentOutDesired = "";
  var contentOutGenerated = "";
  async.parallel([
    function (outDesired, callback) {
      console.log(outDesired);
      fs.readFile(outDesired, 'utf8', function (error, data) {
        if (error) {
          return callback(error);
        } else {
          contentOutDesired = data;
          return callback();
        }
      });
    },
    function (outGenerated, callback) {
      fs.readFile(outGenerated, 'utf8', function (error, data) {
        if (error) {
          return callback(error);
        } else {
          ontentOutGenerated = data;
          return callback();
        }
      });
    }],
    function (error) {
      if (error) {
        console.log(error);
      } else {
        console.log(contentOutDesired);
        console.log(ontentOutGenerated);
      }
    });
}

var pathToOutDesired = path.normalize('/home/repos/gabbar/testcases/outputs/output_1_1.out');
var pathToOutGenerated = path.normalize('/home/repos/gabbar/testcases/outputs/output_1_2.out');
ignoreWhiteSpaceJudge(pathToOutDesired, pathToOutGenerated);
The error I am getting looks like this:
[Function]
fs.js:423
binding.open(pathModule._makeLong(path),
^
TypeError: path must be a string
at Object.fs.open (fs.js:423:11)
at Object.fs.readFile (fs.js:206:6)
at async.parallel.fs.readFile.ontentOutGenerated (/home/repos/gabbar/validation/ignoreWhiteSpaceJudge.js:17:18)
at /home/repos/gabbar/node_modules/async/lib/async.js:570:21
at /home/repos/gabbar/node_modules/async/lib/async.js:249:17
at /home/repos/gabbar/node_modules/async/lib/async.js:125:13
at Array.forEach (native)
at _each (/home/repos/gabbar/node_modules/async/lib/async.js:46:24)
at async.each (/home/repos/gabbar/node_modules/async/lib/async.js:124:9)
at _asyncMap (/home/repos/gabbar/node_modules/async/lib/async.js:248:13)
I am relatively new to Node.js and trying to use the async module for the first time. Could somebody help me in this regard?
You are overwriting your paths with the callback function of parallel: async.parallel passes each task a single argument, the task callback, so your outDesired and outGenerated parameters are actually bound to that callback rather than to your data (which is why [Function] is logged just before the TypeError).
Just remove the first parameter from your functions, which is the callback and not your data:
function (callback) {
  console.log(outDesired);
  fs.readFile(outDesired, 'utf8', function (error, data) {
    if (error) {
      return callback(error);
    } else {
      contentOutDesired = data;
      return callback();
    }
  });
},
function (callback) {
  fs.readFile(outGenerated, 'utf8', function (error, data) {
    if (error) {
      return callback(error);
    } else {
      contentOutGenerated = data; // note: the original "ontentOutGenerated" typo leaked an implicit global
      return callback();
    }
  });
}
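A slightly tidier variant (just a sketch, not part of the original answer) drops the outer variables entirely and lets async.parallel collect the file contents in its results array, since fs.readFile's callback already has the (error, data) signature that async expects:

async.parallel([
  function (callback) {
    fs.readFile(outDesired, 'utf8', callback);
  },
  function (callback) {
    fs.readFile(outGenerated, 'utf8', callback);
  }
], function (error, results) {
  if (error) {
    return console.log(error);
  }
  console.log(results[0]); // contents of outDesired
  console.log(results[1]); // contents of outGenerated
});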

Node.js multiple query transactions

I'm using the following function for handling multiple query transactions:
db.js
function waterfall(tasks, callback) {
  pg.connect(conString, function (err, client, done) {
    if (err) {
      return callback(err);
    }
    //client.query(begin_transaction, function (err) {
    client.query('BEGIN', function (err) {
      if (err) {
        done();
        return callback(err);
      }
      var wrapIterator = function (iterator) {
        return function (err) {
          if (err) {
            //client.query(rollback_transaction, function () {
            client.query('ROLLBACK', function () {
              done();
              callback(err);
            });
          } else {
            var args = Array.prototype.slice.call(arguments, 1);
            var next = iterator.next();
            if (next) {
              args.unshift(client);
              args.push(wrapIterator(next));
            } else {
              args.unshift(client);
              args.push(function (err, results) {
                var args = Array.prototype.slice.call(arguments, 0);
                if (err) {
                  //client.query(rollback_transaction, function () {
                  client.query('ROLLBACK', function () {
                    done();
                    callback(err);
                  });
                } else {
                  //client.query(commit_transaction, function () {
                  client.query('COMMIT', function () {
                    done();
                    callback.apply(null, args);
                  });
                }
              });
            }
            async.setImmediate(function () {
              iterator.apply(null, args);
            });
          }
        };
      };
      wrapIterator(async.iterator(tasks))();
    });
  });
}
(referred from http://baudehlo.com/2014/04/28/node-js-multiple-query-transactions/)
What's wrong with the following function?
plot.js
db.waterfall([
  function (client, cb) {
    client.query("INSERT INTO mydb.plotsold" +
      "(plot_id, agent_id, plot_price, plot_date) VALUES " +
      "($1,$2,$3,$4) RETURNING id", soldInfo.PlotId, soldInfo.agentId,
      soldInfo.soldPrice, soldInfo.newDate, cb);
  },
  function (client, results, cb) {
    client.query("update mydb.listing " +
      "set status =2 where id = $1 RETURNING id", soldInfo.listingId, cb);
  }
], cb);
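The question is left without an answer here, but one likely culprit (a guess, not a confirmed fix) is that node-postgres expects the values for $1..$n as a single array, client.query(text, values, callback), rather than as separate arguments. A minimal sketch of the first task with the parameters wrapped in an array:

function (client, cb) {
  client.query(
    "INSERT INTO mydb.plotsold" +
    "(plot_id, agent_id, plot_price, plot_date) VALUES " +
    "($1,$2,$3,$4) RETURNING id",
    [soldInfo.PlotId, soldInfo.agentId, soldInfo.soldPrice, soldInfo.newDate],
    cb
  );
}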

Uploading to dropbox via node js fails

I have the following code to read from a stream and upload to Dropbox. But I'm getting a
Uncaught TypeError: Cannot call method 'toString' of undefined
at Function.Dropbox.Util.Xhr.Xhr.urlEncodeValue (node_modules\dropbox\lib\dropbox.js:3695:40)
at Function.Dropbox.Util.Xhr.Xhr.urlEncode (node_modules\dropbox\lib\dropbox.js:3689:59)
at Xhr.Dropbox.Util.Xhr.Xhr.paramsToUrl (node_modules\dropbox\lib\dropbox.js:3570:40)
at Xhr.Dropbox.Util.Xhr.Xhr.prepare (node_modules\dropbox\lib\dropbox.js:3598:14)
at Client.Dropbox.Client.Client._dispatchXhr (node_modules\dropbox\lib\dropbox.js:2137:11)
at Client.Dropbox.Client.Client.resumableUploadStep (node_modules\dropbox\lib\dropbox.js:1454:19)
.....
error when I try to run the code. If I pass false as the cursor, the error doesn't occur in the step function, but it still occurs in the finish function. Can anyone help me with this?
stream.on('data', function (data) {
  client.resumableUploadStep(data, function (error, cursor) {
    if (error) {
      return console.log(error);
    }
  });
});
stream.on('end', function () {
  client.resumableUploadFinish(fileName, function (error, stats) {
    if (error) {
      return callback(error);
    }
    return callback(null, stats);
  });
});
I used the following code and now it works: it counts the in-flight resumableUploadStep calls and only calls resumableUploadFinish once they have all returned their cursor.
var EventEmitter = require('events').EventEmitter; // needed for new EventEmitter() below

var pcursor = null;
var eventObject = new EventEmitter();
var counter = 0;
stream.on('data', function (data) {
  counter++;
  client.resumableUploadStep(data, pcursor, function (error, cursor) {
    if (error) {
      return callback(error);
    }
    counter--;
    pcursor = cursor;
    eventObject.emit('event');
  });
});
stream.on('end', function () {
  eventObject.on('event', function () {
    if (counter == 0) {
      client.resumableUploadFinish(fileName, pcursor, function (error, stats) {
        if (error) {
          return callback(error);
        }
        return callback(null, stats);
      });
      counter = -1;
    }
  });
  eventObject.emit('event');
});
It looks like you're missing the cursor parameter to resumableUploadFinish. Also, you should be passing in a cursor to resumableUploadStep after the first call too.
I think the code you want is something like this (completely untested):
var cursor = null;
stream.on('data', function (data) {
  client.resumableUploadStep(data, cursor, function (error, new_cursor) {
    cursor = new_cursor;
  });
});
stream.on('end', function () {
  client.resumableUploadFinish(fileName, cursor, function (error, stats) {
    return callback(null, stats);
  });
});
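One caveat with the untested version above is that 'end' can fire while the last resumableUploadStep callback (and therefore the latest cursor) is still pending, which is what the counter/event workaround in the question compensates for. A sketch of another way to handle it, assuming the same resumableUploadStep/resumableUploadFinish signatures used above, is to pause the stream while each chunk uploads so the steps run strictly one at a time:

var cursor = null;
stream.on('data', function (data) {
  stream.pause(); // hold further chunks (and the 'end' event) until this step finishes
  client.resumableUploadStep(data, cursor, function (error, newCursor) {
    if (error) {
      return callback(error);
    }
    cursor = newCursor;
    stream.resume();
  });
});
stream.on('end', function () {
  // all steps have completed, so cursor points at the last uploaded chunk
  client.resumableUploadFinish(fileName, cursor, function (error, stats) {
    if (error) {
      return callback(error);
    }
    return callback(null, stats);
  });
});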
