Promisify function with events - JavaScript

This is a promisified spawn function:
async function aspawn(cmd, args) {
  return new Promise((resolve, reject) => {
    const proc = spawn(cmd, args);
    proc.stderr.on('data', data => {
      console.error('err', data.toString());
    });
    proc.stdout.on('data', data => {
      console.error('stdout', data.toString());
    });
    proc.on('close', code => {
      console.error('closed with code', code);
      resolve();
    });
  });
}
I was wondering if it's possible to make it less indented.

Using async iterators and the once event-emitter helper, you could write it like this:
const { spawn } = require('child_process')
const { once } = require('events')

aspawn1('cat', ['README.md'])
  .then(() => aspawn1('cat', ['FOO.md'])) // error stream
  .then(() => aspawn2('cat', ['README.md']))

async function aspawn1 (cmd, args) {
  try {
    const proc = spawn(cmd, args)
    // in any case you can add events to `proc`
    // consume the streams
    for await (const chunk of proc.stdout) {
      console.log('>>> ' + chunk.length)
    }
    for await (const chunk of proc.stderr) {
      console.log('err >>> ' + chunk.length)
    }
    // the stream has ended, and the spawned process as well
  } catch (err) {
    // if you always need to return a resolved promise
    console.log('error happened', err)
  }
}

// Since Node v11.13.0 / v10.16.0 you may write that function like this
// to have a strict "fire and forget" spawn:
function aspawn2 (cmd, args) {
  return once(spawn(cmd, args), 'close')
}
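As a side note, once resolves with an array of the event's arguments, which for a child process close event are the exit code and the signal. A minimal sketch of a variant that also surfaces the exit code (the non-zero-exit check is my own addition, not part of the original answer):
// once(emitter, 'close') resolves with the event arguments [code, signal]
async function aspawn3 (cmd, args) {
  const [code, signal] = await once(spawn(cmd, args), 'close')
  if (code !== 0) {
    throw new Error(cmd + ' exited with code ' + code)
  }
  return code
}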

nodejs streams promise doesn't reject on error

I have code that fetches CSV files from an SFTP server and parses them. The following function doesn't reject when there is an error (permission denied) while opening the file:
const getCSV = (fileName) => {
  const results = []
  return new Promise((resolve, reject) => {
    if (!fileName) {
      resolve(results)
    }
    sftp
      .createReadStream(`${directoryToFetch}/${fileName}`)
      .on('error', (e) => {
        console.error(`Failed parsing CSV ${e}, ${directoryToFetch}/${fileName}`)
        return reject(new Error(`Failed parsing CSV ${e}: ${directoryToFetch}/${fileName}`))
      })
      .pipe(csv({ skipLines: 1, separator: '\t' }))
      .on('data', (data) => results.push(data))
      .on('end', () => {
        return resolve(results)
      })
      .on('error', () => {
        return reject('Failed parsing CSV')
      })
  })
}
The function does get to the .on('error') handler and executes the reject, but in the for loop that awaits the results of the function, the catch block is never triggered by the rejection:
const filesList = await getRelevantFileList()
const processedFiles = []
for (const file of filesList) {
  try {
    const { name } = file
    let dataFromFile = await getCSV(name)
    const dataToInsert = dataFromFile.filter((entry) => entry.SharesOutstanding > 0)
    dataFromFile = []
    processedFiles.push(file)
  } catch (error) {
    console.error(`${error} Unable to fetch ${file}`)
  }
}
One issue I can see here is that
if (!fileName) {
  resolve(results)
}
is missing a return, so it should be
if (!fileName) {
  return resolve(results)
}
This means that if the fileName is missing you will get both an error and a resolve.
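As a side note, resolve() and reject() only settle the promise; they do not return from the executor function, so any code after them still runs. A minimal sketch of that behaviour (my own illustration, not from the original answer):
// resolve()/reject() settle the promise but do not stop execution,
// and a promise can only settle once: later calls are ignored.
new Promise((resolve, reject) => {
  resolve('first')
  console.log('this still runs') // executes despite the resolve above
  reject(new Error('ignored')) // no-op: the promise has already settled
}).then((value) => console.log(value)) // logs 'first'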

Why Unexpected reserved word 'await' even when my function is declared as async?

I'm putting together some Node.js code for querying LDAP that uses promises. When I run it, I get Unexpected reserved word concerning the await on line 43. This part:
let connection = await connect(ldapURL).catch((err) => {
  console.error('LDAP server error:', err);
  reject(err);
});
The entire code is shown below.
I have a promise returned in the connect() function and that's working fine. In fact, if I remove the promise from the listObjects() function, the console.debug(results); line prints exactly what I'm expecting.
So why is the await connect() causing an error in my listObjects() function? My searching has yielded a lot of answers saying, "You need to use async," but I already have my listObjects() declared as async.
Where have I gone wrong?
Full Code:
#!/usr/bin/env node
import ldapjs from 'ldapjs';

const ldapURL = [ 'ldap://127.0.0.1:389' ];
const bindDN = 'uid=search,dc=home';
const bindPassword = 'P#ssw0rd';

function connect(serverURL) {
  return new Promise((resolve, reject) => {
    const client = ldapjs.createClient({
      url: serverURL
    });
    client.on('connect', () => {
      console.debug('Connected to:', ldapURL);
      console.debug('Binding as:', bindDN);
      client.bind(bindDN, bindPassword, (err) => {
        if (err) {
          console.debug(err.message);
          reject('Bind credentials rejected.');
        }
        else {
          resolve(client);
        }
      });
    });
    client.on('error', (err) => {
      reject('Unable to connect to ' + serverURL);
    });
  });
}
/**
 * Search LDAP and return objects.
 * @param {string} baseDN Where to start, like 'ou=People,dc=example,dc=com'
 * @param {string} filter Optional LDAP query to limit results, like '(objectClass=posixAccount)'
 * @returns {promise} ... Eventually.
 */
async function listObjects(baseDN, filter) {
  return new Promise((resolve, reject) => {
    let connection = await connect(ldapURL).catch((err) => {
      console.error('LDAP server error:', err);
      reject(err);
    });
    let opts = {
      filter: filter,
      scope: 'sub'
    };
    let results = [];
    connection.search(`${baseDN}`, opts, (err, res) => {
      res.on('searchEntry', (entry) => {
        results.push(entry);
      });
      res.on('end', () => {
        connection.unbind(() => {
          console.debug(results);
          resolve(results);
        });
      });
    });
  });
}

let ldapObjects = await listObjects('dc=home', '(objectClass=posixAccount)');
console.log(ldapObjects);
After helpful suggestions in the comments, the solution was to move the line return new Promise((resolve, reject) => { down so that it only wraps the connection.search(…) part, as suggested by Bergi. The executor callback passed to new Promise is an ordinary (non-async) function, which is why the await inside it was a syntax error.
Here is the code after that modification:
#!/usr/bin/env node
import ldapjs from 'ldapjs';

const ldapURL = [ 'ldap://127.0.0.1:389' ];
const bindDN = 'uid=search,dc=home';
const bindPassword = 'P#ssw0rd';

function connect(serverURL) {
  return new Promise((resolve, reject) => {
    const client = ldapjs.createClient({
      url: serverURL
    });
    client.on('connect', () => {
      console.debug('Connected to:', ldapURL);
      console.debug('Binding as:', bindDN);
      client.bind(bindDN, bindPassword, (err) => {
        if (err) {
          console.debug(err.message);
          reject('Bind credentials rejected.');
        }
        else {
          resolve(client);
        }
      });
    });
    client.on('error', (err) => {
      reject('Unable to connect to ' + serverURL);
    });
  });
}
/**
 * Search LDAP and return objects.
 * @param {string} baseDN Where to start, like 'ou=People,dc=example,dc=com'
 * @param {string} filter Optional LDAP query to limit results, like '(objectClass=posixAccount)'
 * @returns {promise} ... Eventually.
 */
async function listObjects(baseDN, filter) {
  let connection = await connect(ldapURL).catch((err) => {
    console.error('LDAP server error:', err);
    throw err; // `reject` is not defined in this scope; rethrow instead
  });
  let opts = {
    filter: filter,
    scope: 'sub'
  };
  let results = [];
  return new Promise((resolve, reject) => {
    connection.search(`${baseDN}`, opts, (err, res) => {
      res.on('searchEntry', (entry) => {
        results.push(entry);
      });
      res.on('end', () => {
        connection.unbind(() => {
          resolve(results);
        });
      });
    });
  });
}

let ldapObjects = await listObjects('dc=home', '(objectClass=posixAccount)');
console.log(ldapObjects);
I think you have to remove the new Promise wrapper, because an async function already wraps its return value in a Promise. I think your ldapObjects ends up storing a Promise, and listObjects returns a Promise wrapped inside a Promise.
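To illustrate that point (my own sketch, not code from the answer): an async function always wraps its return value in a Promise, although await and .then do flatten one level of nesting, so the extra wrapper is redundant rather than fatal here.
// An async function always returns a Promise wrapping its return value.
async function viaAsync() {
  return 42; // the caller receives Promise<42>, not 42
}
// await flattens nested promises, so awaiting a Promise<Promise<T>>
// still yields T; the wrapper being redundant is why removing
// new Promise is a simplification.
const value = await viaAsync();
console.log(value); // 42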

Correctly use fs write inside createReadStream on data

I am attempting to combine n binary files into a single file in JavaScript using streams. I have a write stream that is passed to the following function. I notice that the total number of bytes written does not match the actual number of bytes in the file, and is also not consistent across multiple runs.
After reading the documentation, I noticed that the write call returns a promise and is not safe to call again until the previous promise is fulfilled. I am not sure how to make readStream.on('data', function (chunk) use await, as the function is not async and I get the error await is only valid in async function.
async function concatFile (filename, fileHandle) {
  return new Promise((resolve, reject) => {
    const readStream = fs.createReadStream(filename, { highWaterMark: 1024 })
    readStream.on('data', function (chunk) {
      // read
      fileHandle.write(chunk)
    })
    readStream.on('error', e => {
      reject(e)
    })
    readStream.on('close', function (err) {
      // close
    })
    readStream.on('end', function () {
      // done
      readStream.close()
      resolve()
    })
  }) // end of Promise
}
I am using the above function in the following snippet:
const fileWriter = fs.createWriteStream('concatBins.bin', { flags: 'w' })
let writtenLen = 0
fileList = {}
fileList[0] = "foo.bin"
fileList[1] = "bar.bin"
for (const [key, value] of Object.entries(fileList)) {
  await concatFile(value, fileWriter)
  writtenLen = fileWriter.bytesWritten
  console.log('bytes written ' + writtenLen)
}
You can pause the readStream until the write is done, to avoid getting further data events, and then resume it when the write completes. You can also declare the .on('data', ...) callback async if you want to use await, but you still have to pause the readStream yourself, because async/await won't pause it for you.
// stream write that returns a promise when OK to proceed
// with more writes
function write(stream, data) {
  return new Promise((resolve, reject) => {
    if (stream.write(data)) {
      resolve();
    } else {
      // need to wait for drain event
      stream.once('drain', resolve);
    }
  });
}

async function concatFile (filename, writeStream) {
  return new Promise((resolve, reject) => {
    const readStream = fs.createReadStream(filename, { highWaterMark: 1024 });
    let paused = false;
    let ended = false;
    readStream.on('data', async function(chunk) {
      // read
      try {
        readStream.pause();
        paused = true;
        await write(writeStream, chunk);
      } catch(e) {
        // have to decide what you're doing if you get a write error here
        reject(e);
      } finally {
        paused = false;
        readStream.resume();
        if (ended) {
          readStream.emit("finalEnd");
        }
      }
    });
    readStream.on('error', e => {
      reject(e)
    });
    readStream.on('close', function (err) {
      // close
    });
    readStream.on('end', function () {
      // done
      ended = true;
      if (!paused) {
        readStream.emit('finalEnd');
      }
    });
    // listen for our real end event
    readStream.on('finalEnd', () => {
      readStream.close();
      resolve();
    });
  }) // end of Promise
}
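For comparison, a sketch of the same idea using async iteration (supported for readable streams in modern Node): the loop awaits the same promise-based write() helper, so read-side backpressure is handled by the iterator and the manual pause/resume and finalEnd bookkeeping goes away. This is my own alternative, not part of the original answer:
// Sketch: each chunk's write completes before the next chunk is pulled.
async function concatFileIter (filename, writeStream) {
  const readStream = fs.createReadStream(filename, { highWaterMark: 1024 });
  for await (const chunk of readStream) {
    await write(writeStream, chunk); // same write() helper as above
  }
}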

AWS Lambda & Node: Write data while streaming - ends prematurely and data is missing

I've got a Lambda function that is triggered by a write to an S3 bucket. It reads the JSON file that is written to the bucket, parses out the individual records, and writes them to a database.
The problem is, I'm not sure what I'm doing wrong, because the stream ends and the Lambda exits before all the data is written.
I'm in "flowing mode" on my readable stream, and I'm pausing/resuming during the db write. According to the docs, this should do the trick, but it's not working as expected.
Lambda handler:
exports.handler = async (event, context) => {
  let result = false;
  try {
    result = await parseData(event);
  } catch (e) {
    console.error(e);
  }
  return result;
};
Promise:
const StreamArray = require("stream-json/streamers/StreamArray");

async function parseData(event) {
  try {
    let objectStream = s3.getObject(params).createReadStream();
    const streamParser = StreamArray.withParser();
    return new Promise((resolve, reject) => {
      objectStream.pipe(streamParser).on("data", async streamData => {
        objectStream.pause();
        let result = await writeData(streamData);
        objectStream.resume();
      }).on("finish", () => {
        console.log("STREAM FINISH!");
        resolve(true);
      }).on("error", e => {
        console.error("Stream error:", e);
        reject(e);
      });
    });
  } catch (e) {
    console.error(e);
  }
}
Got it working by simply swapping out stream-json for JSONStream, which is a more widely used package anyhow. Works like a charm now! (Note that this version also pauses and resumes the parser stream itself, the one emitting the data events, rather than the upstream S3 object stream.)
const JSONStream = require("JSONStream");

async function parseData(event) {
  try {
    let objectStream = s3.getObject(params).createReadStream();
    const streamParser = JSONStream.parse("*");
    return new Promise((resolve, reject) => {
      objectStream.pipe(streamParser).on("data", async streamData => {
        streamParser.pause();
        let result = await writeData(streamData);
        streamParser.resume();
      }).on("finish", () => {
        console.log("STREAM FINISH!");
        resolve(true);
      }).on("error", e => {
        console.error("Stream error:", e);
        reject(e);
      });
    });
  } catch (e) {
    console.error(e);
  }
}
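As an aside, on modern Node versions async iteration sidesteps the pause/resume timing issue entirely, since the loop applies backpressure while each record is written. A sketch assuming the same s3, params, and writeData from the question (my own alternative, not the accepted fix):
const StreamArray = require("stream-json/streamers/StreamArray");

async function parseData(event) {
  const objectStream = s3.getObject(params).createReadStream();
  const streamParser = StreamArray.withParser();
  objectStream.pipe(streamParser);
  // The parser is readable in object mode, so it can be iterated;
  // each writeData() completes before the next record is pulled.
  for await (const streamData of streamParser) {
    await writeData(streamData);
  }
  return true;
}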

Node Async/Await/Promise.All not waiting for other to complete

I have 3 functions which I need to run in order, with each one finishing before the next runs, so I've done this:
var fs = require('fs');

async function create() {
  fs.writeFile('newfile.txt', 'some text here', (err) => {
    if (err) throw err;
    console.log('File is created successfully.');
    return ('File is created successfully.');
  });
}

async function waitt() {
  setTimeout(() => { return 'waited!' }, 10000);
}

async function read() {
  fs.readFile('newfile.txt', {encoding: 'utf-8'}, (err, data) => {
    if (!err) {
      console.log('received data: ' + data);
      return ('received data: ' + data);
    } else {
      console.log(err);
    }
  });
}

async function process() {
  let [r1, r2, r3] = await Promise.all([create(), waitt(), read()]);
  console.log(r1 + ' ' + r2 + ' ' + r3);
}

process();
So, process() runs create(), which creates a file, then runs waitt(), which just pauses, and finally read(), which shows the contents of the file.
The issue I'm having is that it's running in this order:
create()
read()
and then waitt()
instead of
create()
waitt()
read()
which is what I want.
How can I fix this?
This won't work:
async function waitt() {
  setTimeout(() => { return 'waited!' }, 10000);
}
because you're returning from within the setTimeout callback, not from the async-marked waitt function.
To mix callback-style code with async/await, you must first convert the callback-style code to use Promises, or use fs-extra, which already provides fs functions that return promises.
Something like this:
function waitt() {
  return new Promise((resolve) => {
    setTimeout(() => {
      resolve('waited...')
    }, 10000)
  })
}
The same applies to the rest of your functions.
Also note that if a function explicitly returns a Promise, it doesn't need to be marked as async to be awaited, since await essentially works with Promises.
Now for the order:
Promise.all doesn't run the promises in sequence (they all start immediately); for sequential execution you might be able to get away with a simple for..of, or just call the functions yourself.
function wait() {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      console.log('waited...')
      resolve('foo')
    }, 500)
  })
}

// Assume this is your promisified read function.
function read() {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      console.log('read...')
      resolve('bar')
    }, 500)
  })
}

// Assume this is your promisified create function.
function create() {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      console.log('created...')
      resolve('baz')
    }, 500)
  })
}

;(async () => {
  try {
    await create()
    await wait()
    await read()
  } catch (err) {
    console.error(err)
  }
})()
Your problem is that Promise.all does not run the promises in sequence; it only guarantees that all the promises in the list are processed.
Can you not just say:
await create();
await waitt();
await read();
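For completeness, a sketch of the original three functions rewritten on Node's built-in fs.promises API (available since Node 10), so each one returns a real promise that can be awaited in sequence. This is my own illustration of the advice above, not code from either answer:
const fs = require('fs').promises

async function create () {
  await fs.writeFile('newfile.txt', 'some text here')
  return 'File is created successfully.'
}

function waitt () {
  return new Promise((resolve) => {
    setTimeout(() => resolve('waited!'), 10000)
  })
}

async function read () {
  const data = await fs.readFile('newfile.txt', { encoding: 'utf-8' })
  return 'received data: ' + data
}

async function run () {
  const r1 = await create()
  const r2 = await waitt()
  const r3 = await read()
  console.log(r1 + ' ' + r2 + ' ' + r3)
}

run()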
