Wrap an FTP request in async/await - JavaScript

I am trying to perform an FTP request, wait until the file has downloaded, and then close the FTP module. When both of these actions have finished, list the contents of a directory. At the moment it happens in the opposite order.
I have wrapped the code in an async function and prepended the FTP call with await, but the directory listing is still logged first. Can you spot the error in the async function?
(async function () {
  await Ftp.get("document.txt", "document.txt", err => {
    if (err) {
      return console.error("There was an error retrieving the file.");
    }
    console.log("File copied successfully!");
    Ftp.raw("quit", (err, data) => {
      if (err) {
        return console.error(err);
      }
      console.log("Bye!");
    });
  });
})()

// Read the content from the /tmp directory to check it's empty
fs.readdir("/", function (err, data) {
  if (err) {
    return console.error("There was an error listing the /tmp/ contents.");
  }
  console.log('Contents of tmp file above, after unlinking: ', data);
});

First, await only works with promises, and Ftp.get apparently uses a callback instead of a promise, so you'll have to wrap Ftp.get in a promise.
Second, your fs.readdir call is outside the async function, so it is not affected by the await. If you need it to be delayed, it has to be inside the async function, after the await statement.
Put together, that looks something like this:
(async function () {
  await new Promise((resolve, reject) => {
    Ftp.get("document.txt", "document.txt", err => {
      if (err) {
        reject("There was an error retrieving the file.");
        return;
      }
      console.log("File copied successfully!");
      Ftp.raw("quit", (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      });
    });
  });
  fs.readdir("/", function (err, data) {
    if (err) {
      return console.error("There was an error listing the /tmp/ contents.");
    }
    console.log('Contents of tmp file above, after unlinking: ', data);
  });
})()

I usually try to separate things out. It looks like you wanted to save a file, so I built this out with that in mind. I put each request into its own promise. I don't think you need Ftp.raw. I am not sure if Ftp is the node library or simply the variable name of another library.
const util = require("util");
const fs = require("fs");

const fsOpen = util.promisify(fs.open);
const fsWriteFile = util.promisify(fs.writeFile);
const fsClose = util.promisify(fs.close);

async function saveDocumentAndListDirectoryFiles() {
  let documentData;
  let fileToCreate;
  let listDirectoryFiles;

  // We get the document
  try {
    documentData = await getDocument();
  } catch (error) {
    console.log(error);
    return;
  }

  // Open the file for writing
  try {
    fileToCreate = await fsOpen("./document.txt", "wx");
  } catch (err) {
    console.error("Could not create new file, it may already exist");
    return;
  }

  // Write the new data to the file
  try {
    await fsWriteFile(fileToCreate, documentData);
  } catch (err) {
    console.error("Error writing to new file");
    return;
  }

  // Close the file
  try {
    await fsClose(fileToCreate);
  } catch (err) {
    console.error("Error closing new file");
    return;
  }

  // List all files in a given directory
  try {
    listDirectoryFiles = await listFiles("/");
  } catch (error) {
    console.log("Error: No files could be found");
    return;
  }

  console.log(
    "Contents of tmp file above, after unlinking: ",
    listDirectoryFiles
  );
}
// Get a document
function getDocument() {
  return new Promise(async function (resolve, reject) {
    try {
      await Ftp.get("document.txt", "document.txt");
      resolve();
    } catch (err) {
      reject("There was an error retrieving the file.");
    }
  });
}

// List all the items in a directory
function listFiles(dir) {
  return new Promise(function (resolve, reject) {
    fs.readdir(dir, function (err, data) {
      if (err) {
        return reject("Unable to locate any files");
      }
      resolve(data);
    });
  });
}

saveDocumentAndListDirectoryFiles();
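As an aside, if Ftp.get and Ftp.raw follow the standard Node (err, result) callback convention shown in the question, util.promisify can generate the promise wrappers for you. This is only a sketch; whether the library supports being called this way (and whether it needs its this binding preserved) is an assumption.

const util = require("util");
const fs = require("fs");

// Sketch: assumes Ftp.get / Ftp.raw take a trailing Node-style (err, result) callback
const ftpGet = util.promisify(Ftp.get.bind(Ftp));
const ftpRaw = util.promisify(Ftp.raw.bind(Ftp));

(async function () {
  try {
    await ftpGet("document.txt", "document.txt");
    console.log("File copied successfully!");
    await ftpRaw("quit");
    console.log("Bye!");
    // fs.promises is available on Node 10+
    const data = await fs.promises.readdir("/");
    console.log("Contents of tmp file above, after unlinking: ", data);
  } catch (err) {
    console.error(err);
  }
})();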

Related

readFile returns undefined

I have a situation where I'm trying to send a request with the content of a file, but the problem is that the content arrives undefined. How can I solve this? I tried multiple versions from Stack Overflow, but nothing has worked so far.
const ifExists = (filePath) => {
  try {
    if (fs.existsSync(filePath)) {
      return true;
    }
  } catch (err) {
    console.log(err);
  }
  return false;
};

const readMyFile = async (filePath) => {
  const fileExists = ifExists(filePath);
  if (fileExists) {
    fs.readFile(filePath, (err, data) => {
      if (err) {
        console.log("Error occurred when trying to read the file.");
        return false;
      }
      console.log("File successfully read.");
      return data; // data has the right content here
    });
  } else {
    console.log("File not found");
    return false;
  }
};

const getFile = async function (req, res, next) {
  try {
    const content = await readMyFile(filePath); // the content is undefined here
    res.writeHead(200, { "Content-Type": "application/json" });
    res.write(JSON.stringify(content));
  } catch (err) {
    console.log("Error occurred.");
    res.status(500).send("Error");
  } finally {
    res.end();
  }
};
Thank you for your time!
fs.readFile uses a callback and does not return a promise, which means it can't be awaited directly in an async function. If you want to use an async function, I suggest returning a promise:
const readFile = (filePath) => {
  return new Promise((resolve, reject) => {
    if (!ifExists(filePath)) {
      return reject(Error("File not found"));
    }
    fs.readFile(filePath, (err, data) => {
      if (err) {
        return reject(err);
      }
      resolve(data);
    });
  });
};
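With that helper in place, the question's getFile works unchanged with const content = await readFile(filePath). On Node 10+ you can also skip the manual wrapper entirely and use the built-in promise API; a minimal sketch:

const { promises: fsp } = require("fs");

// fsp.readFile rejects (e.g. with ENOENT if the file is missing),
// so the caller's try/catch handles the "file not found" case too
const readMyFile = (filePath) => fsp.readFile(filePath, "utf8");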

Rename files asynchronously in Node.js if destination files don't exist

I am trying to rename files asynchronously in Node.js only if destination files don't exist.
I made a quick test as follows:
const fs = require('fs')
const files = [ 'file1', 'file2', 'file3' ]
const new_name = 'new-name' // same destination name for all
fs.exists() - DEPRECATED
for (let file of files)
  fs.exists(new_name, (exists) => {
    if (!exists) fs.rename(file, new_name, (err) => {})
  })

fs.access() - RECOMMENDED

for (let file of files)
  fs.access(new_name, fs.constants.F_OK, (err) => {
    if (err) fs.rename(file, new_name, (err) => {})
  })

fs.move() - from fs-extra

const fs_extra = require('fs-extra')

for (let file of files)
  fs_extra.move(file, new_name, { overwrite: false }, (err) => {})
Each time, all 3 files were overwritten and renamed to one file.
I believe this happens because all of the exists checks fire before any rename happens.
I know how to accomplish this task synchronously, but I want to be sure that there is no proper async way to do it.
You can create a Promise which resolves when the file is renamed:
fs.rename(file, new_name, (err) => {
  resolve(); // <------
});
or when renaming is skipped
fs.access(new_name, fs.constants.F_OK, (err) => {
  if (err) {
    return fs.rename(file, new_name, (err) => {
      resolve();
    });
  }
  resolve(); // <------
});
Full code
(async () => {
  for (let file of files) {
    await new Promise((resolve) => {
      fs.access(new_name, fs.constants.F_OK, (err) => {
        if (err) {
          return fs.rename(file, new_name, (err) => {
            resolve();
          });
        }
        resolve();
      });
    });
  }
})();
and if you don't want to mix async/await with Promise
(async () => {
  function rename(file, new_name) {
    return new Promise((resolve) => {
      fs.access(new_name, fs.constants.F_OK, (err) => {
        if (err) {
          return fs.rename(file, new_name, (err) => {
            resolve();
          });
        }
        resolve();
      });
    });
  }

  for (let file of files) {
    await rename(file, new_name);
  }
})();
@ponury-kostek's solution works brilliantly and is marked as the accepted answer.
I ended up with the following code since it's a bit shorter:
async function rename_files() {
  for (let file of files)
    await fs.move(file, new_name)
}

rename_files()
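For completeness: the fs in that snippet has to be fs-extra (core fs has no move), and fs-extra returns a promise when no callback is passed, which is why await fs.move(...) works. A sketch with error handling, assuming the same files and new_name variables as above:

const fse = require('fs-extra')

async function rename_files() {
  for (let file of files) {
    try {
      // overwrite defaults to false, so an existing destination makes move() reject
      await fse.move(file, new_name)
    } catch (err) {
      console.error(`skipped ${file}: ${err.message}`)
    }
  }
}

rename_files()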
Instead of wrapping the fs library in promises, I like to import the promise implementation of the fs library and then call the fs methods with await.
import { promises as fs_promises } from 'fs'; // The promise implementation of the fs library.

async function renameFile() {
  const fileFullPath = '1234.txt';
  const newFileFullPath = '5678.txt';
  try {
    await fs_promises.rename(fileFullPath, newFileFullPath);
    console.log("\nFile Renamed\n");
  } catch (error) {
    console.log(error);
  }
}

await renameFile(); // Call the async method (top-level await, so this must run in an ES module).
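Note that the snippet above renames unconditionally. To keep the original requirement (only rename when the destination does not already exist), the promise API can combine access and rename; a minimal sketch:

import { promises as fs_promises, constants } from 'fs';

async function renameIfAbsent(oldPath, newPath) {
  try {
    await fs_promises.access(newPath, constants.F_OK);
    // no error: the destination already exists, so skip the rename
  } catch {
    // access rejected: destination is missing, safe to rename
    // (still racy if another process creates the destination in between)
    await fs_promises.rename(oldPath, newPath);
  }
}

for (const file of ['file1', 'file2', 'file3']) {
  await renameIfAbsent(file, 'new-name');
}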

Refactoring promise function to es6 format, await doesn't actually await

So I'm just revisiting some old code to tidy up and modernize some syntax in my Partum-React project. But when I reformat this specific older traditional function() returning new Promise... into an async () => ... style function, it doesn't seem to wait for the work to finish when awaited. The answers I found don't seem to solve my problem, since as far as I can tell my async function is written correctly and should work as expected. I also already reformatted some other function/promise functions into async functions without an issue, so I'm pretty stumped as to why this specific function doesn't seem to get properly wrapped in a promise.
The old function works as expected; the program properly waits for it to complete before continuing execution (found in helper.js in the /src folder):
function shell(command, log = false, cb = null) {
  return new Promise((resolve, reject) => {
    exec(command, (err, stdout, stderr) => {
      if (err) {
        reject(err);
      } else {
        if (cb) cb(stdout);
        if (log) process.stdout.write(`\n${stdout}\n\n`);
        resolve({
          stdout,
          stderr,
        });
      }
    });
  });
}
The rewritten function doesn't seem to be awaited properly when called with await: consecutive shell calls run without waiting for one another, causing git to error out. I'm sure it's a small mistake on my part, but I just can't seem to see it. As far as I know, this function should be properly wrapped in a promise and behave as such:
const shell = async (command, log = false, cb = null) => {
  exec(command, (err, stdout, stderr) => {
    if (err) throw err;
    if (cb) cb(stdout);
    if (log) process.stdout.write(`\n${stdout}\n\n`);
    return {
      stdout,
      stderr,
    };
  });
};
This is where the function is being called (found in initializer.js in the /src folder). I know, this also needs some major cleaning up:
finalize() {
  return new Promise((resolve, reject) => {
    process.stdout.write('finalizing project\n');
    copy(this.tempPath, this.destination, (err) => {
      if (err) {
        reject(err);
      } else {
        process.stdout.write('cleaning up temporary files\n');
        rmdir(this.tempPath, async (err) => { // eslint-disable-line
          if (err) {
            reject(err);
          } else {
            try {
              await shell(`mkdir ${path.join(this.destination, this.options.componentPath)}`);
              if (this.options.redux) {
                await shell(`mkdir ${path.join(this.destination, this.options.reduxPath, this.options.actionPath)}`);
              }
              await shell(`git -C ${this.destination} init`);
              await shell(`git -C ${this.destination} add .`);
              await shell(`git -C ${this.destination} commit -m 'Initialized with Partum-React'`);
              process.stdout.write(
                `\nrunning npm install inside ${this.destination}\n`,
              );
              await npmInstall(this.destination, this.silent);
              resolve(true);
            } catch (err) { // eslint-disable-line
              reject(err);
            }
          }
        });
      }
    });
  });
}
Console logging shell(...) without await logs Promise { undefined }.
Console logging await shell(...) logs { undefined }.
Thank you in advance for any help on this!
No, your async function is not written correctly. You cannot replace the new Promise constructor with async/await syntax. If something provides only a callback API, you need to promisify it before you can use await on it.
You should be doing
function shell(command) {
  return new Promise((resolve, reject) => {
    exec(command, (err, stdout, stderr) => {
      if (err) {
        reject(err);
      } else {
        resolve({ stdout, stderr });
      }
    });
  });
}

function copy(source, destination) {
  return new Promise((resolve, reject) => {
    …
  });
}

function rmdir(path) {
  return new Promise((resolve, reject) => {
    …
  });
}
and then you can write your finalize function using async/await:
async finalize() {
  process.stdout.write('finalizing project\n');
  await copy(this.tempPath, this.destination);
  process.stdout.write('cleaning up temporary files\n');
  await rmdir(this.tempPath);
  await shell(`mkdir ${path.join(this.destination, this.options.componentPath)}`);
  if (this.options.redux) {
    await shell(`mkdir ${path.join(this.destination, this.options.reduxPath, this.options.actionPath)}`);
  }
  await shell(`git -C ${this.destination} init`);
  await shell(`git -C ${this.destination} add .`);
  await shell(`git -C ${this.destination} commit -m 'Initialized with Partum-React'`);
  process.stdout.write(`\nrunning npm install inside ${this.destination}\n`);
  await npmInstall(this.destination, this.silent);
  return true;
}
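As a shortcut, Node's util.promisify can generate these wrappers, and child_process.exec ships a custom promisified form that already resolves with { stdout, stderr }; a minimal sketch:

const util = require('util');
const { exec } = require('child_process');

// resolves with { stdout, stderr }, rejects if the command fails
const shell = util.promisify(exec);

// usage: const { stdout } = await shell('git status');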
const shell = async (command, log = false, cb = null) => {
  /* The problem lies here: exec is started with a callback, so this async
     function returns immediately and resolves with undefined. */
  exec(command, (err, stdout, stderr) => {
    if (err) throw err;
    if (cb) cb(stdout);
    if (log) process.stdout.write(`\n${stdout}\n\n`);
    return {
      stdout,
      stderr,
    };
  });
};

JavaScript: what is a better way to code nested callbacks?

I have 3 layers of callbacks like this:
app.post('/', (req, res) => {
  var filename = `outputs/${Date.now()}_output.json`;
  let trainInput = req.files.trainInput;
  let trainOutput = req.files.trainInput;
  let testInput = req.files.trainInput;
  // first
  trainInput.mv(`inputs/${req.body.caseName}/train_input.csv`, function (err) {
    if (err) return res.status(500).send(err);
    // second
    trainOutput.mv(`inputs/${req.body.caseName}/train_output.csv`, function (err) {
      if (err) return res.status(500).send(err);
      // third
      testInput.mv(`inputs/${req.body.caseName}/test_input.csv`, function (err) {
        if (err) return res.status(500).send(err);
        res.send('success');
      });
    });
  });
});
In this case there are only 3 file uploads. In another case I have more than 10 file uploads, which makes 10 layers of callbacks. I know this happens because of JavaScript's asynchronous nature.
Is there any way to make this nicer? With 10 layers of callbacks, the code drifts horizontally and looks weird.
Thanks
You can use the following code to make your code look better and avoid callback hell:
app.post('/', async (req, res) => {
  var filename = `outputs/${Date.now()}_output.json`;
  let trainInput = req.files.trainInput;
  let trainOutput = req.files.trainInput;
  let testInput = req.files.trainInput;
  try {
    var result1 = await trainInput.mv(`inputs/${req.body.caseName}/train_input.csv`);
    var result2 = await trainOutput.mv(`inputs/${req.body.caseName}/train_output.csv`);
    var result3 = await testInput.mv(`inputs/${req.body.caseName}/test_input.csv`);
    res.send('success');
  }
  catch (error) {
    res.status(500).send(error);
  }
});
You can make the functions return a Promise.
I advise making one function because you do the same thing 3 times. In this case I called the function 'save', but you can call it whatever you want. The first parameter is the file and the second is the output filename.
// note: save uses req, so define it inside the route handler where req is in scope
function save(file, output) {
  return new Promise((resolve, reject) => {
    file.mv(`inputs/${req.body.caseName}/${output}`, err => {
      if (err) return reject(err)
      resolve()
    })
  })
}

Promise.all([
  save(req.files.trainInput, 'train_input.csv'),
  save(req.files.trainInput, 'train_output.csv'),
  save(req.files.trainInput, 'test_input.csv')
])
  .then(_ => res.send(200))
  .catch(err => res.send(400))
What version of Node are you using? If async/await is available, that cleans it up a bunch.
const moveCsv = (file, dest) => {
  return new Promise((resolve, reject) => {
    file.mv(dest, function (err) {
      if (err) return reject(err);
      resolve();
    });
  });
};

app.post('/', async (req, res) => {
  try {
    var filename = `outputs/${Date.now()}_output.json`;
    const {
      trainInput,
      trainOutput,
      testInput
    } = req.files;
    const prefix = `inputs/${req.body.caseName}`;

    await moveCsv(trainInput, `${prefix}/train_input.csv`);
    await moveCsv(trainOutput, `${prefix}/train_output.csv`);
    await moveCsv(testInput, `${prefix}/test_input.csv`);

    res.send('success');
  } catch (err) {
    res.status(500).send(err);
  }
});
I'm also assuming here that your trainInput, trainOutput, and testInput weren't all meant to be req.files.trainInput.
Just be careful: the awaited moves run one after the other, so if that write operation takes ages you could also look at putting those calls onto a worker thread. It won't really matter if your requests to that server endpoint are fast and infrequent.
You can add RxJS to your project and use Observable.forkJoin().
Solution with Observables (assuming that trainInput.mv() returns an Observable):
/* Without a selector */
var source = Rx.Observable.forkJoin(
  trainInput.mv(`inputs/${req.body.caseName}/train_input.csv`),
  trainInput.mv(`inputs/${req.body.caseName}/train_output.csv`),
  trainInput.mv(`inputs/${req.body.caseName}/test_input.csv`)
);

var subscription = source.subscribe(
  function (x) {
    // On success callback
    console.log('Success: %s', x);
  },
  function (err) {
    // Error callback
    console.log('Error');
  },
  function () {
    // Completed - runs always
    console.log('Completed');
  });

// => Success: [result_1, result_2, result_3] or Error
// => Completed
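If .mv() does not actually return an Observable (it normally takes a callback), RxJS can wrap the Node-style callback for you. A hedged sketch using bindNodeCallback; the bind call is an assumption about how mv expects to be invoked:

// Sketch: turn the callback-style mv into a function that returns an Observable
const mvAsObservable = Rx.Observable.bindNodeCallback(trainInput.mv.bind(trainInput));

var source = Rx.Observable.forkJoin(
  mvAsObservable(`inputs/${req.body.caseName}/train_input.csv`),
  mvAsObservable(`inputs/${req.body.caseName}/train_output.csv`),
  mvAsObservable(`inputs/${req.body.caseName}/test_input.csv`)
);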

Trouble with await/async

var images = ['image', 'image2', 'image3'];

async function proccess_images(images) {
  for (const image of images) {
    await postprocess(image)
  }
  console.log('done'); // this prints out before the for loop is finished
}

async function postprocess(image) {
  await process(function(data) {
    console.log(data);
  }, image)
};

function process(callback, image) {
  3rdpartylibrary.process(image, (err, data) => {
    if (err) {
      console.error(err);
    } else {
      console.log('not done')
      callback(data);
    }
  })
}

proccess_images(images)
I'm having trouble getting an async/await function working with my code that does some image processing using a 3rd party library. I'm trying to get each image to finish processing before moving on to the next image, but currently all three start processing immediately without waiting. It seems like the 3rd party library is starting its process, which makes the async/await function think it's done, but how do I make sure the next image isn't processed until AFTER 3rdpartylibrary.process has finished processing the data? I've also tried putting
await new Promise((resolve, reject) =>
in front of the 3rdpartylibrary function, which stops them all from processing at the same time, but it causes the for loop to exit after processing the first image correctly. Any ideas as to what I'm doing wrong?
Your problem appears to be that your process function isn't returning a promise. await requires a promise to work correctly. Something like this should work:
async function postprocess(image) {
  var output = await process(image);
  console.log(output);
};

function process(image) {
  return new Promise((resolve, reject) => {
    3rdpartylibrary.process(image, (err, data) => {
      if (err) {
        console.error(err);
        reject(err);
      } else {
        console.log('not done')
        resolve(data);
      }
    })
  });
}
Change the process function definition to wrap a Promise:
function process(image) {
  return new Promise((resolve, reject) => {
    3rdpartylibrary.process(image, (err, data) => {
      if (err) reject(err);
      resolve(data);
    })
  })
}
The key point is to remove any callback references: the function now returns a Promise, so you can await it.
You want to wrap 3rdpartylibrary in a promise: convert the callback style into a promise by wrapping it. Try this:
var images = ['image1', 'image2', 'image3'];

// mock thirdPartyLibrary
const thirdPartyLibrary = {
  process: (image, callback) => {
    console.log(`processing image ${image}`);
    setTimeout(() => {
      const noError = undefined;
      callback(noError, `finished processing image ${image}`);
    }, 1000);
  }
};

async function proccess_images(images) {
  console.log('start');
  for (const image of images) {
    console.log(`before processing image ${image}`);
    await process(image);
    console.log(`after processing image ${image}`);
  }
  console.log('done');
}

function process(image) {
  // wrap thirdPartyLibrary in a promise
  return new Promise((resolve, reject) => {
    thirdPartyLibrary.process(image, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data);
      }
    })
  })
}

proccess_images(images)
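If the real library's process(image, callback) follows the Node (err, data) callback convention as mocked above, util.promisify gives the same wrapper with less code. A sketch; the bind call is an assumption about how the library uses this:

const util = require('util');

// Sketch: assumes process(image, callback) calls back with (err, data)
const processImage = util.promisify(thirdPartyLibrary.process.bind(thirdPartyLibrary));

async function proccess_images(images) {
  for (const image of images) {
    const data = await processImage(image); // waits for each image before starting the next
    console.log(data);
  }
  console.log('done');
}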
