Conditional async callbacks - javascript

I'm writing an Electron program which takes a CSV file as input, and does file operations depending on the CSV content and file existence (it's to manage MAME arcade roms).
In order to have a progress bar on the UI side, I have switched the code from fully synchronous (because it was much easier) to asynchronous.
I just cannot find out how to reliably display a message to the user when all the lines in the CSV file are processed, and all the zip files are copied or removed.
Here is a (simplified) sample method:
fs.readFile(file, { 'encoding': 'utf8' }, (err, fileContents) => {
    let fileCsv = csvparse(fileContents);
    let lines = fileCsv.length;

    fileCsv.forEach((line) => {
        lines--;
        let zip = line.name + '.zip';
        let sourceRom = path.join(romset, zip);
        let destRom = path.join(selection, zip);

        this.emit('progress.add', fileCsv.length, fileCsv.length - lines, zip);

        if (fs.existsSync(sourceRom) && !fs.existsSync(destRom)) {
            fs.copy(sourceRom, destRom, (err) => {
                let sourceChd = path.join(romset, game);
                if (fs.existsSync(sourceChd)) {
                    fs.copy(sourceChd, path.join(selection, game), (err) => {
                        if (lines <= 0) { callback(); } // chd is copied
                    });
                } else {
                    if (lines <= 0) { callback(); } // no chd, rom is copied
                }
            });
        } else {
            if (lines <= 0) { callback(); } // no source found or already exists
        }
    });
});
The problem is that the CSV file is processed really fast, but the files are not copied as fast. The loop decrements the lines counter to 0 almost immediately, so after each file copy finishes, the check finds that it's zero and triggers the callback.
How do I reliably trigger the callback at the end of all these nested callbacks and conditions?
Thanks

I tried to change the code without massively overwriting your style, assuming there is a reason to avoid things like Bluebird, async/await and native Promises, and the async lib.
You need to decrement lines after a line is processed. I pulled the processing logic out into a function to make this clearer:
function processLine({
    sourceRom, destRom, romset, game, callback
}) {
    if (fs.existsSync(sourceRom) && !fs.existsSync(destRom)) {
        fs.copy(sourceRom, destRom, (err) => {
            // *really* should handle this error
            let sourceChd = path.join(romset, game);
            if (fs.existsSync(sourceChd)) {
                fs.copy(sourceChd, path.join(selection, game), (err) => {
                    // *really* should handle this error
                    callback();
                });
            } else {
                callback();
            }
        });
    } else {
        callback(); // no source found or already exists
    }
}
fs.readFile(file, { 'encoding': 'utf8' }, (err, fileContents) => {
    let fileCsv = csvparse(fileContents);
    let lines = fileCsv.length;

    fileCsv.forEach((line) => {
        let zip = line.name + '.zip';
        let sourceRom = path.join(romset, zip);
        let destRom = path.join(selection, zip);

        this.emit('progress.add', fileCsv.length, fileCsv.length - lines, zip);

        processLine({ sourceRom, destRom, game, romset, callback: () => {
            lines--;
            if (lines <= 0) {
                callback();
            }
        }});
    });
});
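For reference, if async/await and the promise form of fs-extra's copy() were acceptable after all, the whole flow collapses into one sequential loop. This is only a sketch under that assumption, reusing the romset, selection, game and callback names from the snippets above:
fs.readFile(file, { 'encoding': 'utf8' }, async (err, fileContents) => {
    let fileCsv = csvparse(fileContents);
    let done = 0;

    for (const line of fileCsv) {
        let zip = line.name + '.zip';
        let sourceRom = path.join(romset, zip);
        let destRom = path.join(selection, zip);

        this.emit('progress.add', fileCsv.length, ++done, zip);

        if (fs.existsSync(sourceRom) && !fs.existsSync(destRom)) {
            // fs-extra's copy() returns a promise when no callback is passed
            await fs.copy(sourceRom, destRom);
            let sourceChd = path.join(romset, game); // "game" as in the original snippet
            if (fs.existsSync(sourceChd)) {
                await fs.copy(sourceChd, path.join(selection, game));
            }
        }
    }

    callback(); // reached only after every copy has finished
});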

Related

How to break out of a loop in a callback

Hi, how can I break out of the for loop? I want to be able to break out of it in the callback, in the if statement.
I want this program to create a folder in the given directory, and every time it throws an error I want it to change the folder name by adding a number to it, so that when the folder already exists it keeps trying until it creates a unique folder name without throwing an error.
I will check for the error code later; help me solve this first.
const fs = require('fs');
const path = require('path');

function folder(folderName) {
    for (let i = 1; i <= 10; i++) {
        let pathNumber = i;
        let fullPath = folderName + pathNumber;
        fs.mkdir(path.join("D:", fullPath), (err) => {
            if (!err) {
                return; // I want to break out of the loop here
            }
        });
    }
}

folder("folder");
You can't write the code that way because the for loop will already be done before any of the fs.mkdir() callbacks are called. They are asynchronous and happen LATER.
If you want to execute one iteration of the loop, including the fs.mkdir() before moving on to any other, then you can use async/await with fs.promises.mkdir().
Here's what a solution could look like with fs.promises.mkdir(). I've also added error handling for the case where all 10 sub-dir names you're trying already exist.
async function folder(folderName) {
    let lastError;
    for (let pathNumber = 1; pathNumber <= 10; pathNumber++) {
        let fullPath = path.join("D:", folderName + pathNumber);
        try {
            await fs.promises.mkdir(fullPath);
            return fullPath;
        } catch (e) {
            lastError = e;
            // ignore error so we keep trying other numbers
        }
    }
    throw lastError;
}

folder("folder").then(fullPath => {
    console.log(`dir created: ${fullPath}`);
}).catch(err => {
    console.log(err);
});
Much simpler without await:
const numFolders = 10,
      folders = Array.from(Array(numFolders), (_, i) => `folder${i + 1}`),
      len = folders.length;
let cnt = 0;

const makeFolder = () => {
    if (cnt >= len) return; // stop because done (every name has been tried)
    fs.mkdir(path.join("D:", folders[cnt]), (err) => {
        if (err) {
            cnt++;        // this attempt failed, move on to the next name
            makeFolder(); // only call again if there was an error
        }
    });
};

makeFolder();

Async loop seems to run partially in parallel

I'm trying to implement a function which slices a file into chunks and then sends them to my backend one after another.
The function has to hash each file and check whether the hash is already known before starting the upload.
The following is the part of the code where my problematic function is called.
process: async (
fieldName,
file,
metadata,
load,
error,
progress,
abort,
transfer,
options,
) => {
// fieldName is the name of the input field - No direct relevance for us
// logger.log(`fieldName: ${fieldName}`);
// Usually Empty - Can be added with Metadata-Plugin
// logger.log(metadata);
const source = this.$axios.CancelToken.source();
const abortProcess = () => {
// This function is entered if the user has tapped the cancel button
source.cancel('Operation cancelled by user');
// Let FilePond know the request has been cancelled
abort();
};
let chunks = [];
const {
chunkForce,
chunkRetryDelays,
chunkServer,
chunkSize,
chunkTransferId,
chunkUploads,
} = options;
// Needed Parameters of file
const { name, size } = file;
if (chunkTransferId) {
/** Here we handle what happens, when Retry-Button is pressed */
logger.log(`Already defined: ${chunkTransferId}`);
return { abortProcess };
}
this.hashFile(file)
.then((hash) => {
logger.log(`File Hashed: ${hash}`);
if (hash.length === 0) {
error('Hash not computable');
}
return hash;
})
.then((hash) => {
logger.log(`Hash passed through: ${hash}`);
return this.requestTransferId(file, hash, source.token)
.then((transferId) => {
logger.log(`T-ID received: ${transferId}`);
return transferId;
})
.catch((err) => {
error(err);
});
})
.then((transferId) => {
transfer(transferId);
logger.log(`T-ID passed through: ${transferId}`);
// Split File into Chunks to prepare Upload
chunks = this.splitIntoChunks(file, chunkSize);
// Filter Chunks - Remove all those which have already been uploaded with success
const filteredChunks = chunks.filter(
(chunk) => chunk.status !== ChunkStatus.COMPLETE,
);
logger.log(filteredChunks);
return this.uploadChunks(
filteredChunks,
{ name, size, transferId },
progress,
error,
source.token,
).then(() => transferId);
})
.then((transferId) => {
// Now Everything should be uploaded -> Set Progress to 100% and make item appear finished
progress(true, size, size);
load(transferId);
logger.log(transferId);
})
.catch((err) => error(err));
return { abortProcess };
},
uploadChunks is where the problem starts.
async uploadChunks(chunks, options, progress, error, cancelToken) {
    const { name, size, transferId } = options;
    for (let index = 0; index < chunks.length; index += 1) {
        let offset = 0;
        const chunk = chunks[index];
        chunk.status = ChunkStatus.PROCESSING;
        // eslint-disable-next-line no-await-in-loop
        await this.uploadChunk(chunk.chunk, options, offset)
            .then(() => {
                chunk.status = ChunkStatus.COMPLETE;
                offset += chunk.chunk.size;
                progress(true, offset, size);
                logger.log(offset); // This is always chunk.chunk.size, instead of getting bigger
            })
            .catch((err) => {
                chunk.status = ChunkStatus.ERROR;
                error(err);
            });
    }
},
uploadChunk(fileChunk, options, offset) {
    const { name, size, transferId } = options;
    const apiURL = `${this.$config.api_url}/filepond/patch?id=${transferId}`;
    return this.$axios.$patch(apiURL, fileChunk, {
        headers: {
            'content-type': 'application/offset+octet-stream',
            'upload-name': name,
            'upload-length': size,
            'upload-offset': offset,
        },
    });
},
As you can see, uploadChunks takes an array of chunks, some options, two functions (progress and error), and a cancelToken (which I currently don't use, since I'm still stuck on this problem).
Each chunk in the array has the form of:
{
    status: 0, // some status indicating whether it's completed or not
    chunk: // binary data
}
The function uploadChunks iterates over the chunk array and should, in theory, upload one chunk after another, increment offset after each upload, and then call progress. After that it should start the next iteration of the loop, where offset would be bigger than in the previous call.
The calls themselves do get executed one after another, but every call has the same offset, and progress does not get called repeatedly. Instead my progress bar locks up until everything is uploaded and then jumps to 100%, due to the load call at the end of the first function.
So the upload itself works fine and in the correct order, but the code after await this.uploadChunk... doesn't seem to run after each chunk; it blocks somehow.
You are setting offset to 0 inside the loop. So offset is always 0. You should move this line:
let offset = 0;
before the for statement.
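Applied to the method above, the fix is just that one moved line (a sketch of the corrected loop, otherwise unchanged):
async uploadChunks(chunks, options, progress, error, cancelToken) {
    const { name, size, transferId } = options;
    let offset = 0; // moved out of the loop so it accumulates across chunks
    for (let index = 0; index < chunks.length; index += 1) {
        const chunk = chunks[index];
        chunk.status = ChunkStatus.PROCESSING;
        // eslint-disable-next-line no-await-in-loop
        await this.uploadChunk(chunk.chunk, options, offset)
            .then(() => {
                chunk.status = ChunkStatus.COMPLETE;
                offset += chunk.chunk.size;
                progress(true, offset, size); // now reports a growing offset
            })
            .catch((err) => {
                chunk.status = ChunkStatus.ERROR;
                error(err);
            });
    }
},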

Looping the callback function in Node.js

This is a piece of code which writes data to a BLE device and reads data back from it. Data is written to the device in the form of a buffer; the value in 'mydata' (AAAD0000) is the command to be written in order to read the data.
The function named chara3() consists of the write and the read; the read is a callback function in which the command is passed and the data read back.
My requirement: in the 'mydata' value I mentioned earlier, the last two zeros are the memory address. I need to read the data at the memory addresses from zero to 59, that is AAAD0000 to AAAD0059, so of course I need to run a loop. Reading the zeroth location works fine and I get the output, but when I tried to put it inside a loop the code became a mess and the read part stopped executing.
Can anyone suggest a better way to read the data from the zeroth memory location to the 59th (AAAD0000 to AAAD0059)?
The flow should be: the command is written first, then the data is read back, then the memory location is incremented by 1, and this should repeat up to 59.
var mydata = 'AAAD0000';

function chara3() {
    var buff2 = new Buffer(mydata, 'hex');
    SensorCharacteristic.write(buff2, false, function(error) { //[0x002d]
        console.log('Writing command SUCCESSFUL', mydata);
        if (!error) {
            SensorCharacteristic.read((error, data) => {
                console.log("i just entered");
                if (data.toString('hex') != '0000') {
                    console.log('Temperature History: ', data.toString('hex'));
                    enter();
                }
                else {
                    console.log('contains null value');
                } //else
            });
        }

        function enter() {
            mydata = (parseInt(mydata, 16) + 00000001).toString(16);
        }
    }); //.write
} //chara3
There's no error, but some part of the code is not executing.
You can use recursion by wrapping your methods in a promise:
async function chara3(mydata = 'AAAD0000') {
    if (mydata === 'AAAD0059') {
        return;
    }
    var buff2 = new Buffer(mydata, 'hex');
    return new Promise((resolve) => {
        SensorCharacteristic.write(buff2, false, function (error) { //[0x002d]
            console.log('Writing command SUCCESSFUL', mydata);
            if (!error) {
                SensorCharacteristic.read(async (error, data) => {
                    console.log("i just entered");
                    if (data.toString('hex') != '0000') {
                        console.log('Temperature History: ', data.toString('hex'));
                        let next = await chara3(enter(mydata));
                        return resolve(next);
                    }
                    else {
                        console.log('contains null value');
                        return resolve();
                    } //else
                });
            }
        }); //.write
    });
} //chara3

// take the current address and return the next one; the upper-case hex form
// is kept so the 'AAAD0059' stop condition above can match
function enter(mydata) {
    return (parseInt(mydata, 16) + 1).toString(16).toUpperCase();
}
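A possible usage, given the promisified recursion above:
chara3().then(() => {
    console.log('Finished reading the memory locations');
});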
Also, if you can convert your methods SensorCharacteristic.write and SensorCharacteristic.read into promises, you can simply map:
async function chara3(mydata) {
    var buff2 = new Buffer(mydata, 'hex');
    await SensorCharacteristic.write(buff2, false);
    console.log('Writing command SUCCESSFUL', mydata);
    let data = await SensorCharacteristic.read();
    if (data.toString('hex') != '0000') {
        console.log('Temperature History: ', data.toString('hex'));
    } else {
        console.log('contains null value');
    }
}

// the address now comes in as the mydata parameter, generated by the map below
let promiseArray = Array(60)
    .fill()
    .map((_, i) => (parseInt('AAAD0000', 16) + i).toString(16))
    .map(chara3);

Promise.all(promiseArray).then(() => console.log('done'));
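Note that Promise.all starts all 60 write/read pairs at once. If the device needs them strictly one after another (as described in the question), a sequential variant under the same promisified-write/read assumption could look like:
async function readAll() {
    const addresses = Array(60).fill().map((_, i) => (parseInt('AAAD0000', 16) + i).toString(16));
    for (const address of addresses) {
        await chara3(address); // each location is written and read before moving to the next
    }
    console.log('done');
}

readAll();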

Generating a hash chain using async

I'm trying to generate a hashchain using the following code:
var async = require('async');
var _ = require('lodash');

var offset = 1e7;
var games = 1e7;
var game = games;
var serverSeed = '238asd1231hdsad123nds7a182312nbds1';

function loop(cb) {
    var parallel = Math.min(game, 1000);
    var inserts = _.range(parallel).map(function() {
        return function(cb) {
            serverSeed = genGameHash(serverSeed);
            game--;
            query('INSERT INTO `hash` SET `hash` = ' + pool.escape(serverSeed));
        };
    });

    async.parallel(inserts, function(err) {
        if (err) throw err;

        // Clear the current line and move to the beginning.
        var pct = 100 * (games - game) / games;
        console.log('PROGRESS: ' + pct.toFixed(2) + '%');

        if (game > 0) {
            loop(cb);
        } else {
            console.log('Done');
            cb();
        }
    });
}

loop(function() {
    console.log('Finished with SEED: ', serverSeed);
});
When I run this code it generates a hash chain of 1k hashes, while I'm trying to generate a chain of 1m. It seems like async isn't working properly, but I have no idea why; there are no errors in the console, nothing that points out a flaw.
Any ideas?
Can you run it with a smaller games value (about 3000)?
Your async.parallel call never signals completion because the callback of each inserts item is never triggered. I think the query function takes two parameters: query(sql: string, callback?: (err, result) => void) (TypeScript style).
I suggest you change your logic and flow as in the code block below:
var inserts = _.range(parallel).map(function() {
    return function(cb) {
        serverSeed = genGameHash(serverSeed);
        query('INSERT INTO `hash` SET `hash` = ' + pool.escape(serverSeed), function(err, result) {
            if (result && !err) {
                game--;
            }
            cb(); // remember to call the callback
        });
    };
});
In your code you have used async.parallel; I think that is not a good idea, since too many connections would be opened (1m). Recommended for this case is parallelLimit.
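A sketch of that change, keeping the rest of the loop as-is and assuming a limit of 100 concurrent queries:
async.parallelLimit(inserts, 100, function(err) {
    if (err) throw err;

    var pct = 100 * (games - game) / games;
    console.log('PROGRESS: ' + pct.toFixed(2) + '%');

    if (game > 0) {
        loop(cb);
    } else {
        console.log('Done');
        cb();
    }
});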

Read a large file N lines at a time in Node.JS

I have a file with 65,000,000 lines, which is about 2 GB in size.
I want to read this file N lines at a time, perform a DB insert operation, and then read the next N, with N being, say, 1000 in this case. Insert order doesn't matter, so synchronous is fine.
What's the best way of doing this? I've only found ways to either load one line at a time, or methods that read the whole file into memory. Sample code below, which I've been using to read the file one line at a time:
var singleFileParser = (file, insertIntoDB) => {
    var lr = new LineByLineReader(file);

    lr.on('error', function(err) {
        // 'err' contains error object
        console.error(err);
        console.error("Error reading file!");
    });

    lr.on('line', function(line) {
        insertIntoDB(line);
        // 'line' contains the current line without the trailing newline character.
    });

    lr.on('end', function() {
        // All lines are read, file is closed now.
    });
};
Lines can only be parsed one at a time by whatever is reading them. So, if you want 10 at a time, you just collect them one at a time until you have collected 10 and then process the 10.
I did not think Jarek's code quite worked right, so here's a different version that collects 10 lines into an array and then calls dbInsert():
var tenLines = [];

lr.on('line', function(line) {
    tenLines.push(line);
    if (tenLines.length === 10) {
        lr.pause();
        dbInsert(<yourSQL>, function(error, returnVal) {
            if (error) {
                // some sort of error handling here
            }
            tenLines = [];
            lr.resume();
        });
    }
});

// process last set of lines in the tenLines buffer (if any)
lr.on('end', function() {
    if (tenLines.length !== 0) {
        // process last set of lines
        dbInsert(...);
    }
});
Jarek's version seems to call dbInsert() on every line event rather than only every 10th line event and did not process any left over lines at the end of the file if they weren't a perfect multiple of 10 lines long.
Something like this should do
var cnt = 0;
var tenLines = [];

lr.on('line', function(line) {
    tenLines.push(line);
    if (++cnt >= 10) {
        lr.pause();
        // prepare your SQL statements from tenLines
        dbInsert(<yourSQL>, function(error, returnVal) {
            cnt = 0;
            tenLines = [];
            lr.resume();
        });
    }
});
This is my solution inside an async function:
let multipleLines = [];
const filepath = '<file>';
const numberLines = 50;

const lineReader = require('readline').createInterface({
    input: require('fs').createReadStream(filepath)
});

// process lines by numberLines
for await (const line of lineReader) {
    multipleLines.push(line);
    if (multipleLines.length === numberLines) {
        await dbInsert();
        multipleLines = [];
    }
}

// process last set of lines (if any)
if (multipleLines.length !== 0) {
    await dbInsert();
}
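Since for await and await need an async context, a minimal wrapper could look like this (dbInsert is assumed to be a promise-returning helper, as in the snippet above):
async function processFile() {
    let multipleLines = [];
    const filepath = '<file>';
    const numberLines = 50;
    const lineReader = require('readline').createInterface({
        input: require('fs').createReadStream(filepath)
    });

    for await (const line of lineReader) {
        multipleLines.push(line);
        if (multipleLines.length === numberLines) {
            await dbInsert(); // hypothetical promise-returning insert
            multipleLines = [];
        }
    }

    if (multipleLines.length !== 0) {
        await dbInsert();
    }
}

processFile().then(() => console.log('all batches inserted'));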
This is my solution using built-in modules rather than NPM's line-by-line package:
var lineReader = require('readline').createInterface({
    input: require('fs').createReadStream(fileToRead)
});

lines = [];

lineReader.on('line', function (line) {
    lines.push(line.split(","));
    if (lines.length == 10) {
        makeCall();
        lines = [];
    }
});

lineReader.on('close', function () {
    if (lines.length !== 0) {
        makeCall();
        lines = [];
    }
});

function makeCall() {
    // Do something with lines
}
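One caveat with this version: makeCall() is fired synchronously from the 'line' handler, so if it does asynchronous work (like the dbInsert calls in the earlier answers) lines keep arriving while it runs. A sketch that pauses the interface around each batch, assuming makeCall() is changed to return a promise:
lineReader.on('line', function (line) {
    lines.push(line.split(","));
    if (lines.length === 10) {
        lineReader.pause(); // hold further reads while the batch is processed
        makeCall().then(function () {
            lines = [];
            lineReader.resume();
        });
    }
});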
