I have two models, ModelA and ModelB, and I am trying to copy a tags = Array() field from ModelB into ModelA, like this:
function run() {
  ModelA.find({}).limit(500).cursor()
    .on('data', function(doc) {
      let refID = doc.ref_id;
      ModelB.findOne({_id: refID}).exec(function(err, modelb) {
        if (err) {
          console.log(`[Error]: Getting modelb ${refID}`);
        }
        if (modelb) {
          if (modelb.tags.length > 0) {
            doc.tags = modelb.tags;
          }
        }
      });
      doc.processed = true;
      doc.save(function(err, hackAlert) {
        if (err) {
          console.log('[Error]: Saving ModelA: ' + err);
        }
        console.log(`Saved: ${doc._id}`);
      });
    })
    .on('error', function(err){
      console.log('[Error]');
    })
    .on('end', function(){
      console.log('Done');
    });
}
The script does save every ModelA instance with processed = true, but because of the asynchronous nature of Node, the ModelA documents end up being saved without tags.
I'm new to this, if you can't tell. What is the best, modern way to "await" the ModelB.findOne() query before saving ModelA?
You can simply move the save logic into the callback of your ModelB.findOne() query:
function run() {
  ModelA.find({}).limit(500).cursor()
    .on('data', function(doc) {
      let refID = doc.ref_id;
      ModelB.findOne({_id: refID}).exec(function(err, modelb) {
        if (err) {
          console.log(`[Error]: Getting modelb ${refID}`);
        }
        if (modelb) {
          if (modelb.tags.length > 0) {
            doc.tags = modelb.tags;
          }
        }
        doc.processed = true;
        doc.save(function(err, hackAlert) {
          if (err) {
            console.log('[Error]: Saving ModelA: ' + err);
          }
          console.log(`Saved: ${doc._id}`);
        });
      });
    })
    .on('error', function(err){
      console.log('[Error]');
    })
    .on('end', function(){
      console.log('Done');
    });
}
For a more up-to-date approach using async/await and Promises:
async function run () {
  ModelA.find({}).limit(500).cursor()
    .on('data', async function (doc) {
      let refID = doc.ref_id;
      const modelb = await ModelB.findOne({_id: refID}).exec()
        .catch(() => console.log(`[Error]: Getting modelb ${refID}`));
      if (modelb) {
        if (modelb.tags.length > 0) {
          doc.tags = modelb.tags;
        }
      }
      doc.processed = true;
      // doc.save() already returns a promise, so there is no .exec() on it
      await doc.save()
        .catch(err => console.log('[Error]: Saving ModelA: ' + err));
      console.log(`Saved: ${doc._id}`);
    })
    .on('error', function (err){
      console.log('[Error]');
    })
    .on('end', function () {
      console.log('Done');
    });
}
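Note that making the 'data' listener async does not make the cursor wait between documents; it only lets you use await inside each handler call. If you need the documents to be processed strictly one at a time, a minimal sketch of an alternative, assuming a Mongoose version whose query cursors are async-iterable, is to drive the cursor with for await...of:
// Sketch only: assumes a Mongoose version where .cursor() is async-iterable.
async function run() {
  const cursor = ModelA.find({}).limit(500).cursor();
  for await (const doc of cursor) {
    // Each iteration (lookup + save) finishes before the next document is read.
    const modelb = await ModelB.findOne({ _id: doc.ref_id }).exec()
      .catch(() => console.log(`[Error]: Getting modelb ${doc.ref_id}`));
    if (modelb && modelb.tags.length > 0) {
      doc.tags = modelb.tags;
    }
    doc.processed = true;
    await doc.save()
      .catch(err => console.log('[Error]: Saving ModelA: ' + err));
    console.log(`Saved: ${doc._id}`);
  }
  console.log('Done');
}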
// Requires assumed from how the functions are used:
const { get } = require('https');
const { writeFile, readFileSync, existsSync, unlink } = require('fs');

function Getir() {
  var options =
  {
    host: 'example',
    port: 443,
    path: '/myUrl'
  };
  get(options, function (http_res) {
    var data = "";
    http_res.on("data", function (chunk) {
      data += chunk;
    });
    http_res.on("end", function () {
      writeFile('NewHtml.txt', `${data}`, 'utf8', (err) => {
        if (err) console.log(err);
      });
    });
  });
}

function DegistirDuzenle() {
  if (existsSync("./DatabaseHtml.txt")) {
    var DataBaseHtml = readFileSync("./DatabaseHtml.txt", 'utf-8');
    var MyHtml = readFileSync("./NewHtml.txt", 'utf-8');
    if (MyHtml == DataBaseHtml) {
      unlink("./NewHtml.txt", (err) => { if (err) console.log(err) });
      console.log("değişiklik yapılmadı"); // "no changes were made"
    } else {
      // notification
      console.log("değişiklik yapıldı"); // "changes were made"
      // Change
      unlink('./DatabaseHtml.txt', (err) => { if (err) console.log(err); });
      writeFile('./DatabaseHtml.txt', `${MyHtml}`, 'utf-8', (err) => { if (err) console.log(err); });
      unlink('./NewHtml.txt', (err) => { if (err) console.log(err); });
    }
  }
  else {
    writeFile('DatabaseHtml.txt', `NewDataBaseHtml`, 'utf8', (err) => {
      if (err) console.log(err);
    });
  }
}

async function Mysystem() {
  let mypromis = new Promise((resolve, reject) => {
    resolve(Getir());
  });
  await mypromis.then(DegistirDuzenle());
}

Mysystem();
I want to create a txt file, read it, and delete it later. I have two functions: 1. Getir() creates the txt, 2. DegistirDuzenle() reads the txt and deletes it. But the second function starts running first and I get the error "Error: ENOENT: no such file or directory, open './NewHtml.txt'".
async function Mysystem() {
  let mypromis = new Promise((resolve, reject) => {
    resolve(Getir());
  });
  await mypromis()
  await DegistirDuzenle()
}
Mysystem()
You should use
async function Mysystem() {
  await Getir();
  await DegistirDuzenle();
}
or
function Mysystem() {
  return Getir().then(DegistirDuzenle);
}
but not a mix of them. Also notice that when passing the DegistirDuzenle function to .then() as a callback, it should not be invoked; invoking it passes the result of the call rather than the function itself. Alternatively, you could write .then((getirResult) => DegistirDuzenle()).
Also, for this to work, you'll need to properly promisify the code in Getir and DegistirDuzenle.
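For example, a minimal sketch of a promisified Getir, assuming the same https.get and fs.writeFile calls and the options object from the snippet above, would resolve only after the response has actually been written to disk:
// Sketch only: resolves once NewHtml.txt has really been written.
function Getir() {
  return new Promise((resolve, reject) => {
    get(options, (http_res) => {
      let data = "";
      http_res.on("data", (chunk) => { data += chunk; });
      http_res.on("end", () => {
        writeFile('NewHtml.txt', data, 'utf8', (err) => {
          if (err) return reject(err);
          resolve();
        });
      });
    }).on("error", reject);
  });
}
DegistirDuzenle would need the same treatment for its writeFile/unlink calls, or it could stick to the *Sync variants throughout.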
The scenario: I have two large CSV files, csv1.csv and csv2.csv. Both files have an email column. I have to read csv1.csv row by row, check whether each email exists in csv2.csv, and if it matches, write the row of csv2.csv to csv3.csv. I have tried read streams as well, but it is not working as expected. Any guidance or help is appreciated.
Thanks to all in advance.
Following are the CSV files
csv1.csv
email,header1,header2
test1#example.com,test1,test1
test2#example.com,test2,test2
test3#example.com,test3,test3
test4#example.com,test4,test4
test5#example.com,test5,test5
csv2.csv
email,header1,header2
test4#example.com,test4,test4
test5#example.com,test5,test5
test6#example.com,test6,test6
test7#example.com,test7,test7
test8#example.com,test8,test8
Following is the code that I tried
const fs = require('fs');
const csv = require('fast-csv');

class CsvHelper {
  static write(filestream, rows, options) {
    return new Promise((res, rej) => {
      csv.writeToStream(filestream, rows, options)
        .on('error', err => rej(err))
        .on('finish', () => res());
    });
  }

  constructor(opts) {
    this.headers = opts.headers;
    this.path = opts.path;
    this.writeOpts = {
      headers: this.headers,
      includeEndRowDelimiter: true
    };
  }

  create(rows) {
    return CsvHelper.write(fs.createWriteStream(this.path, { flags: 'a' }), rows, { ...this.writeOpts });
  }

  append(rows) {
    return CsvHelper.write(fs.createWriteStream(this.path, { flags: 'a' }), rows, {
      ...this.writeOpts,
      writeHeaders: false,
    });
  }
}
class Helper {
  async matchCsv (outerRow) {
    try {
      const filePath2 = "csv2.csv";
      const filePath3 = "csv3.csv";
      let row = [];
      const csvFile = new CsvHelper({
        path: filePath3,
        headers: ["Email", "Active"]
      });
      return new Promise((resolve, reject) => {
        fs.createReadStream(filePath2)
          .on("error", err => {
            reject(err);
          })
          .pipe(csv.parse({ headers: true }))
          .on("error", err => {
            reject(err);
          })
          .on("data", async innerRow => {
            if (outerRow["email"] === innerRow["email"]) {
              console.log("====================");
              console.log("match found");
              console.log(innerRow);
              console.log("====================");
              row.push([innerRow["email"], "yes"]);
              console.log("row: ", row);
            }
          })
          .on("finish", async () => {
            if (!fs.existsSync(filePath3)) {
              await csvFile.create(row).then(() => {
                resolve("Done from matchCsv");
              })
            } else {
              await csvFile.append(row).then(() => {
                resolve("Done from matchCsv");
              })
            }
          })
      });
    } catch (err) {
      throw (err);
    }
  }
  async generateCsv () {
    try {
      const filePath1 = "csv1.csv";
      return new Promise((resolve, reject) => {
        fs.createReadStream(filePath1)
          .on("error", err => {
            reject(err);
          })
          .pipe(csv.parse({ headers: true }))
          .on("error", err => {
            reject(err);
          })
          .on("data", async outerRow => {
            const result = await this.matchCsv(outerRow);
            console.log("result: ", result);
          })
          .on("finish", () => {
            resolve("Generated csv3.csv file.");
          });
      });
    } catch (err) {
      throw (err);
    }
  }
}

async function main() {
  const helper = new Helper();
  const result = await helper.generateCsv();
  console.log(result);
}

main();
The question is a little confusing, but I think I know what you want. Here is what I would do to check whether the email exists: add all the rows to an array, cycle through them, and if the email address matches the one you are looking for, do something with that row. I think you said you wanted to write the matching row out to another CSV file; a sketch of that step follows the code below.
const csv = require('csv-parser');
const fs = require('fs');

const filepath = "./example_data.csv";
const emailAdd = "myemail#email.com";
var rowsArr = [];

fs.createReadStream(filepath)
  .on('error', () => {
    // handle error
  })
  .pipe(csv())
  .on('data', (row) => {
    rowsArr.push(row);
  })
  .on('end', () => {
    // note: use < (not <=) so the loop stays inside the array bounds
    for (var i = 0; i < rowsArr.length; i++) {
      if (rowsArr[i].emailAddress == emailAdd) {
        // do something
      }
    }
  });
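For the "do something" step, one minimal sketch, assuming rowsArr holds the parsed csv2 rows and using the question's column names (email, header1, header2), is to append each matching row to csv3.csv with plain fs:
// Sketch only: appends each matching row to csv3.csv.
const outPath = "./csv3.csv";
if (!fs.existsSync(outPath)) {
  fs.writeFileSync(outPath, "email,header1,header2\n"); // write the header once
}
for (var i = 0; i < rowsArr.length; i++) {
  if (rowsArr[i].email == emailAdd) {
    var r = rowsArr[i];
    fs.appendFileSync(outPath, `${r.email},${r.header1},${r.header2}\n`);
  }
}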
I'm learning to create a simple API with MySQL. I've understood and implemented the basic structure in which the app calls the router, which calls the controller, which calls the service. But now I'm developing a multiple-tag service module, and I've realized that I need to call the same SQL query services declared inside it. Here is the code for a better understanding:
tag_service.js:
const mysql = require("../../config/database");

module.exports = {
  insertTags: async (data, callBack) => {
    const connection = await mysql.connection();
    let results = '';
    const tagsArray = data.tags.map(tag => [data.id_manager, data.cod_table, data.id_record, tag]);
    try {
      //console.log("at insertCallout...");
      await connection.query("START TRANSACTION");
      // bulk insert: a single ? expands to the nested rows array (mysql/mysql2 style)
      results = await connection.query(
        `INSERT INTO s_com_tags (id_manager,cod_table,id_record,tag)
        VALUES ?`,
        [tagsArray]
      );
      await connection.query("COMMIT");
    } catch (err) {
      await connection.query("ROLLBACK");
      //console.log('ROLLBACK at insertCallout', err);
      throw err;
    } finally {
      await connection.release();
      return callBack(null, results);
    }
  },

  deleteTags: async (data, callBack) => {
    //console.log(data);
    let results = '';
    const connection = await mysql.connection();
    try {
      //console.log("at deleteCallouts...");
      await connection.query("START TRANSACTION");
      results = await connection.query(
        `DELETE FROM s_com_tags
        WHERE cod_table = ? AND id_record = ? AND tag IN (?)`,
        [data.code_table, data.id_record, data.tags]
      );
      //console.log(res);
      await connection.query("COMMIT");
    } catch (err) {
      await connection.query("ROLLBACK");
      //console.log('ROLLBACK at deleteCallouts', err);
      throw err;
    } finally {
      await connection.release();
      return callBack(null, results);
    }
  },
};
The controller structure that will use the service:
module.exports = {
  updateLabDesc: async (req, res, next) => {
    try {
      const body = req.body;
      if (!body.internal_code) {
        updateLabDesc(body.manager, async (err, results) => {
          if (err) {
            return next(createError.InternalServerError())
          }
        });
      }
      updateTags(body, async (err, results) => {
        if (err) {
          return next(createError.InternalServerError())
        }
        return res.json({
          success: (results ? 1 : 0),
          message: (results || 0) + " LabDesc inserted successfully"
        });
      });
    } catch (error) {
      next(error)
    }
  },
};
But the update function is something like this:
updateTag function => {
  try {
    const current_tags = await getTags(req.body);
    let newTags = [];
    let oldTags = [];
    req.body.tags.forEach(tag => {
      if (!current_tags.includes(tag))
        newTags.push(tag)
    });
    await insertTags(newTags);
    current_tags.forEach(tag => {
      if (!req.body.tags.includes(tag))
        oldTags.push(tag)
    });
    await deleteTags(oldTags);
  } catch (error) {
    next(error)
  }
},
Basically, tag_service has insertTags and deleteTags, but I need updateTags to call these functions as well. The final controller will call insertTags, deleteTags and updateTags. How can I structure these calls?
In other words, it is a controller that could call two helpers (insertTags and deleteTags) plus another helper (updateTags) that itself calls those two helpers. Any ideas?
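One possible structure, sketched under the assumption that insertTags and deleteTags are declared as standalone const functions (rather than inline object properties) and that a getTags helper exists as in the pseudocode above, is to keep updateTags in the same tag_service module, let it reuse the other two, and export all three so the controller can call whichever it needs:
// Sketch only: updateTags reuses insertTags and deleteTags from the same module.
// getTags(data) is assumed to resolve to the current array of tags for the record.
const updateTags = async (data, callBack) => {
  try {
    const currentTags = await getTags(data);
    const newTags = data.tags.filter(tag => !currentTags.includes(tag));
    const oldTags = currentTags.filter(tag => !data.tags.includes(tag));
    if (newTags.length) await insertTags({ ...data, tags: newTags }, () => {});
    if (oldTags.length) await deleteTags({ ...data, tags: oldTags }, () => {});
    return callBack(null, { inserted: newTags.length, deleted: oldTags.length });
  } catch (err) {
    return callBack(err);
  }
};

module.exports = { insertTags, deleteTags, updateTags };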
I am coding a POST request which downloads the HTML for all the URLs, zips them, and emails the archive back. This should all happen in the backend. I am storing all the data in an array and extracting the first element to start these operations.
I have a while loop inside which I am calling some functions. Each function takes a certain amount of time to execute.
I used async, await and promises to make sure they run one after the other.
Coming to my problem: the while loop starts its next iteration before all the functions inside it have finished executing.
app.post('/?', async (req, res) => {
  var urls = req.query.urls
  var email = req.query.email;
  var new_stack = [urls, email]
  stack.push(new_stack)
  res.send("Mail sent")
  if (isFunctionRunning === false) { // initially it is false
    console.log(isFunctionRunning, stack.length)
    send_mails();
  }
});

const getGoogleIndexHTML = (url) => {
  return new Promise((resolve, reject) => {
    request(url, (err, res, body) => err ? reject(err) : resolve(body))
  })
}

const some_function_to_download = async (url) => {
  try {
    const a = url.split(".")
    let googleIndexHTML = await getGoogleIndexHTML(url)
    await fs.writeFile(directory + '/' + a[1] + '.html', googleIndexHTML, (err) => {
      if (err) throw err
    })
    console.log('File created.')
  } catch (err) {
    console.log(err)
  }
}

const html_to_zip_file = async () => {
  await zipper.zip(directory, function (error, zipped) {
    if (!error) {
      zipped.compress();
      zipped.save('./package.zip', function (error) {
        if (!error) {
          console.log("Saved successfully !");
        }
      });
    } else {
      console.log(error)
    }
  })
}
const send_mails = async () => {
  while (stack.length > 0) {
    isFunctionRunning = true
    var a = stack.shift()
    var urls = a[0]
    var collection_urls = urls.split(",");
    var to_email = a[1]
    rimraf(directory, function () {
      console.log("done");
    });
    fs.mkdirSync(directory);
    for (url of collection_urls) {
      await some_function_to_download(url); // 5 sec per download
    }
    await html_to_zip_file() // takes 5 sec to zip
      .then(result => {
        transporter.sendMail(set_mail_options(to_email)) // 2 sec to send mail
          .then(result => {
            console.log("Mail sent")
          })
          .catch(err => {
            console.log(err)
          })
      })
      .catch(err => {
        console.log(err)
      })
    console.log("reached") // this is reached before zip is done and mail sent. I want to prevent this
  }
  isFunctionRunning = false
}
You need to return transporter.sendMail in sendMails, fs.writeFile in someFunctionToDownload, and zipper.zip in htmlToZipFile, otherwise the await won't work as expected (I'm assuming that they actually do return promises; I'm only familiar with fs.writeFile).
Also: camelCase is used in JS, not snake_case 🙃
And are you sure rimraf is synchronous? (See the note after the code below.)
const sendMails = async () => {
  while (stack.length > 0) {
    isFunctionRunning = true;
    const [urls, toEmail] = stack.shift();
    var collectionUrls = urls.split(",");
    rimraf(directory, function() {
      console.log("done");
    });
    await fs.mkdir(directory); // assumes the promise-based fs API (fs.promises)
    await Promise.all(collectionUrls.map(someFunctionToDownload)); // 5 sec per download
    await htmlToZipFile() // takes 5 sec to zip
      .then(result => transporter.sendMail(set_mail_options(toEmail))) // 2 sec to send mail
      .then(result => {
        console.log("Mail sent");
      })
      .catch(err => {
        console.log(err);
      });
    console.log("reached"); // this is reached before zip is done and mail sent. I want to prevent this
  }
  isFunctionRunning = false;
};

const someFunctionToDownload = async url => {
  const a = url.split(".");
  const googleIndexHTML = await getGoogleIndexHTML(url);
  return fs.writeFile(`${directory}/${a[1]}.html`, googleIndexHTML, err => {
    if (err) throw err;
  });
};

const htmlToZipFile = async () => {
  return zipper.zip(directory, function(error, zipped) {
    if (!error) {
      zipped.compress();
      zipped.save("./package.zip", function(error) {
        if (!error) {
          console.log("Saved successfully!");
        }
      });
    } else {
      console.log(error);
    }
  });
};
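As for the rimraf question above: it is callback-based, so to make the cleanup step awaitable as well, one small sketch (assuming the usual rimraf(path, callback) signature) is to wrap it with util.promisify:
// Sketch only: lets the cleanup be awaited before the directory is recreated.
const { promisify } = require('util');
const rimrafAsync = promisify(rimraf);

// inside sendMails, instead of the fire-and-forget call:
await rimrafAsync(directory);
await fs.mkdir(directory);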
Try using the following
while (stack.length > 0) {
  isFunctionRunning = true
  var a = stack.shift()
  var urls = a[0]
  var collection_urls = urls.split(",");
  var to_email = a[1]
  rimraf(directory, function () {
    console.log("done");
  });
  fs.mkdirSync(directory);
  for (url of collection_urls) {
    await some_function_to_download(url); // 5 sec per download
  }
  try {
    const result = await html_to_zip_file() // takes 5 sec to zip
    const sendMailResult = await transporter.sendMail(set_mail_options(to_email))
  } catch (e) {
    console.log(e)
  }
  console.log("reached")
}
Since html_to_zip_file() and the sendMail function are independent, we can use:
const result = await Promise.all([html_to_zip_file(),transporter.sendMail(set_mail_options(to_email))]);
I'm having issues with reading the data from a file created with fs.createWriteStream.
It looks like the stream.write calls get executed at the end of the program, after the file is read, as opposed to when they are called (which would explain why there is nothing in variable_3 at the time of outputting it).
The function used is as follows (simplified):
module.exports = async data1 => {
  console.log('start');
  try {
    const stream = fs.createWriteStream(filename_1, { flags: "a" });
    console.log('stream created');
    stream.write("some data", (err) => {
      if (err) {
        console.log(err.message);
      } else {
        console.log("data written");
      }
    });
    for (const variable_1 of object_1) {
      const variable_2 = await function2({
        // generates a buffer
      });
      stream.write(variable_2, (err) => {
        if (err) {
          console.log(err.message);
        } else {
          console.log("data written");
        }
      });
    }
    stream.end();
    console.log('stream ended');
    console.log('opening file');
    const variable_3 = fs.readFileSync(filename_1);
    console.log('file opened and read with data: ' + variable_3);
    return;
  } catch (error) {
    console.log(error);
  }
};
Output:
> start
> stream created
> stream ended
> opening file
> file opened and read with data:
> data written
> data written
Once the code has run, however, when I open filename_1 (via the file explorer), all the data is present?!
Got it sorted in the end.
I didn't realise the stream functions don't return a promise, so I had to return a promise manually.
Here is the changed code:
module.exports = async data1 => {
  console.log('start');
  try {
    const stream = fs.createWriteStream(filename_1, { flags: "a" });
    console.log('stream created');
    stream.write("some data", (err) => {
      if (err) {
        console.log(err.message);
      } else {
        console.log("data written");
      }
    });
    for (const variable_1 of object_1) {
      const variable_2 = await function2({
        // generates a buffer
      });
      stream.write(variable_2, (err) => {
        if (err) {
          console.log(err.message);
        } else {
          console.log("data written");
        }
      });
    }
    stream.end();
    console.log('stream ended');
    console.log('opening file');
    const variable_3 = fs.readFileSync(filename_1);
    console.log('file opened and read with data: ' + variable_3);
    return new Promise(resolve => {
      stream.on('finish', () => {
        resolve('done');
        console.log("createNRRD is done");
      });
    });
  } catch (error) {
    console.log(error);
  }
};
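Note that in this version the fs.readFileSync still runs before the 'finish' event fires, so variable_3 can still be read too early. A small variant sketch (same filename_1 and stream as above) that also defers the read until the stream has flushed would be:
// Sketch only: read inside the 'finish' handler and resolve with the file contents.
stream.end();
return new Promise((resolve, reject) => {
  stream.on('error', reject);
  stream.on('finish', () => {
    const variable_3 = fs.readFileSync(filename_1);
    console.log('file opened and read with data: ' + variable_3);
    resolve(variable_3);
  });
});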