Node.js Multiple Query Promises - javascript

How do we handle multiple query promises in Node.js and Mongo using an async function?
For example, I want to add a new query and then handle the gallery result the same way the file result is handled:
const gallery = await gfs.findManyAsync({ metadata: vehicle.VIN })
Here is my current code:
async function myFunction() {
  gfs.findOneAsync = promisify(gfs.findOne);
  gfs.findManyAsync = promisify(gfs.files.find);

  const totalCount = await Vehicle.model.count({})
  const vehicles = await Vehicle.model
    .find(query, {}, pagination)
    .sort(sortOrd + sortType)
  const totalPages = Math.ceil(totalCount / size)

  const promises = vehicles.map(async vehicle => {
    let ImageList = vehicle.ImageList.split(',')
    let profile_name = ImageList[0].split('/').pop();

    const file = await gfs.findOneAsync({
      $and: [
        { $or: [{ metadata: vehicle.VIN }] },
        { $or: [{ filename: profile_name }] }
      ]
    })
    const gallery = await gfs.findManyAsync({ metadata: vehicle.VIN })
    // console.log("Gallery :", gallery)

    if (file) {
      return new Promise(resolve => {
        const readstream = gfs.createReadStream(file.filename);
        const url = `http://localhost:3002/api/vehicle/file/${file.filename}`
        vehicle.FileData = url
        resolve()
      })
    }
  })

  try {
    await Promise.all(promises)
  } catch (err) {
    console.error(err)
  }

  return res.status(200).send({
    message: "success",
    pageNo: pageNo,
    totalRecords: vehicles.length,
    data: vehicles,
    totalPages: totalPages
  })
}
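For what it's worth, here is a minimal sketch of the shape I'm after (it assumes findManyAsync resolves to an array of GridFS file documents, and GalleryData is just a made-up field name for illustration): run both lookups per vehicle and attach both results.

const promises = vehicles.map(async vehicle => {
  const profile_name = vehicle.ImageList.split(',')[0].split('/').pop();

  // run both GridFS lookups in parallel
  const [file, gallery] = await Promise.all([
    gfs.findOneAsync({ metadata: vehicle.VIN, filename: profile_name }),
    gfs.findManyAsync({ metadata: vehicle.VIN })
  ]);

  if (file) {
    vehicle.FileData = `http://localhost:3002/api/vehicle/file/${file.filename}`;
  }
  if (gallery) {
    // GalleryData is a hypothetical field name for illustration
    vehicle.GalleryData = gallery.map(
      g => `http://localhost:3002/api/vehicle/file/${g.filename}`
    );
  }
});

await Promise.all(promises);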

Related

I want to save data from an API to MongoDB (viewed in Compass) using Mongoose

I am trying to save data from a fetch API call to my database using Mongoose, but the data never arrives.
Could anyone help? Thank you.
This is my code:
const mongoose = require('mongoose');
mongoose.connect('mongodb://localhost:27017/Quran')

const SurahSchema = mongoose.Schema({
  ayahs: [{
    number: Number,
    numberInSurah: Number,
    text: String
  }],
  englishName: String,
  englishNameTranslation: String,
  name: String,
  number: Number,
  revelationType: String,
});
var surah = mongoose.model('Surah', SurahSchema)
const API = 'http://api.alquran.cloud/v1/quran/quran-uthmani'
async function getdata() {
  const res = await fetch(API)
  const data = await res.json()
  for (let i = 0; i < data.data.surahs.length; i++) {
    const Surah = new surah({
      ayahs: [{
        number: data.data.surahs[i]['ayahs'].number,
        numberInSurah: data.data.surahs[i]['ayahs'].numberInSurah,
        text: data.data.surahs[i]['ayahs'].text
      }],
      englishName: data.data.surahs[i]['englishName'],
      englishNameTranslation: data.data.surahs[i]['englishNameTranslation'],
      name: data.data.surahs[i]['name'],
      number: data.data.surahs[i]['number'],
      revelationType: data.data.surahs[i]['revelationType']
    })
    Surah.save(function (err) {
      if (err) return handleError(err);
      // saved!
    });
  }
}
getdata()
I tried to search for the problem on Google and did not find anything similar.
Instead of writing such a complicated for loop, you can just use the .insertMany() method on the model:
const API = 'http://api.alquran.cloud/v1/quran/quran-uthmani'
async function getdata() {
  const res = await fetch(API);
  const data = await res.json();
  try {
    const inserted = await surah.insertMany(data.data.surahs);
  } catch (e) {
    console.log("Some error");
    console.log(e);
  }
}
getdata()
The following is a complete working snippet:
import mongoose from 'mongoose';
import fetch from 'node-fetch';

mongoose.connect('mongodb://localhost:27017/Quran');

const SurahSchema = mongoose.Schema({
  ayahs: [
    {
      number: Number,
      numberInSurah: Number,
      text: String,
    },
  ],
  englishName: String,
  englishNameTranslation: String,
  name: String,
  number: Number,
  revelationType: String,
});
const surah = mongoose.model('Surah', SurahSchema);

const API = 'http://api.alquran.cloud/v1/quran/quran-uthmani';

async function getdata() {
  const res = await fetch(API);
  const data = await res.json();
  try {
    const inserted = await surah.insertMany(data.data.surahs);
    console.log(inserted);
    process.exit(0);
  } catch (e) {
    console.log('Some error');
    console.log(e);
    process.exit(0);
  }
}
getdata();
This inserted 114 documents.
PS: you just need to install node-fetch and mongoose.
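If you want to verify the insert from code as well (just a sketch reusing the surah model defined above), you can count the documents after insertMany resolves:

// optional sanity check, using the same `surah` model as above
const count = await surah.countDocuments();
console.log(`surahs in collection: ${count}`); // should log 114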

async/await with firebase storage

I have a function that receives data. I use an asynchronous promise to get a link to the document (item.poster = await Promise.all(promises)), but the data does not have time to be added to the array and I get an empty array. If I remove the part where I get the link to the document, everything works fine. In debug mode I can see all the data, so why am I getting an empty array?
async FetchData({ state, commit }, to) {
  try {
    const q = query(collection(db, to));
    await onSnapshot(q, (querySnapshot) => {
      let data = [];
      querySnapshot.forEach(async (doc) => {
        let promises = [];
        let item = {
          id: doc.id,
          name: doc.data().name,
          slug: doc.data().slug,
          country: doc.data().country,
          duration: doc.data().duration,
          year: doc.data().year,
          video: doc.data().video,
          genres: doc.data().genres,
          actors: doc.data().actors,
        };
        if (to === "films") {
          const starsRef = ref(storage, `images/${doc.id}/poster.png`);
          promises.push(getDownloadURL(starsRef));
          item.poster = await Promise.all(promises);
        }
        data.push(item);
      });
      commit("setData", { data, to });
    });
  } catch (err) {
    console.error(err);
  }
}
forEach and async do not play nicely together; forEach is not awaitable. But the solution is simple: .map() and Promise.all():
// async not really needed
async FetchData({ state, commit }, to) {
  const q = query(collection(db, to));
  // TODO: Call the unsubscribeFn method in order to stop
  // onSnapshot callback executions (when needed)
  const unsubscribeFn = onSnapshot(q, (querySnapshot) => {
    const allPromises = querySnapshot.docs.map(async (doc) => {
      const docData = doc.data();
      let item = {
        id: doc.id,
        name: docData.name,
        slug: docData.slug,
        country: docData.country,
        duration: docData.duration,
        year: docData.year,
        video: docData.video,
        genres: docData.genres,
        actors: docData.actors
      };
      // Or you can use
      // let item = { id: doc.id, ...docData };
      if (to === "films") {
        const starsRef = ref(storage, `images/${doc.id}/poster.png`);
        item.poster = await getDownloadURL(starsRef);
      }
      return item;
    });
    Promise.all(allPromises)
      .then((data) => commit("setData", { data, to }))
      .catch(console.error);
  });
}
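As a usage note (a sketch only; the component hook and dispatch call here are assumptions, not part of the original code), one way to address the TODO is to return unsubscribeFn from the action and call it when you no longer need live updates, since Vuex's dispatch resolves with whatever the action returns:

// at the end of the FetchData action
return unsubscribeFn;

// in the component (hypothetical usage)
const unsubscribe = await this.$store.dispatch("FetchData", "films");
// later, e.g. in beforeUnmount(), stop listening to snapshot updates
unsubscribe();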

SQS messages are coming back to DLQ?

I have written the following script to move messages from DLQ to MainQ.
'use strict'
import { readFileSync } from "fs";
import {
  DeleteMessageCommand,
  SQSClient,
  GetQueueUrlCommand,
  SendMessageCommand,
  ReceiveMessageCommand,
  GetQueueAttributesCommand
} from '@aws-sdk/client-sqs';
const REGION = 'us-east-1';
const sqs = new SQSClient({ region: REGION });
async function getMessageCount(queueUrl) {
  const params = { AttributeNames: ['ApproximateNumberOfMessages'], QueueUrl: queueUrl };
  try {
    const data = await sqs.send(new GetQueueAttributesCommand(params));
    // console.log(data);
    return data.Attributes.ApproximateNumberOfMessages;
  } catch (err) {
    console.error(err);
  }
}

async function reprocessMessages(dlqQueue, mainQueue) {
  try {
    const response1 = await sqs.send(new GetQueueUrlCommand({ QueueName: dlqQueue }));
    const response2 = await sqs.send(new GetQueueUrlCommand({ QueueName: mainQueue }));
    const dlqQueueUrl = response1.QueueUrl;
    const mainQueueUrl = response2.QueueUrl;
    const messageCount = await getMessageCount(dlqQueueUrl);
    console.log(`Message count ${messageCount}`);
    const visibiltyTimeout = Math.ceil(messageCount / 100) * 2;
    let count = 0;
    while (count <= 50) {
      count += await moveMessage(mainQueueUrl, dlqQueueUrl, 5);
    }
    console.log(`Moved ${count} messages`);
    return "Completed";
  } catch (err) {
    console.error(err);
  }
}

async function moveMessage(mainQueueUrl, dlqQueueUrl, visibiltyTimeout) {
  try {
    const receiveMessageParams = {
      MaxNumberOfMessages: 9,
      MessageAttributeNames: ['All'],
      QueueUrl: dlqQueueUrl,
      VisibilityTimeout: visibiltyTimeout,
      WaitTimeSeconds: 1
    };
    const receiveData = await sqs.send(new ReceiveMessageCommand(receiveMessageParams));
    // console.log(receiveData);
    if (!receiveData.Messages) {
      // console.log("finished");
      return 0;
    }
    const messages = [];
    receiveData.Messages.forEach(msg => {
      messages.push({ id: msg.MessageId, msgAttributes: msg.MessageAttributes, body: msg.Body, receiptHandle: msg.ReceiptHandle });
    });
    const sendMsg = async ({ id, msgAttributes, body, _ }) => {
      const sendMessageParams = {
        MessageId: id,
        MessageAttributes: msgAttributes,
        MessageBody: body,
        QueueUrl: mainQueueUrl
      };
      const sentData = await sqs.send(new SendMessageCommand(sendMessageParams));
      // console.log("Success, message sent. MessageID: ", sentData.MessageId);
      return 'Success';
    };
    const deleteMsg = async ({ id, receiptHandle }) => {
      const deleteMessageParams = {
        MessageId: id,
        QueueUrl: dlqQueueUrl,
        ReceiptHandle: receiptHandle
      };
      const deleteData = await sqs.send(new DeleteMessageCommand(deleteMessageParams));
      // console.log("Message deleted", deleteData);
      return 'Deleted';
    };
    const sent = await Promise.all(messages.map(sendMsg));
    console.log(sent);
    const deleted = await Promise.all(messages.map(deleteMsg));
    console.log(deleted);
    console.log(sent.length);
    return sent.length;
    // return 1;
  } catch (err) {
    console.log(err);
  }
}

function main() {
  const queues = readFileSync('queues.txt', 'utf8').split('\n');
  queues.map(async elem => {
    const [dlqQueue, mainQueue] = elem.split(' ');
    console.log(`Moving messages from ${dlqQueue} to ${mainQueue}`);
    const response = await reprocessMessages(dlqQueue, mainQueue);
    console.log(response);
  });
}
main()
This script moves the messages from the DLQ to the MainQ, and the new DLQ count shows a decrease by the number of messages moved. However, when those messages fail in the MainQ they come back to the DLQ, keeping the count the same as before reprocessing. But after a while I see that the messages in the DLQ increase by the same amount as the number of messages processed.
For instance, in the case below I moved 54 messages; the screenshots of the queue counts after each stage are attached.

export to CSV at the end of the run

The original code that scrapes the first page of data works, but then I created a loop that clicks a "load more" button and scrapes the data until there is no more "load more" button. At the end of my run it is not exporting anything. Is my code for exporting to CSV incorrect? Where am I going wrong with this?
const puppeteer = require('puppeteer');
const jsonexport = require('jsonexport');

(async () => {
  const browser = await puppeteer.launch({ headless: false }); // default is true
  const page = await browser.newPage();
  await page.goto('https://www.Website.com/exercises/finder', {
    waitUntil: 'domcontentloaded',
  });
  // load more CSS to be targeted
  const LoadMoreButton =
    '#js-ex-content > #js-ex-category-body > .ExCategory-results > .ExLoadMore > .bb-flat-btn';
  do {
    // clicking load more button and waiting 1sec
    await page.click(LoadMoreButton);
    await page.waitFor(1000);
    const loadMore = true;
    const rowsCounts = await page.$eval(
      '.ExCategory-results > .ExResult-row',
      (rows) => rows.length
    );
    // scraping the data
    const exerciseNames = [];
    for (let i = 2; i < rowsCounts + 1; i++) {
      const exerciseName = await page.$eval(
        `.ExCategory-results > .ExResult-row:nth-child(${i}) > .ExResult-cell > .ExHeading > a`,
        (el) => el.innerText
      );
      exerciseNames.push(exerciseName);
    }
    console.log({ exerciseNames });
  } while (10000);
  const allData = [
    {
      exercise: exerciseNames,
    },
  ];
  // exporting data to CSV
  const options = [exercise];
  // json export error part
  jsonexport(allData, options, function (err, csv) {
    if (err) return console.error(err);
    console.log(csv);
  });
  await browser.close();
})().catch((e) => {
  console.error(e);
});
Edit:
This is what I have at the moment for exporting and writing to a CSV file. I'm getting 3 headers, but only the exercises are being written and nothing else. console.log shows the exercises, muscle target groups, and equipment being exported, though. I'm trying to get it so that there are 3 headers (name, equipment, and targeted muscle) and each row is filled in underneath. E.g. squat, barbell, legs would be in one row, each in its own cell.
Current export code:
const allData = [
  {
    exercise: exerciseNames,
    muscleGroup: muscleTargets,
    equipment: equipmentTypes,
  },
];

var ws = fs.createWriteStream('test1.csv');
csv.write(allData, { headers: true, delimiter: ',' }).pipe(ws);

// json export error part
jsonexport(allData, function (err, csv) {
  if (err) return console.error(err);
  console.log(csv);
});
Edit 2
This is currently my entire code. It outputs the pre-filled allData info but no new data.
const puppeteer = require('puppeteer');
const jsonexport = require('jsonexport');
const fs = require('fs');

(async () => {
  const browser = await puppeteer.launch({ headless: false }); // default is true
  const page = await browser.newPage();
  await page.goto('https://www.website.com/exercises/finder', {
    waitUntil: 'domcontentloaded',
  });
  const loadMore = true;
  const rowsCounts = await page.$$eval(
    '.ExCategory-results > .ExResult-row',
    (rows) => rows.length
  );

  let allData = [];
  for (let i = 2; i < rowsCounts + 1; i++) {
    const exerciseName = await page.$eval(
      `.ExCategory-results > .ExResult-row:nth-child(${i}) > .ExResult-cell > .ExHeading > a`,
      (el) => el.innerText
    );
    const muscleGroupName = await page.$eval(
      `.ExCategory-results > .ExResult-row:nth-child(${i}) > .ExResult-cell > .ExResult-muscleTargeted > a`,
      (el) => el.innerHTML
    );
    const equipmentName = await page.$eval(
      `.ExCategory-results > .ExResult-row:nth-child(${i}) > .ExResult-cell > .ExResult-equipmentType > a`,
      (el) => el.innerHTML
    );
    let obj = {
      exercise: exerciseName,
      muscleGroup: muscleGroupName,
      equipment: equipmentName,
    };
    allData.push(obj);
  }
  console.log(allData);

  async function fn() {
    const allData = [
      {
        exercise: 'Rickshaw Carry',
        muscleGroup: 'Forearms',
        equipment: 'Other',
      },
      {
        exercise: 'Single-Leg Press',
        muscleGroup: 'Quadriceps',
        equipment: 'Machine',
      },
      {
        exercise: 'Landmine twist',
        muscleGroup: 'Forearms',
        equipment: 'Other',
      },
      {
        exercise: 'Weighted pull-up',
        muscleGroup: 'Forearms',
        equipment: 'Other',
      },
    ];
    // json export error part
    jsonexport(allData, function (err, csv) {
      if (err) return console.error(err);
      console.log(csv);
      fs.writeFileSync('output.csv', csv);
    });
  }
  fn();
  await browser.close();
})().catch((e) => {
  console.error(e);
});
I see two issues here.
I.) One of them is with the options declaration:
const options = [exercise]; // ❌
You are trying to access the exercise property of the allData object without proper notation. If you really need to extract it into a new array, you can do it by going into the first element of the allData array using index [0], then using dot notation to access the exercise property.
const options = [allData[0].exercise]; // ✅
Note: I suggest leaving options as simply allData[0].exercise (without the wrapping array), as your allData object is already an array; I see no benefit in making the structure deeper.
II.) The second issue is with the usage of the jsonexport npm package. I suppose you left allData in this line accidentally:
jsonexport(allData, options, function (err, csv) // ❌
You only need the options here (according to the docs you can give only one object as the input):
jsonexport(options, function (err, csv) // ✅
Edit
Based on your updated question, your problem can be solved if you restructure your allData object a bit, so that jsonexport will recognize each column and row correctly.
const jsonexport = require('jsonexport')
const fs = require('fs')

async function fn() {
  const allData = [
    {
      exercise: 'Rickshaw Carry',
      muscleGroup: 'Forearms',
      equipment: 'Other'
    },
    {
      exercise: 'Single-Leg Press',
      muscleGroup: 'Quadriceps',
      equipment: 'Machine'
    },
    {
      exercise: 'Landmine twist',
      muscleGroup: 'Forearms',
      equipment: 'Other'
    },
    {
      exercise: 'Weighted pull-up',
      muscleGroup: 'Forearms',
      equipment: 'Other'
    }
  ]
  // json export error part
  jsonexport(allData, function (err, csv) {
    if (err) return console.error(err)
    console.log(csv)
    fs.writeFileSync('output.csv', csv)
  })
}
fn()
fn()
To achieve such a structure, you should extend allData in each iteration like this:
let allData = []
for (let i = 2; i < rowsCounts; i++) {
  const exerciseName = await page.$eval(`...row:nth-child(${i})...`,
    el => el.textContent.trim())
  const muscleGroupName = await page.$eval(`...row:nth-child(${i})...`,
    el => el.textContent.trim())
  const equipmentName = await page.$eval(`...row:nth-child(${i})...`,
    el => el.textContent.trim())
  let obj = {
    exercise: exerciseName,
    muscleGroup: muscleGroupName,
    equipment: equipmentName
  }
  allData.push(obj)
}
console.log(allData)
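Then, once the loop has finished, export allData a single time (this just wires the loop output to the same jsonexport and fs calls shown in the snippet above):

jsonexport(allData, function (err, csv) {
  if (err) return console.error(err)
  fs.writeFileSync('output.csv', csv)
})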

Imported async data from another module in Node does not resolve, how can I fix this?

This is the module that collects and exports async data: scraper.js
const express = require('express')
const cheerio = require('cheerio')
const request = require("tinyreq")
const fs = require('fs')
const _ = require('lodash')
const uuid = require('uuid/v4')
const async = require('async')

const mental_models = {
  url: 'https://www.farnamstreetblog.com/mental-models/',
  data: {}
}
const decision_making = {
  url: 'https://www.farnamstreetblog.com/smart-decisions/',
  data: {}
}
const cognitive_bias = {
  url: 'https://betterhumans.coach.me/cognitive-bias-cheat-sheet-55a472476b18',
  data: {}
}
const DATA_URLS = [mental_models, decision_making, cognitive_bias]

const filterScrape = async (source, params) => {
  let filtered_data = {
    topics: [],
    content: [],
    additional_content: []
  }
  let response = await scrape(source)
  try {
    let $ = cheerio.load(response)
    params.forEach((elem) => {
      let headers = ['h1', 'h2', 'h3']
      if ($(elem) && headers.includes(elem)) {
        let topic = {}
        let content = {}
        let id = uuid()
        topic.id = id
        topic.text = $(elem).text()
        if ($(elem).closest('p')) {
          content.text = $(elem).closest('p').text()
          content.id = id
        }
        filtered_data.topics.push(topic)
        filtered_data.content.push(content)
      } else if ($(elem) && !headers.includes(elem)) {
        let content = {}
        let id = uuid()
        content.text = $(elem).text()
        content.id = id
        filtered_data.additional_content.push(content)
      } else {
      }
    })
  }
  catch (err) {
    console.log(err)
  }
  return filtered_data
}

const scrape = (source) => {
  return new Promise((resolve, reject) => {
    request(source.url, function (err, body) {
      if (err) {
        reject(err)
        return
      }
      resolve(body)
    })
  })
}

const DATA = _.map(DATA_URLS, async (source) => {
  let params = ['h1', 'h2', 'h3', 'p']
  let new_data = await filterScrape(source, params)
  try {
    source.data = new_data
  }
  catch (err) {
    console.log(err)
  }
})

module.exports = DATA
This is the module that imports the data: neural.js
const brain = require('brain')
const neural_net = new brain.NeuralNetwork()
const DATA = require('./scraper')
console.log(DATA)
Obviously there's not much going on; I've removed the code since the variable doesn't resolve. When logged, it shows a promise, but the promise does not resolve. However, in the imported module the promise is logged and then resolves. What gives? Should I import a function that resolves the data?
Of course it would be best to import that function; however, it won't change the issue in your code, which is here:
const DATA = _.map(DATA_URLS, async (source) => {
Lodash doesn't support async iteration, so you need some other method. One option would be to use the newest Node.js version (10.x) and make use of async iteration, but that won't use the full power of asynchronous code.
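If you prefer to stay dependency-free, a minimal sketch (reusing the filterScrape and DATA_URLS already defined in scraper.js) is to build the array of promises with a plain .map and export a single Promise.all:

// scraper.js – export one promise that resolves to all scraped results
const DATA = Promise.all(
  DATA_URLS.map(async (source) => {
    const params = ['h1', 'h2', 'h3', 'p']
    const data = await filterScrape(source, params)
    return { url: source.url, data }
  })
)

module.exports = DATA

// neural.js would then await require('./scraper') inside an async function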
You can also use scramjet - a framework my company is supporting. The code above would take the following form:
const {DataStream} = require("scramjet");
const DATA_URLS = [mental_models, decision_making, cognitive_bias];
module.exports = async () => DataStream.fromArray(DATA_URLS)
  .setOptions({ maxParallel: 2 }) // if you need to limit that at all
  .map(async (source) => {
    let params = ['h1', 'h2', 'h3', 'p']
    let data = await filterScrape(source, params);
    return { url: source.url, data };
  })
  .toArray();
The other file would take the following form:
const brain = require('brain')
const neural_net = new brain.NeuralNetwork()
const scraper = require('./scraper');

(async () => {
  const DATA = await scraper();
  console.log(DATA); // or do whatever else you were expecting...
})();
