Request every 5 min in a loop Node.js - javascript

I need to make GET requests to an external server in a loop, but firing them all at once can hit the server's rate limits. I'd like to space the calls out, one every 5 or 10 minutes or so. Is there a way to achieve this?
Here is my code:
const util = require('util');
const StravaClientService = require("../../strava/client.service");
const StravaActivityService = require("../../strava/activity.service");
const _ = require("underscore");
const fs = require("fs");
const AWS = require("aws-sdk");

AWS.config.update({
  region: "us-east-1",
});
var docClient = new AWS.DynamoDB.DocumentClient();

module.exports = (router) => {
  router.get("/streams/:id", async (req, res, done) => {
    const userc = req.user;
    const access_token = userc.access_token;
    const ids = [
      4401422821,
      4401416494,
      4401413107,
    ];
    const stravaClient = StravaClientService.getClient(access_token);
    const activityService = StravaActivityService(stravaClient);

    var params = {
      TableName: "run-id",
      Key: {
        "id": userc.stravaId,
      }
    };
    docClient.get(params, function (err, data) {
      if (err) {
        console.log("Error", err);
      } else {
        console.log("Success", data.Item.json);
        ids.map((id) => setTimeout(activityService.streamActivity, 5000, id)); // data.Item.json
      }
    });
  });
};

Basically, setInterval is enough for your scenario. However, you could also try node-schedule, which helps manage cron-style jobs.
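For instance, a minimal node-schedule sketch (assuming the ids array and activityService from the question are in scope) that fires one call every 5 minutes, walking through the ids one at a time:

const schedule = require('node-schedule');

let cursor = 0;
// Cron expression: at every 5th minute.
const job = schedule.scheduleJob('*/5 * * * *', () => {
  if (cursor >= ids.length) {
    job.cancel(); // stop once every id has been processed
    return;
  }
  activityService.streamActivity(ids[cursor]);
  cursor += 1;
});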

Use the index to offset?
ids.map((id, index) => setTimeout(() => activityService.streamActivity(id), index * 5 * 60000)); // one call every 5 minutes; data.Item.json

Use setInterval
let i = 0;
const timer = setInterval(() => {
  activityService.streamActivity(ids[i]);
  i += 1;
  if (i >= ids.length) clearInterval(timer); // stop after the last id
}, 5 * 60 * 1000); // every 5 minutes
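Another option (a minimal sketch, assuming an async context and the same ids and activityService): run a plain for...of loop and await a sleep between calls, which keeps the requests strictly sequential:

const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function streamAllSlowly(ids, activityService) {
  for (const id of ids) {
    await activityService.streamActivity(id);
    await sleep(5 * 60 * 1000); // wait 5 minutes before the next call
  }
}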

Related

FS.Readable Stream skipping rows when importing to noSQL using Mongoose schemas

I am attempting to import a CSV using my mongoose model, and regardless of the file's size, it imports the first 2 rows and then every other row.
const fs = require('mz/fs');
const { parse } = require('@fast-csv/parse');
const streamToIterator = require('stream-to-iterator');
const mongoose = require('mongoose');
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const Product = require('./schemas/Product');

const options = {
  useNewUrlParser: true,
  useUnifiedTopology: true,
};
const database = mongoose
  .connect(process.env.DATABASE_URL, options)
  .then((db) =>
    (async function () {
      console.log('Connected to database.');
      try {
        await Promise.all(
          Object.entries(db.models).map(([k, m]) => m.deleteMany())
        );
        let headers = Object.keys(Product.schema.paths).filter(
          (k) => ['_id', '__v'].indexOf(k) === -1
        );
        if (await fs.exists('./database.csv')) {
          let stream = fs
            .createReadStream('./database.csv')
            .pipe(parse({ headers }));
          const iterator = await streamToIterator(stream).init();
          let buffer = [],
            counter = 0;
          for (let docPromise of iterator) {
            let doc = await docPromise;
            buffer.push(doc);
            counter++;
            if (counter > 10000) {
              await Product.insertMany(buffer);
              buffer = [];
              counter = 0;
            }
          }
          if (counter > 0) {
            await Product.insertMany(buffer);
            buffer = [];
            counter = 0;
          }
        }
      } catch (e) {
        console.error(e);
      }
    })()
  )
  .catch((err) => console.error('Error connecting to database:', err));
module.exports = database;
When I look at my doc variable, it is already malformed (every other row), and the data is already in that state as I read the stream, so I'm assuming the problem occurs around there?
What I ended up doing to resolve this was to turn the CSV into JSON and import that instead. It's not ideal and doesn't really address the underlying issue, but my database has what it needs.
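As a side note, one way to avoid the stream-to-iterator layer entirely is to consume the parse stream with for await...of. This is only a sketch, not tested against this dataset; it assumes Node 10+ (where readable streams are async-iterable) and reuses the Product model, fs, parse, and headers from above:

async function importCsv(path, headers) {
  const stream = fs.createReadStream(path).pipe(parse({ headers }));
  let buffer = [];
  // for await...of pulls rows one at a time and respects backpressure,
  // so no manual iterator wiring is needed.
  for await (const row of stream) {
    buffer.push(row);
    if (buffer.length >= 10000) {
      await Product.insertMany(buffer);
      buffer = [];
    }
  }
  if (buffer.length > 0) {
    await Product.insertMany(buffer);
  }
}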

Sinon stub out module's function from a middleware

Based on this question, I also need to write a test for a middleware that uses the same db-connection.js file. The middleware file looks like this:
const dbConnection = require('./db-connection.js')
const Boom = require('@hapi/boom') // or require('boom') in older projects

module.exports = function (...args) {
  return async function (req, res, next) {
    // somethin' somethin' ...
    const dbClient = dbConnection.db
    const docs = await dbClient.collection('test').find()
    if (!docs) {
      return next(Boom.forbidden())
    }
  }
}
The database connection file does not change; it is still:
const MongoClient = require('mongodb').MongoClient
const dbName = 'test'
const url = process.env.MONGO_URL
const client = new MongoClient(url, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
  bufferMaxEntries: 0 // don't buffer queries when not connected
})
const init = () => {
  return client.connect().then(() => {
    logger.info(`mongodb db:${dbName} connected`) // logger is defined elsewhere in the project
    const db = client.db(dbName)
  })
}
/**
 * @type {Connection}
 */
module.exports = {
  init,
  client,
  get db () {
    return client.db(dbName)
  }
}
The middleware works by receiving a list of strings (the roles); for each role it queries the database and checks whether a matching record exists. If the record exists, it returns next(), while if it does not, it returns next(Boom.forbidden()) (next with a 403 status from the Boom module).
Given the details above, how does one write a test covering the middleware's behaviour in both cases? That is, I need to assert the exact next() and next(Boom.forbidden()) calls.
Based on the answer to that question, you can create stubs for the req and res objects and the next function.
E.g. (not run as-is, but it should work):
const sinon = require('sinon');

describe('a', () => {
  afterEach(() => {
    sinon.restore();
  });
  it('should query the test collection and call next', async () => {
    process.env.MONGO_URL = 'mongodb://localhost:27017';
    const a = require('./a');
    const dbConnection = require('./db-connection.js');
    const dbStub = {
      collection: sinon.stub().returnsThis(),
      find: sinon.stub(), // resolves undefined, so the middleware takes the forbidden branch
    };
    // Replace the db getter so the middleware receives the stub instead of a real client.
    sinon.stub(dbConnection, 'db').get(() => dbStub);
    const req = {};
    const res = {};
    const next = sinon.stub();
    await a()(req, res, next);
    sinon.assert.calledWithExactly(dbStub.collection, 'test');
    sinon.assert.calledOnce(dbStub.find);
    sinon.assert.calledOnce(next);
  });
});
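To pin down the forbidden case explicitly, you could add a second case inside the same describe block (a sketch, assuming Boom errors carry the usual isBoom flag) that matches the argument next was called with:

it('should call next with Boom.forbidden when no docs exist', async () => {
  const a = require('./a');
  const dbConnection = require('./db-connection.js');
  const dbStub = {
    collection: sinon.stub().returnsThis(),
    find: sinon.stub().resolves(null),
  };
  sinon.stub(dbConnection, 'db').get(() => dbStub);
  const next = sinon.stub();
  await a()({}, {}, next);
  // Boom errors are Error instances with isBoom === true.
  sinon.assert.calledOnceWithExactly(
    next,
    sinon.match.instanceOf(Error).and(sinon.match.has('isBoom', true))
  );
});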

Changes to a JSON file not taken into account NodeJS

I'm coming to you because I'm trying to run a forEach loop in Discord.JS to detect changes in a JSON file. The file's content does change, but my forEach loop keeps the old content in memory. I have no idea how to solve the problem...
My index.js:
const Discord = require('discord.js');
const low = require('lowdb')
const FileSync = require('lowdb/adapters/FileSync')
const fetch = require('node-fetch');

const client = new Discord.Client();
const config = require('./config.json');
const database = require('./db.json');
const adapter = new FileSync('./db.json')
const db = low(adapter)
const prefix = config.prefix;
let api = config.api;

client.once('ready', () => {
  db.defaults({ numbers: [] })
    .write()
  setInterval(function () {
    database.numbers.forEach(async element => {
      let state = await fetch(`some-api-url`).then(response => response.json());
      if (state[0].response != element.response) {
        db.get('numbers')
          .find({ number: element.number })
          .assign({ response: state[0].response, sms: state[0].msg })
          .write();
        let user = element.clientId;
        try {
          await client.users.cache.get(user).send(`Your message for number ${element.number} is ${element.sms}`);
        } catch (error) {
          console.log(error)
        }
      }
    });
  }, 3000);
  console.log('Ready!');
});
It all works; it just keeps the old file in memory.
To solve this problem, I changed const database = require('./db.json'); to let. Then I cleared the require cache on each tick so the file is re-read:
setInterval(function () {
  delete require.cache[require.resolve('./db.json')]
  database = require('./db.json');
Problem solved!
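An alternative sketch that skips the require-cache trick entirely: lowdb v1's FileSync adapter exposes db.read() to reload the file from disk, so you can re-read inside the interval and iterate the fresh state (assuming the same db instance from above):

setInterval(function () {
  db.read(); // reload db.json from disk so we never see a stale snapshot
  db.get('numbers').value().forEach(async element => {
    // ... same change-detection logic as above
  });
}, 3000);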

Struggling with chaining postgres db async/await promises to get a result sent to client (express)

I have spent a while looking at similar problems online, but I'm really struggling to wrap my head around this. I've reviewed a few sources for help but was unable to apply the logic I've seen to my own codebase, so I'm hoping somebody out there might be able to help.
I'm running an express server with a pg Pool to handle the DB logic.
I can successfully log the information from within the pg Pool callbacks, but despite banging my head against multiple attempts, I have been unable to pass the data on to the client side.
dbQueries.js
const { Pool } = require('pg');
const pool = new Pool({
  user: process.env.DB_USER,
  host: process.env.DB_HOST,
  database: process.env.DB_NAME,
  password: process.env.DB_PASSWORD,
  port: process.env.DB_PORT
});

// Main function called in server.js
// step 1. fetches distinct name values from pg table
// step 2. fetches values to get full list of reviews for those distinct names
// step 3. does some data modification to make the data formatted for frontend use case
const getFormattedReviews = async function () {
  console.log('Fetching all unique therapist review names.')
  const getDistinct = {
    name: 'distinct-reviews',
    text: 'SELECT DISTINCT therapist_name FROM reviews'
  };
  // step 1
  const res = await pool.query(getDistinct, (err, res) => {
    let data = []
    if (err) {
      console.log(err.stack);
    } else {
      // console.log(res.rows);
      data = res.rows.map(
        // step 2
        therapist => getSpecificTherapistReviews(therapist.therapist_name)
      )
    }
    console.log(`\n DEBUG3 - getFormattedReviews data: ${JSON.stringify(data)} \n`)
    return data;
  });
  return res;
}

const getSpecificTherapistReviews = async function (therapist_name) {
  console.log(`Fetching reviews for: ${therapist_name}.`)
  const getSpecificTherapistReviews = {
    name: `${therapist_name}-reviews`,
    text: `SELECT * FROM reviews WHERE therapist_name LIKE '%${therapist_name}%'`
  };
  const res = await pool.query(getSpecificTherapistReviews, (err, res) => {
    let data = []
    if (err) {
      console.log(err.stack);
    } else {
      // console.log(res.rows);
      // step 3
      data = filteringDataForFrontend(res.rows);
    }
    console.log(`\n DEBUG2 - GetSpecificTherapistReviews ${JSON.stringify(data)} \n`)
    return data;
  });
  return res;
}

const filteringDataForFrontend = function (data) {
  console.log(`Filtering Data for Frontend.`)
  // Based on length of the reviews array, each review = 1 object
  const total_reviews = data.length;
  // Underlying logic assumes consistent data across all entries for these values
  const therapist_name = data[0].therapist_name;
  const type = data[0].type;
  const image = data[0].image;
  const location = data[0].location;
  // Summing the rating values across multiple review entries
  const ratings = data.reduce((acc, obj) => ({
    rating_friendliness: acc.rating_friendliness + obj.rating_friendliness,
    rating_techniques: acc.rating_techniques + obj.rating_techniques,
    rating_progression: acc.rating_progression + obj.rating_progression,
    rating_cost: acc.rating_cost + obj.rating_cost,
    rating_listening: acc.rating_listening + obj.rating_listening,
    rating_overall: acc.rating_overall + obj.rating_overall
  }))
  // Placeholder as string, most likely restructure to an array of objects
  const comments = data.reduce((acc, obj) => ({
    feedback_comments: acc.feedback_comments + obj.feedback_comments
  }))
  // Filtered data for returning
  const filteredData = {
    therapist_name,
    type,
    image,
    location,
    rating_friendliness: ratings.rating_friendliness / total_reviews,
    rating_techniques: ratings.rating_techniques / total_reviews,
    rating_progression: ratings.rating_progression / total_reviews,
    rating_cost: ratings.rating_cost / total_reviews,
    rating_listening: ratings.rating_listening / total_reviews,
    rating_overall: ratings.rating_overall / total_reviews,
    feedback_comments: comments.feedback_comments
  }
  console.log(`\n DEBUG 1 - filteredData -> ${JSON.stringify(filteredData)} \n`)
  return filteredData;
}

module.exports = {
  getFormattedReviews,
};
The ideal setup I would like on the server.js side, running express, would be:
server.js
const express = require('express');
const DB = require('./dbQueries.js');
const app = express();
const port = process.env.SERVER_PORT || 8000;

app.get('/get-reviews', async (req, res) => {
  const data = await DB.getFormattedReviews();
  console.log(`data check ${data}`);
  res.send({ data });
});

app.listen(port);
Currently the endpoint logs 'data check undefined'.
DEBUG checks 1 & 2 successfully log information; however, I spotted that DEBUG 3 only logs DEBUG3 - getFormattedReviews data: [{},{},{}], so perhaps I'm doing something wrong around there?
Any help/insight appreciated.
Thanks @Abraham & @Labkovsky for the suggestions -> I'll review them properly during the week.
I managed to get the basic functionality up and running with this code. It likely needs some refactoring, but for reference:
dbQueries.js
const getFormattedReviews = async function () {
  const getDistinct = {
    name: 'distinct-reviews',
    text: 'SELECT DISTINCT therapist_name FROM reviews'
  };
  const res = await new Promise(resolve => {
    pool.query(getDistinct, (err, res) => {
      let data = []
      if (err) {
        console.log(err.stack);
      } else {
        // console.log(res.rows);
        data = res.rows.map(
          async therapist => await getSpecificTherapistReviews(therapist.therapist_name)
        )
        // Promise.all(data).then(results => console.log(`\n DEBUG3 - getFormattedReviews data: ${JSON.stringify(results)} \n`))
      }
      Promise.all(data).then(results => resolve(results));
    });
  });
  return res;
}

const getSpecificTherapistReviews = async function (therapist_name) {
  // console.log(`Fetching reviews for: ${therapist_name}.`)
  const getSpecificTherapistReviews = {
    name: `${therapist_name}-reviews`,
    text: `SELECT * FROM reviews WHERE therapist_name LIKE '%${therapist_name}%'`
  };
  const res = await new Promise(resolve => {
    pool.query(getSpecificTherapistReviews, (err, res) => {
      let data = []
      if (err) {
        console.log(err.stack);
      } else {
        // console.log(res.rows);
        data = filteringDataForFrontend(res.rows);
      }
      // console.log(`\n DEBUG2 - GetSpecificTherapistReviews ${JSON.stringify(data)} \n`)
      resolve(data);
    });
  });
  return res;
}
const filteringDataForFrontend = function (data) {
  // Based on length of the reviews array, each review = 1 object
  const total_reviews = data.length;
  // Underlying logic assumes consistent data across all entries for these values
  const therapist_name = data[0].therapist_name;
  const type = data[0].type;
  const image = data[0].image;
  const location = data[0].location;
  // Summing the rating values across multiple review entries
  const ratings = data.reduce((acc, obj) => ({
    rating_friendliness: acc.rating_friendliness + obj.rating_friendliness,
    rating_techniques: acc.rating_techniques + obj.rating_techniques,
    rating_progression: acc.rating_progression + obj.rating_progression,
    rating_cost: acc.rating_cost + obj.rating_cost,
    rating_listening: acc.rating_listening + obj.rating_listening,
    rating_overall: acc.rating_overall + obj.rating_overall
  }))
  // Placeholder as string, most likely restructure to an array of objects
  const comments = data.reduce((acc, obj) => ({
    feedback_comments: acc.feedback_comments + obj.feedback_comments
  }))
  // Filtered data for returning
  const filteredData = {
    therapist_name,
    type,
    image,
    location,
    total_reviews,
    rating_friendliness: ratings.rating_friendliness / total_reviews,
    rating_techniques: ratings.rating_techniques / total_reviews,
    rating_progression: ratings.rating_progression / total_reviews,
    rating_cost: ratings.rating_cost / total_reviews,
    rating_listening: ratings.rating_listening / total_reviews,
    rating_overall: ratings.rating_overall / total_reviews,
    feedback_comments: comments.feedback_comments
  }
  // console.log(`\n DEBUG 1 - filteredData -> ${JSON.stringify(filteredData)} \n`)
  return filteredData;
}
module.exports = {
getFormattedReviews,
};
server.js
const express = require('express');
const DB = require('./dbQueries.js');
const app = express();
const port = process.env.SERVER_PORT || 8000;

app.get('/get-reviews', async (req, res) => {
  const data = await DB.getFormattedReviews();
  // data.then(data => console.log(`data2 check ${JSON.stringify(data)}`))
  res.send(data);
});

app.listen(port);
Ultimately I think my limited understanding of Promises/async/await & Promise.all failed me a bit here; the Promise.all in getFormattedReviews was the missing trick.
Some of the code was rewritten with the new Promise(...) wrapper syntax, which can probably be removed again; however, when I tried that with the getFormattedReviews method, it broke sending the data to the /get-reviews endpoint. Something I'll investigate later.
You are awaiting a callback. I don't think that works.
Try wrapping in a Promise. That might be the issue.
Full disclosure: I did not read your code very in depth...
const res = await new Promise(resolve => {
  pool.query(getSpecificTherapistReviews, (err, res) => {
    let data = []
    if (err) {
      console.log(err.stack);
    } else {
      // console.log(res.rows);
      // step 3
      data = filteringDataForFrontend(res.rows);
    }
    console.log(`\n DEBUG2 - GetSpecificTherapistReviews ${JSON.stringify(data)} \n`)
    resolve(data);
  });
})
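For what it's worth, pg's pool.query returns a promise when called without a callback, so the new Promise wrappers can be dropped entirely. A sketch under that assumption, reusing the pool and filteringDataForFrontend from above (the parameterized query also avoids the SQL injection risk of interpolating therapist_name):

const getFormattedReviews = async function () {
  const { rows } = await pool.query('SELECT DISTINCT therapist_name FROM reviews');
  // Wait for every per-therapist query before returning the combined result.
  return Promise.all(rows.map(r => getSpecificTherapistReviews(r.therapist_name)));
};

const getSpecificTherapistReviews = async function (therapist_name) {
  const { rows } = await pool.query(
    'SELECT * FROM reviews WHERE therapist_name LIKE $1',
    [`%${therapist_name}%`]
  );
  return filteringDataForFrontend(rows);
};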

Remove comma from ffmpeg output in AWS Lambda layer

I am using the ffmpeg Lambda layer to get the duration and channel count from an audio file, then storing these details in variables to use later in my code.
Can anyone spot/tidy this code so it outputs only the actual value, not one prepended with a comma?
const { spawnSync } = require("child_process");
var fs = require('fs');
const https = require('https');

exports.handler = async (event) => {
  const source_url = 'https://upload.wikimedia.org/wikipedia/commons/b/b2/Bell-ring.flac';
  const target_path = '/tmp/test.flac';
  async function downloadFile() {
    return new Promise((resolve, reject) => {
      const file = fs.createWriteStream(target_path);
      const request = https.get(source_url, function (response) {
        response.pipe(file);
        console.log('file_downloaded!');
        resolve();
      });
    });
  }
  await downloadFile();
  const duration = spawnSync(
    "/opt/bin/ffprobe",
    [
      target_path,
      "-show_entries",
      "stream=duration",
      "-select_streams",
      "a",
      "-of",
      "compact=p=0:nk=1",
      "-v",
      "0"
    ]
  );
  const channel = spawnSync(
    "/opt/bin/ffprobe",
    [
      target_path,
      "-show_entries",
      "stream=channels",
      "-select_streams",
      "a",
      "-of",
      "compact=p=0:nk=1",
      "-v",
      "0"
    ]
  );
  var durations = duration.output.toString('utf8');
  console.log(durations);
  var channels = channel.output.toString('utf8');
  console.log(channels);
  /* const response = {
    statusCode: 200,
    // body: JSON.stringify([channel.output.toString('utf8')])
    body: 'Complete'
  };
  return response; */
};
I'm just not sure where these comma values are coming from, and I need them as number values for comparison functions later in the code.
It uses this easy Lambda layer, with no external modules required:
https://github.com/serverlesspub/ffmpeg-aws-lambda-layer
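The comma most likely comes from reading duration.output: spawnSync's output property is the array [stdin, stdout, stderr], and calling toString() on an array joins its entries with commas. A sketch of the likely fix, reading stdout directly and parsing it to a number:

var durations = parseFloat(duration.stdout.toString('utf8').trim());
console.log(durations);
var channels = parseInt(channel.stdout.toString('utf8').trim(), 10);
console.log(channels);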
