I want to render an array of stories from all projects in Node.js. How can I do that?
app.get('/stories', function(request, response) {
  var project_ids = [1, 2];
  project_ids.forEach(function(id) {
    pivotal.getStories(id, function(err, project_stories) {
      console.log("Stories: " + project_stories);
      return JSON.stringify(project_stories);
    });
  });
  response.send(array_of_stories);
});
In the log I get:
Stories: [object Object]
Stories: [object Object]
Your current approach won't work since getStories is asynchronous (an assumption based on its method signature). If you can, I would suggest that you create a method on pivotal that can get stories for multiple project ids, so your code would read:
app.get('/stories', function(req, res) {
  var project_ids = [1, 2];
  pivotal.getStories(project_ids, function(err, project_stories) {
    res.send(project_stories);
  });
});
If that is not an option, I would suggest that you look into a flow library such as node-seq. Then your code could read something like this:
app.get('/stories', function(req, res) {
  var project_ids = [1, 2];
  Seq(project_ids)
    .parEach(function(project_id) {
      pivotal.getStories(project_id, this);
    })
    .seq(function() {
      var aggregatedStories = [];
      Hash.map(this.args, function(arg) {
        // Each parallel execution apparently results in an array of objects
        aggregatedStories.push(arg[0]);
      });
      res.send(aggregatedStories);
    });
});
The Hash.map function is from a node module called hashish.
Edit: To elaborate a little more: parEach executes its functions in parallel, and the following seq executes after all the callbacks from the parallel executions have finished. node-seq puts the result of each parallel execution into the parameter array of the following seq, hence the somewhat cryptic Hash.map(this.args).
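For comparison, here is a minimal sketch of the same aggregation with plain Promises instead of a flow library (assuming pivotal.getStories keeps the (id, callback) signature shown above):

app.get('/stories', function(req, res) {
  var project_ids = [1, 2];
  // Wrap each callback-style call in a Promise
  var requests = project_ids.map(function(id) {
    return new Promise(function(resolve, reject) {
      pivotal.getStories(id, function(err, stories) {
        if (err) return reject(err);
        resolve(stories);
      });
    });
  });
  // Promise.all resolves once every request has finished,
  // preserving the order of project_ids
  Promise.all(requests).then(function(aggregatedStories) {
    res.send(aggregatedStories);
  }, function(err) {
    res.status(500).send(err);
  });
});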
You want your server to send an array of project_stories objects as a response to the request? I'm not sure where your return statement is supposed to return to, so I have omitted it. Edit: As pointed out by others, getStories is asynchronous. You could try caolan's async module, implemented like this:
var async = require('async');

var array_of_stories = [];

var getStories = function(id, callback) {
  pivotal.getStories(id, function(err, project_stories) {
    array_of_stories.push(project_stories);
    callback(err); // let async.forEach know this item is done
  });
};

app.get('/stories', function(request, response) {
  var project_ids = [1, 2];
  async.forEach(project_ids, getStories, function(error) {
    if (!error) {
      response.send(array_of_stories);
    }
  });
});
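Note that array_of_stories is declared outside the request handler, so it is shared between requests: a second GET /stories will still contain the results of the first. Moving the array (and the getStories helper) inside app.get avoids that.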
I am creating a node API using JavaScript. I have used redis as my key-value store.
I created a redis-client in my app and am able to get values for a particular key.
I want to retrieve all keys along with their values.
So far I have done this:
app.get('/jobs', function (req, res) {
  var jobs = [];
  client.keys('*', function (err, keys) {
    if (err) return console.log(err);
    if (keys) {
      for (var i = 0; i < keys.length; i++) {
        client.get(keys[i], function (error, value) {
          if (err) return console.log(err);
          var job = {};
          job['jobId'] = keys[i];
          job['data'] = value;
          jobs.push(job);
        });
      }
      console.log(jobs);
      res.json({data: jobs});
    }
  });
});
But I always get a blank array in the response.
Is there any way to do this in JavaScript?
Thanks
First of all, the issue in your question is that, inside the for loop, client.get is invoked with an asynchronous callback. The synchronous for loop does not wait for those callbacks, so the next line, res.json({data: jobs});, is called immediately after the loop, before the callbacks have run. At the moment res.json({data: jobs}); is invoked, the array jobs is still empty ([]) and is returned with the response.
To mitigate this, you should use a control-flow or promise module such as async, bluebird, or native ES6 Promises.
Here is the modified code using the async module:
app.get('/jobs', function (req, res) {
  var jobs = [];
  client.keys('*', function (err, keys) {
    if (err) return console.log(err);
    if (keys) {
      async.map(keys, function (key, cb) {
        client.get(key, function (error, value) {
          if (error) return cb(error);
          var job = {};
          job['jobId'] = key;
          job['data'] = value;
          cb(null, job);
        });
      }, function (error, results) {
        if (error) return console.log(error);
        console.log(results);
        res.json({data: results});
      });
    }
  });
});
But note the warning in the Redis documentation: KEYS is intended for debugging and special operations, such as changing your keyspace layout, and is not advisable in production environments.
Hence, I would suggest using another module called redisscan, as below, which uses SCAN instead of KEYS, as suggested in the Redis documentation.
Something like this:
var redisScan = require('redisscan');
var redis = require('redis').createClient();

redisScan({
  redis: redis,
  each_callback: function (type, key, subkey, value, cb) {
    console.log(type, key, subkey, value);
    cb();
  },
  done_callback: function (err) {
    console.log("-=-=-=-=-=--=-=-=-");
    redis.quit();
  }
});
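If you would rather not add a dependency, the underlying SCAN loop looks roughly like this (a sketch using node_redis's scan command; the cursor is iterated until Redis returns "0"):

function scanAllKeys(client, done) {
  var found = [];
  function nextBatch(cursor) {
    client.scan(cursor, 'MATCH', '*', 'COUNT', 100, function (err, reply) {
      if (err) return done(err);
      var nextCursor = reply[0]; // reply is [cursor, [keys...]]
      found = found.concat(reply[1]);
      if (nextCursor === '0') return done(null, found); // iteration complete
      nextBatch(nextCursor);
    });
  }
  nextBatch('0');
}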
A combination of two requests:
import * as ioredis from 'ioredis';

const redis = new ioredis({
  port: redisPort,
  host: redisServer,
  password: '',
  db: 0
});

// inside an async function:
const keys = await redis.keys('*');
const values = await redis.mget(keys);
Order will be the same for both arrays.
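If you then want the keys paired with their values, a one-line sketch relying on that index alignment:

// mget returns null for any key deleted or expired in between
const pairs = keys.map((key, i) => ({ key, value: values[i] }));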
This will get all keys but with no values:
const redis = require('redis');
const client = redis.createClient();

client.keys('*', (err, keys) => {
  // ...
});
Now you need to get the values for those keys in the usual way. For example (getAsync here assumes the client's methods have been promisified, e.g. with bluebird):
Promise.all(keys.map(key => client.getAsync(key))).then(values => {
  // ...
});
or with the async module, or in any way you like.
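For reference, a minimal sketch of that promisification step with bluebird (the documented node_redis pattern; getAsync does not exist on the client otherwise):

const bluebird = require('bluebird');
const redis = require('redis');

// Adds *Async variants (getAsync, keysAsync, ...) that return promises
bluebird.promisifyAll(redis.RedisClient.prototype);
bluebird.promisifyAll(redis.Multi.prototype);

const client = redis.createClient();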
You should never do this. First, it is not recommended to use KEYS * in production. Second, it does not scale across a cluster.
You can organise your cached entries into SETs and query for the items within the SET, then retrieve the referenced keys. This also makes invalidation easier.
Have a look at some data storage best practices.
https://redis.io/topics/data-types-intro
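A rough sketch of that idea (the key names here are hypothetical): keep a SET as an index of your cache entries, then fetch the members' values in one MGET:

const redis = require('redis');
const client = redis.createClient();

// When caching an entry, also record its key in an index SET
client.set('job:42', JSON.stringify({ title: 'example' }));
client.sadd('jobs:index', 'job:42');

// Later: read the index, then fetch all referenced values at once
client.smembers('jobs:index', (err, keys) => {
  if (err) return console.error(err);
  client.mget(keys, (err, values) => {
    if (err) return console.error(err);
    console.log(keys, values);
  });
});

Invalidation then amounts to a DEL of the entry plus an SREM from the index.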
How to get all keys and values in redis in JavaScript?
You may find something useful in this link:
https://github.com/NodeRedis/node_redis/tree/master/examples
So, we are trying to rewrite our express server into Rx. It currently uses async for all stream operations. The code looks like the following:
var async = require('async');

function getCountAndChannels(name, cb) {
  var tasks = [
    function(cb) {
      // does a mongoDB search and returns count
    },
    function(cb) {
      // does a findOne mongoDB search and returns
    }
  ];
  async.parallel(tasks, cb);
}

router.get('data', function(req, res) { // router is the express router
  var recorders = req.query.recorders.split(',');
  async.map(recorders, function(name, cb) {
    getCountAndChannels(name, cb);
  }, function(err, countsAndChannels) {
    if (err) throw err;
    // here countsAndChannels is an array with first element the count
    // and second element the document.
    // do other async stuff based on the results
    res.status(200).json('send some calculations');
  });
});
What I have to do here is loop over the array of recorders and, for each one, run the two mongoDB searches. I have tried using Rx.Observable.merge, which doesn't return the results in an array but in two different calls of the callback. So I then tried Rx.Observable.zip, which I believe is what I'm looking for.
The problem is I don't know how to loop over the recorders and send the result when all operations have finished, because a simple forEach loop will throw a Cannot set headers after they are sent error.
This is what I have so far:
recorders.forEach(recorder => {
  Rx.Observable.zip([
    search1,
    search2
  ]).subscribe(
    (countsAndChannels) => {
      // do stuff
      res.send('the results');
    },
    err => res.status(500).json(err),
    () => res.send('OK')
  );
});
Haven't used Rx before, so any help is appreciated.
It might be easier to convert your list of recorders to an Observable stream, then flatMap over each recorder (i.e. perform your async processing), then call toArray to store all the results into an array:
var recorder$ = Rx.Observable.from(recorders);

var countsAndChannels$ = recorder$
  .flatMap(performAsyncTask);

// allResults$ will emit once all of the async work is complete
var allResults$ = countsAndChannels$.toArray();

allResults$.subscribe(results => {
  // Send response to client;
});
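For completeness, performAsyncTask could pair the two Mongo searches with Rx.Observable.zip; a sketch assuming RxJS 4 (where fromNodeCallback wraps Node-style (args..., cb) functions) and hypothetical getCount/findOneChannel query functions:

function performAsyncTask(name) {
  // fromNodeCallback turns a Node-style function into an Observable factory
  var count$ = Rx.Observable.fromNodeCallback(getCount)(name);
  var channel$ = Rx.Observable.fromNodeCallback(findOneChannel)(name);
  // zip emits a single [count, document] pair per recorder
  return Rx.Observable.zip(count$, channel$, function (count, doc) {
    return [count, doc];
  });
}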
I am trying to write a nodejs program that queries github for a list of repos (via a Node wrapper for the github API: https://www.npmjs.com/package/github) and retrieves the git clone url of each into an array, which I then wish to sort alphabetically.
Due to the asynchronous nature of the calls, I am not sure how to wait until all of the async requests have returned.
Here is the loop in question. repoArray is an array of repos in username/reponame format:
var urls = [];
for (var i = 0; i < repoArray.length; i++) {
  var components = repoArray[i].split('/');
  github.repos.get({
    user: components[0],
    repo: components[1]
  }, function(err, res) {
    urls.push(res.ssh_url);
  });
}

// do a case-insensitive sort
urls.sort(function(a, b) {
  return a.localeCompare(b, 'en', {'sensitivity': 'base'});
});
console.log("urls: " + urls);
Basically, since the github.repos.get() calls in the loop are all asynchronous/callback-based, by the time the code reaches urls.sort() and then console.log(), none or only some of the github.repos.get() calls have finished.
I am not that familiar with promises or deferreds, but is that the way to go? I'm not sure how I could refactor that loop so that urls.sort() is called only after all the requests from the loop are complete.
The Async library is meant for exactly these scenarios and is usually what people use for these problems. It can help you execute asynchronous tasks in parallel and execute a callback when they all finish, using async.each.
var async = require('async');
var urls = [];

// make each HTTP request
function process(repo, callback) {
  var components = repo.split('/');
  github.repos.get({
    user: components[0],
    repo: components[1]
  }, function(err, res) {
    if (err) {
      // call callback(err) if there is an error
      return callback(err);
    } else {
      urls.push(res.ssh_url);
      // call callback(null) if it was a success
      return callback(null);
    }
  });
}

// this will iterate over repoArray and pass each repo to the 'process' function.
// if any of the calls to 'process' result in an error,
// the final callback will be immediately called with an error object
async.each(repoArray, process, function(error) {
  if (error) {
    console.error('uh-oh: ' + error);
    return;
  } else {
    // do a case-insensitive sort
    urls.sort(function(a, b) {
      return a.localeCompare(b, 'en', {'sensitivity': 'base'});
    });
    console.log("urls: " + urls);
  }
});
Edit: since you are sorting them at the end, the urls will be in order.
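Since the question mentions promises, here is a roughly equivalent sketch using Promise.all, assuming the same github.repos.get callback signature:

var requests = repoArray.map(function(repo) {
  var components = repo.split('/');
  return new Promise(function(resolve, reject) {
    github.repos.get({
      user: components[0],
      repo: components[1]
    }, function(err, res) {
      if (err) return reject(err);
      resolve(res.ssh_url);
    });
  });
});

// resolves once every request has succeeded, in repoArray order
Promise.all(requests).then(function(urls) {
  urls.sort(function(a, b) {
    return a.localeCompare(b, 'en', {'sensitivity': 'base'});
  });
  console.log("urls: " + urls);
}, function(err) {
  console.error('uh-oh: ' + err);
});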
I have a config JSON file as below:
"constraints": {
  "input": "input",
  "output": "output"
}
I am trying to read this file and create input and output directories with child directories.
var fs = require("fs");
var async = require("async");
var node_xj = require("xls-to-json");
var cf= JSON.parse(fs.readFileSync("config.json", 'utf8'));
// Declare variables
var files = [];
function readAsync(file, callback) {
node_xj(file, callback);
}
function create(currentDirPath, outputDirPath, callback) {
// some code creating directories
}
create(cf.lang.input, cf.lang.output, function(stat, config) {
files.push(config);
});
async.map(files, readAsync, function(err, results) {
if(err) throw err;
});
The code works fine, but sometimes it does not. Let me walk through the code. I am loading some modules.
var fs = require("fs");
var async = require("async");
var node_xj = require("xls-to-json");
I am loading the config file.
var cf = JSON.parse(fs.readFileSync("config.json", 'utf8'));
Then I pass the cf config to the create function, which after its operation returns an object that I push into an array.
var files = [];

function readAsync(file, callback) {
  node_xj(file, callback);
}

function create(input, output, callback) {
  // some code creating directories and object
}

create(cf.lang.input, cf.lang.output, function(stat, config) {
  files.push(config);
});

async.map(files, readAsync, function(err, results) {
  if (err) throw err;
});
Then I pass the files array to my async.map function, which passes each entry to the readAsync function for another operation.
Question:
Can anyone tell me whether the way I have written the code flow makes it flawed sometimes?
Is there a better way of writing the same code logic flow?
Should I use async.map to iterate files and then pass it to readAsync?
If you're depending on asynchronous operations to occur before other operations, you have to treat them asynchronously! Your code as described creates some directories (which takes measurable time) and immediately attempts to use them, which isn't kosher.
You might consider something like:
async.series([
  function(done) {
    // create your directories here,
    // then call done() to signal this step is finished
  },
  function(done) {
    async.map(files, ..., done);
  }
]);
which will guarantee that the resources needed by your map exist before the map is called.
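Applied to the question's code, that might look like the following sketch (this assumes create invokes its callback only once the directories actually exist):

async.series([
  function(done) {
    create(cf.lang.input, cf.lang.output, function(stat, config) {
      files.push(config);
      done(); // directories exist and files is populated
    });
  },
  function(done) {
    async.map(files, readAsync, done);
  }
], function(err, results) {
  if (err) throw err;
});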
I am able to insert and retrieve data from a neDB database in nodejs, but I cannot pass the data outside of the function that retrieves it.
I have read through the neDB documentation and I searched for and tried different combinations of callbacks and returns (see code below) without finding a solution.
I'm new to javascript so I do not know if I am misunderstanding how to use variables in general or if this issue is related to using neDB specifically or both.
Could someone please explain why "x" in my code does not ever contain the docs JSON results from the database? How can I make it work?
var fs = require('fs'),
    Datastore = require('nedb'),
    db = new Datastore({ filename: 'datastore', autoload: true });

// generate data to add to datafile
var document = {
  Shift: "Late",
  StartTime: "4:00PM",
  EndTime: "12:00AM"
};

// add the generated data to datafile
db.insert(document, function (err, newDoc) {
});

// test to ensure that this search returns data
db.find({ }, function (err, docs) {
  console.log(JSON.stringify(docs)); // logs all of the data in docs
});

// attempt to get a variable "x" that has all
// of the data from the datafile
var x = function(err, callback) {
  db.find({ }, function (err, docs) {
    callback(docs);
  });
};
console.log(x); // logs "[Function]"

var x = db.find({ }, function (err, docs) {
  return docs;
});
console.log(x); // logs "undefined"

var x = db.find({ }, function (err, docs) {
});
console.log(x); // logs "undefined"
Callbacks in JavaScript are generally asynchronous, which means you cannot capture their result with an assignment operator, and consequently you do not return anything from the callback function.
When you call an async function, execution of your programme carries on past the var x = ... statement. The assignment of whatever result the callback receives has to be performed from within the callback itself. What you need is something along the lines of:
var x = null;

db.find({ }, function (err, docs) {
  x = docs;
  do_something_when_you_get_your_result();
});

function do_something_when_you_get_your_result() {
  console.log(x); // x has docs now
}
EDIT
Here is a nice blog post about asynchronous programming, and there are many more resources on this topic that you can pick up.
This is a popular library to help with async flow-control for node.
P.S.
Hope this helps. Please, by all means ask if you need something clarified :)
I ran into the same problem. In the end I used a combination of async-await and a promise with resolve to solve it.
In your example the following would work:
var x = new Promise(function (resolve, reject) {
  db.find({ }, function (err, docs) {
    if (err) return reject(err);
    resolve(docs);
  });
});
console.log(x); // logs a pending Promise, not the docs themselves
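To see the actual documents rather than the pending Promise, consume it, for example:

x.then(function (docs) {
  console.log(docs); // the documents from the datastore
});

or, inside an async function, console.log(await x);.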
I had to learn a bit about async functions to get it right. For those looking for specific help about getting a return value from nedb, here's a snippet of what worked for me. I was using it in electron.
function findUser(searchParams, callBackFn) {
  db.find({}, function (err, docs) {
    // executes the callback with the retrieved documents
    callBackFn(docs);
  });
}
Usage:
findUser('John Doe', /* this is the callback -> */ function(users) {
  for (i = 0; i < users.length; i++) {
    // the data will be here now
    // e.g. users[i].phone will display the user's phone number
  }
});