Get JSON from a JavaScript file and display it in a route file using Node.js

I am using Node.js with Express. I currently have a script that produces an array of objects from the Google API. I need to take that JSON data and use it in my templates. How can I call the function in my script from my route file?
This is my script file:
var Spreadsheet = require('edit-google-spreadsheet');

Spreadsheet.load({
  debug: true,
  spreadsheetId: '1eWmSV4Eq-se4gZSvBfW-J-lEOLwNopEfMavZByJ-qD8',
  worksheetId: 'owrromd',
  // 1. Username and Password
  username: 'user',
  password: 'pass',
}, function sheetReady(err, spreadsheet) {
  // use spreadsheet!
  spreadsheet.receive(function(err, rows, info) {
    if (err) throw err;
    var announcementArray = [];
    //console.log(rows);
    for (var x in rows) {
      var eachObject = rows[x];
      var side = eachObject['1'];
      //console.log(side);
      var type = eachObject['2'];
      //console.log(type);
      var announcement = eachObject['3'];
      //console.log(announcement);
      var announcementItem = {};
      announcementItem.side = side;
      announcementItem.type = type;
      announcementItem.announcement = announcement;
      announcementArray.push(announcementItem);
    }
    announcementArray.shift();
    console.log(announcementArray);
  });
});
This is my route js file:
module.exports = function(app) {
  app.get('/', function(req, res) {
    res.render('index', { title: "Home page", description: "The is the description" });
  });
};

Change the content of the script file; let's call it loadSheet.js:
var Spreadsheet = require('edit-google-spreadsheet');

function loadSheet() {
  Spreadsheet.load({
    debug: true,
    spreadsheetId: '1eWmSV4Eq-se4gZSvBfW-J-lEOLwNopEfMavZByJ-qD8',
    worksheetId: 'owrromd',
    // 1. Username and Password
    username: 'user',
    password: 'pass',
  }, function sheetReady(err, spreadsheet) {
    // use spreadsheet!
    spreadsheet.receive(function(err, rows, info) {
      if (err) throw err;
      var announcementArray = [];
      //console.log(rows);
      for (var x in rows) {
        var eachObject = rows[x];
        var side = eachObject['1'];
        //console.log(side);
        var type = eachObject['2'];
        //console.log(type);
        var announcement = eachObject['3'];
        //console.log(announcement);
        var announcementItem = {};
        announcementItem.side = side;
        announcementItem.type = type;
        announcementItem.announcement = announcement;
        announcementArray.push(announcementItem);
      }
      announcementArray.shift();
      console.log(announcementArray);
    });
  });
}

// Export it from the module
exports.loadSheet = loadSheet;
Then in the route js:
var ls = require('./loadSheet.js'); // Load the module by the name of the script file

app.get('/', function(req, res) {
  res.render('index', { title: "Home page", description: "The is the description" });
  ls.loadSheet();
});

So you'd adapt the module the other answer created, but you are going to need to give loadSheet a callback. I am cutting out the main body of that function for clarity.
var Spreadsheet = require('edit-google-spreadsheet');

function loadSheet(theCallback) { // take a callback here
  Spreadsheet.load({...yourdata...}, function sheetReady(...) {
    // create your announcementArray
    // then call the callback
    theCallback(undefined, announcementArray);
  });
}

// Export it from the module
exports.loadSheet = loadSheet;
Then, from your routes, you can get it like so:
var ls = require('./loadsheet.js'); // assumes in same dir as routes

app.get('/', function(req, res) {
  ls.loadSheet(function(err, result) {
    res.render('myTemplate', result);
  });
});
I am going to assume you can take care of getting the result data into your template. You can look in the index template to see how it pulls in the data. I don't know whether you are using Jade or EJS.
Note: this is all somewhat hackish, but it addresses your functional question. Let me know if you need further direction.
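Putting the two pieces together, a minimal sketch of loadSheet.js with the callback wired in might look like this. The spreadsheet options and row handling are the ones from the question; wrapping the array in an announcements key is just one way of shaping the locals object that res.render expects, so the template would loop over announcements:
var Spreadsheet = require('edit-google-spreadsheet');

function loadSheet(theCallback) {
  Spreadsheet.load({
    spreadsheetId: '1eWmSV4Eq-se4gZSvBfW-J-lEOLwNopEfMavZByJ-qD8',
    worksheetId: 'owrromd',
    username: 'user',
    password: 'pass'
  }, function sheetReady(err, spreadsheet) {
    if (err) return theCallback(err);
    spreadsheet.receive(function(err, rows, info) {
      if (err) return theCallback(err);
      var announcementArray = [];
      for (var x in rows) {
        var row = rows[x];
        announcementArray.push({
          side: row['1'],
          type: row['2'],
          announcement: row['3']
        });
      }
      announcementArray.shift(); // drop the header row, as in the question
      // Hand the data back as template locals for res.render.
      theCallback(undefined, { announcements: announcementArray });
    });
  });
}

exports.loadSheet = loadSheet;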

Related

How do I refresh data from a JSON file with Chart.js?

I'm attempting to make a live graph with data from my sensors, and currently that's working, but I'm not sure how to get it to update by itself.
I'm using node/express/pug, and I have a backend which listens for MQTT messages and appends them to a json file, keeping the latest 30 values or so.
The frontend's routing index.js parses the JSON and structures it in arrays, which are passed to the pug template, which the javascript inside it can then access.
My question is: in order for this to reload dynamically, can I still do it like this, through index.js, or do I need another approach? Ideally I'd like some sort of notification from the backend to the frontend when the file is updated, but a timer would be perfectly adequate. The updates arrive at roughly 1000 ms intervals.
I am quite a beginner in javascript/web development.
My index.js file (the last part is the relevant part):
var router = express.Router();
var db;
var fs = require('fs');
var moment = require('moment');

/* GET home page. */
router.get('/', function(req, res, next) {
  res.render('index', { title: 'Sematek StrainMonitor' });
  next();
});

// when rawdata is loaded, load client
router.get('/rawdata', function(req, res) {
  var dateFrom = req.query.from;
  var dateTo = req.query.to;
  var dateFromEpoch = moment(dateFrom).unix();
  var dateToEpoch = moment(dateTo).unix();
  let searchQuery;
  if ((dateFromEpoch) && (dateToEpoch)) {
    searchQuery = "{epoch : { $gt : " + dateFromEpoch + ", $lt : " + dateToEpoch + "}}";
  }
  req.conn.then(client => client.db('test').collection('sensor0').find({searchQuery}).toArray(function(err, docs) {
    if (err) { console.error(err) }
    if (!docs) {
      console.log("oops.. didn't retrieve any docs from the mongodb serv");
    }
    const dataPairsDB = docs.map(e => ({x: e.epoch, y: e.data}));
    let datasetDB = [];
    let labelsetDB = [];
    dataPairsDB.forEach((num, index) => {
      labelsetDB.push(moment.unix(dataPairsDB[index].x).format('HH:mm:ss'));
      datasetDB.push(dataPairsDB[index].y);
    });
    console.log(req.query);
    // gets CloudMQTT values from the JSON file and converts them into two arrays for Chart.js display
    let data = JSON.parse(fs.readFileSync("./json/latest-value.json"));
    const dataPairs = data.readings.map(e => ({x: e.epoch, y: e.data}));
    let dataset = [];
    let labelset = [];
    dataPairs.forEach((num, index) => {
      labelset.push(moment.unix(dataPairs[index].x).format('HH:mm:ss'));
      dataset.push(dataPairs[index].y);
    });
    if (req.cookies.isLoggedIn == 'true') {
      res.render('rawdata', {
        docs: docs,
        datasetDB: JSON.stringify(datasetDB),
        labelsetDB: JSON.stringify(labelsetDB),
        dataset: JSON.stringify(dataset),
        labelset: JSON.stringify(labelset)
      });
    } else {
      res.redirect(401, '/');
    }
  }));
});
From my rawdata.pug file:
canvas(id="line-chart" )
script(src='https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.5.0/Chart.min.js')
script(type='text/javascript').
var dataset = JSON.parse('!{dataset}');
var labelset = JSON.parse('!{labelset}');
script(src='/scripts/charter.js')
Just have the webpage subscribe to the same MQTT topic (using MQTT over WebSockets; see the Paho JavaScript client) and update live, without having to go back to the backend and pull all the historical data every time a new value is added.
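For instance, a rough sketch of what that could look like in the page's client-side script, assuming the Paho MQTT JavaScript client is loaded on the page, chart is the Chart.js instance created in charter.js, and the broker host, port, credentials, and topic are placeholders you would replace with your CloudMQTT WebSocket details:
// Hypothetical connection details; substitute your CloudMQTT websocket host/port.
var client = new Paho.MQTT.Client("broker.example.com", 39627, "web_" + Math.random().toString(16).substr(2, 8));

client.onMessageArrived = function (message) {
  // Assumes the backend publishes each reading as JSON like {"epoch": ..., "data": ...}.
  var reading = JSON.parse(message.payloadString);
  chart.data.labels.push(new Date(reading.epoch * 1000).toLocaleTimeString());
  chart.data.datasets[0].data.push(reading.data);
  // Keep roughly the latest 30 points, mirroring what the backend stores.
  if (chart.data.datasets[0].data.length > 30) {
    chart.data.labels.shift();
    chart.data.datasets[0].data.shift();
  }
  chart.update();
};

client.connect({
  useSSL: true,
  userName: "user",   // CloudMQTT credentials (placeholders)
  password: "pass",
  onSuccess: function () {
    client.subscribe("sensor0"); // the same topic the backend listens on
  }
});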

JavaScript Node.js & Cheerio web scraper creating an object with data

I'm building a simple web scraper using Cheerio; this is my code:
var express = require('express');
var fs = require('fs');
var request = require('request');
var cheerio = require('cheerio');
var app = express();

app.get('/', function(req, res) {
  url = 'http://www.gazzetta.it/calcio/fantanews/voti/serie-a-2016-17/giornata-32';
  request(url, function(error, response, html) {
    if (!error) {
      var $ = cheerio.load(html);
      var json = {
        teamsList: {}
      };
      $('.listView .magicTeamList').each(function() {
        var teamName = $(this).find('.teamNameIn').text();
        var playerName = $(this).find('.playerNameIn').text();
        var playerGrade = $(this).find('.fvParameter').not('.head').text();
        json.teamsList[teamName] = {};
        json.teamsList[teamName][playerName] = playerGrade;
      });
    } else {
      console.log('error happened :' + error);
    }
    fs.writeFile('output.json', JSON.stringify(json, null, 4), function(err) {
      console.log('File successfully written! - Check your project directory for the output.json file');
    });
    // Finally, we'll just send out a message to the browser reminding you that this app does not have a UI.
    res.send('Check your console!');
  });
});

app.listen(8081);
console.log('Magic happens on port 8081');
exports = module.exports = app;
I want to push data into the json object, but I'm not getting the desired result. The output.json I get is this (I'll just paste an excerpt of the result):
{
    "teamsList": {
        "atalanta": {
            "Gollini P.Masiello A.Conti A.Hateboer H.Caldara M.Toloi R.Kurtic J.Cristante B.Freuler R.Kessie F.Petagna A.Dalessandro M.Cabezas B.Paloschi A.": "4.56.57.55.5779667666-"
        },
        "bologna": {
            "Mirante A.Torosidis V.Maietta D.Masina A.Gastaldello D.Pulgar E.Taider S.Dzemaili B.Verdi S.Di Francesco F.Petkovic B.Krafth E.Krejci L.Sadiq U.": "5.5665.5636.565.556--5.5"
        }
    }
}
But what I want is an output like this:
{
    "teamsList": {
        "atalanta": {
            "Gollini P.": 4.5,
            "Masiello A.": 6.5,
            ...
        }
    }
}
I've searched for answers but couldn't find any for my specific problem, or maybe I'm just missing something obvious. Any help would be appreciated, thanks.
On each loop iteration you have this:
json.teamsList[teamName] = {};
That is going to wipe out any existing players from the team object. You need an if statement to check whether the team already exists before creating it.
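A sketch of that fix follows. The guard keeps players from earlier iterations; pairing each player with their own grade, instead of concatenating them all with .text(), relies on an assumption that the name and grade cells line up index-for-index in each team block:
$('.listView .magicTeamList').each(function () {
  var teamName = $(this).find('.teamNameIn').text();
  // Only create the team object the first time the team is seen,
  // otherwise this assignment wipes out players added earlier.
  if (!json.teamsList[teamName]) {
    json.teamsList[teamName] = {};
  }
  var names = $(this).find('.playerNameIn');
  var grades = $(this).find('.fvParameter').not('.head');
  names.each(function (i) {
    // "-" (no grade) parses to NaN and is stored here as null.
    var grade = parseFloat($(grades[i]).text());
    json.teamsList[teamName][$(this).text()] = isNaN(grade) ? null : grade;
  });
});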

Node.js variable use outside of function

I'm trying to make it so that I can pass my trends variable from its function into a renderer for my Pug template, and I can't seem to do it.
var express = require('express');
var router = express.Router();
var googleTrends = require('google-trends-api');
var auth = require('http-auth');
var ustrends;
var uktrends;
const Console = require('console').Console;
var basic = auth.basic({
realm: "Web."
}, function (username, password, callback) { // Custom authentication method.
callback(username === "user" && password === "pass");
}
);
var find = ',';
var regex = new RegExp(find, 'g');
googleTrends.hotTrends('US').then(function(trends){
ustrends = trends
});
googleTrends.hotTrends('EU').then(function(trends1) {
uktrends = trends1
});
console.log(ustrends);
/* GET home page. */
router.get('/', auth.connect(basic), function(req, res, next) {
res.render('index', {trends: ustrends.toString().replace(regex, ", "), trends1: uktrends.toString().replace(regex, ", "), title: 'Trends in the U.S & U.K'});
});
module.exports = router;
As you can see, I'm trying to pass the "ustrends" and "uktrends" variables into the renderer. Any help is appreciated.
Remember that hotTrends returns a promise, since it fetches results from Google's API. Because the renderer sits outside the callbacks in which ustrends and uktrends are assigned, there is no guarantee those values will be set before the renderer runs.
You could use several nested callbacks, but that would push the code pretty far to the right. I recommend the async library, whose series function takes 1) an array of functions to be executed in order and 2) a callback to run after those functions have completed, which receives an error (if there was one) and the results of the functions. In the snippet below, the trends API returns its results before the renderer is called:
async.series([
  function(cb) {
    googleTrends.hotTrends('US').then(function(trends) {
      ustrends = trends;
      cb();
    });
  },
  function(cb) {
    googleTrends.hotTrends('EU').then(function(trends1) {
      uktrends = trends1;
      cb();
    });
  }
], function(err, results) {
  /* handle errors, do rendering stuff */
});
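For example, the series call could sit directly inside the route handler, so that res.render only runs once both promises have resolved. A sketch, where the .catch(cb) lines are just one way of forwarding a failed request to Express:
var async = require('async');

router.get('/', auth.connect(basic), function (req, res, next) {
  async.series([
    function (cb) {
      googleTrends.hotTrends('US').then(function (trends) {
        ustrends = trends;
        cb();
      }).catch(cb); // forward API errors to the final callback
    },
    function (cb) {
      googleTrends.hotTrends('EU').then(function (trends1) {
        uktrends = trends1;
        cb();
      }).catch(cb);
    }
  ], function (err) {
    if (err) { return next(err); }
    res.render('index', {
      trends: ustrends.toString().replace(regex, ", "),
      trends1: uktrends.toString().replace(regex, ", "),
      title: 'Trends in the U.S & U.K'
    });
  });
});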

NodeJS restify API caching best practice

I am very new to NodeJS and I am building my first API using restify.
I want to find out what the best practice is for caching response data; each API call must have its own cache time.
I have looked at res.cache() but that seems to be only per user request and not a global application cache.
I then looked at restify-cache but the documentation did not clearly tell me how to use it.
My application works like this:
server.js code:
var restify = require('restify');
var mysqlDB = require('./config/connection');

// REST server declaration and configuration
var server = restify.createServer({
  name: 'test-api',
  version: '0.0.1'
});

server.pre(restify.pre.sanitizePath());
server.use(restify.queryParser());
server.use(restify.acceptParser(server.acceptable));
server.use(restify.queryParser());
server.use(restify.bodyParser());

server.listen(9007, function() {
  console.log('%s listening at %s', server.name, server.url);
  mysqlDB.handleDisconnect();
  console.log(new Date() + ': Started Cricket API on port 9007');
});

var routes = require('./routes')(server);
routes.js code:
module.exports = function(app) {
  app.get('/', function(req, res, next) {
    return res.send("You have reached the test API");
  });

  var fixtures = require('./controllers/fixtures');
  app.get('/getfixtures', fixtures.getFixtures); // Get All Fixtures
};
fixtures.js code snippet:
this.getFixtures = function (req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  res.header("Access-Control-Allow-Headers", "X-Requested-With");
  console.log("Get All Fixtures");

  var mysql = mysqlDB.getConnection();
  var query = "SELECT * FROM fixtures WHERE fixture_date >= CURDATE() ORDER BY fixture_date, fixture_time";
  mysql.query(query, function(err, rows) {
    if (err) {
      var status = mysqlDB.getErrorStatus(err.code);
      return res.status(status.code).send("Error : " + status.Message);
    } else {
      var data = [];
      for (var i in rows) {
        var item = rows[i];
        var output = util.formatDate(item.fixture_date);
        item.fixture_date = output;
        data.push(item);
      }
      return res.send(data);
    }
  });
};
Can someone please point me in the right direction? I don't know where to add the caching part.
From the library's documentation:
server.use(cache.before); is middleware that is triggered before the request is handled; it goes to Redis and checks whether the header_{url} and payload_{url} keys exist, and in that case the cached value is returned.
You could put it as mentioned in this gist:
https://gist.github.com/jeffstieler/3d84fa5468c7eadb7685
var server = restify.createServer({
  name: 'test-api',
  version: '0.0.1'
});

server.pre(restify.pre.sanitizePath());
server.use(cache.before);
server.use(restify.queryParser());
server.use(restify.acceptParser(server.acceptable));
server.use(restify.queryParser());
server.use(restify.bodyParser());
server.on('after', cache.after);
In your code I would add cache.before after you sanitize the path, as this is what will be saved in Redis. Also, a next() should be included in every cached route; see the sketch below.
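For example, the fixtures handler from the question would end the chain with next() so that the 'after' event, and therefore cache.after, actually fires. A sketch, with the body trimmed to the relevant lines:
this.getFixtures = function (req, res, next) {
  // ... query MySQL and build the `data` array exactly as in the question ...
  res.send(data);
  return next(); // without this, cache.after never gets a chance to store the response
};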
I ended up using node-cache. It was easy to use since I come from a Java/Play Framework background; hopefully it helps someone else in the future.
Example usage:
var NodeCache = require("node-cache");
var myCache = new NodeCache();

var cachedValue = myCache.get("alltests");
if (cachedValue != undefined) {
  return res.send(cachedValue);
} else {
  // Do work here and then:
  success = myCache.set("alltests", valueHere, cacheTime);
}
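Applied to the fixtures route above, that pattern might look roughly like this. A sketch: the 'fixtures' key and the 60-second TTL are arbitrary choices, and the date-formatting loop from the question is left out for brevity:
var NodeCache = require('node-cache');
var myCache = new NodeCache();

this.getFixtures = function (req, res, next) {
  var cached = myCache.get('fixtures');
  if (cached !== undefined) {
    return res.send(cached); // served straight from the in-memory cache
  }
  var mysql = mysqlDB.getConnection();
  var query = "SELECT * FROM fixtures WHERE fixture_date >= CURDATE() ORDER BY fixture_date, fixture_time";
  mysql.query(query, function (err, rows) {
    if (err) {
      var status = mysqlDB.getErrorStatus(err.code);
      return res.status(status.code).send("Error : " + status.Message);
    }
    myCache.set('fixtures', rows, 60); // cache the rows for 60 seconds
    return res.send(rows);
  });
};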

How to properly use Parse.query.equalTo?

In my web application, when the user logs in they should see all the documents they made. However, when I test my application, I see all the documents, including the ones that I didn't make. I believe this is because I am not using Parse.query.equalTo correctly.
Here is my code:
router.post('/login', function (req, res) {
  var tempname = req.body.username;
  var username = tempname.toLowerCase();
  var password = req.body.password;
  var login = Promise.promisify(Parse.User.logIn);
  var promerror = Promise.reject("error");
  var Files = Parse.Object.extend("File");
  var query = new Parse.Query(Files);
  query.include('user');
  var finds = query.find;
  var doc = {};
  var name, url;

  return login(username, password).catch(function (error) {
  }).then(function (user, error) {
    query.equalTo("user", Parse.User.current());
    console.log(JSON.stringify(Parse.User.current()));
    if (user) {
      res.render('logins', { Message: "Username and/or password don't match" });
    }
    var temp;
  }).catch(function (err) {
    res.render('logins');
  }).then(query.find(function (results) {
    for (var i = 0; i < results.length; i++) {
      var object = results[i];
      var userID = object.get('user');
      console.log("current user");
      var codefile = object.get('javaFile');
      temp = codefile.name();
      name = temp.split("-").pop();
      url = codefile.url();
      doc[name] = url;
    }
  })).catch(function (error) {
    promerror(error);
  }).finally(function () {
    res.render('filename', { title: 'File Name', FIles: JSON.stringify(doc) });
  }).done();
});
When the user logs in, it should go to the database, retrieve all the documents the user made, and save them to doc; doc is then sent to the client side to be displayed.
Here is the code for the user saving a file:
newFile.save({ user: Parse.User.current(), fileName: newnames, javaFile: parseFile })
I'm not sure, but try setting the File user field to the object id instead of the user object itself. Parse objects will sometimes return a pointer, sometimes the id, etc., so you might be getting unexpected behavior where you set the field to Parse.User.current(). Besides that, it looks like you're using equalTo correctly.
Otherwise, basically the only way to debug cloud code is to console.log and view the logs.
As a side note, you should be setting the ACL of each File before you save it; that way, a user wouldn't be able to view another user's files anyway (unless these files are meant to be public). See the sketch below.
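A sketch of both suggestions, using the class and field names from the question (whether you store the user's id or a pointer, the equalTo value just has to match whatever was saved):
// When saving, attach a per-user ACL so other users can't read the file:
var acl = new Parse.ACL(Parse.User.current());
newFile.setACL(acl);
newFile.save({ user: Parse.User.current().id, fileName: newnames, javaFile: parseFile });

// When querying, match the same field on the current user's id:
var Files = Parse.Object.extend("File");
var query = new Parse.Query(Files);
query.equalTo("user", Parse.User.current().id);
query.find().then(function (results) {
  // results now only contains the current user's documents
});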
