First of all, I'm a noob.
This is my first time running a script on a cloud server.
I have a script that counts the number of views on my YouTube video and writes that count into the video's title.
I want that code running 24/7.
I got it working on a Google Cloud VM instance: node-cron runs the code every 5 minutes, and "forever" keeps the Node process running.
My question is: is there a better way to run it, for example with Cloud Functions or something similar, or should I keep running it on the VM instance?
If so, what should I do? I don't know how to use external dependencies (like the Google APIs client) in a Cloud Function when there is no console to install the googleapis package. (Maybe it's easy, I just don't know how yet.)
var fs = require('fs');
var readline = require('readline');
var { google } = require('googleapis');
var OAuth2 = google.auth.OAuth2;
const cron = require("node-cron");
cron.schedule("*/5 * * * *", () => {
// If modifying these scopes, delete your previously saved credentials
// at ~/.credentials/youtube-nodejs-quickstart.json
var SCOPES = ['https://www.googleapis.com/auth/youtube.force-ssl'];
var TOKEN_DIR = (process.env.HOME || process.env.HOMEPATH ||
process.env.USERPROFILE) + '/.credentials/';
var TOKEN_PATH = TOKEN_DIR + 'update-youtube-title.json';
const youtube = google.youtube('v3');
const video_id = 'f0ARwVVxoBc';
// Load client secrets from a local file.
fs.readFile('credentials.json', function processClientSecrets(err, content) {
if (err) {
console.log('Error loading client secret file: ' + err);
return;
}
// Authorize a client with the loaded credentials, then call the YouTube API.
authorize(JSON.parse(content), makeAuthCall);
});
/**
* Create an OAuth2 client with the given credentials, and then execute the
* given callback function.
*
* @param {Object} credentials The authorization client credentials.
* @param {function} callback The callback to call with the authorized client.
*/
function authorize(credentials, callback) {
var clientSecret = credentials.installed.client_secret;
var clientId = credentials.installed.client_id;
var redirectUrl = credentials.installed.redirect_uris[0];
var oauth2Client = new OAuth2(clientId, clientSecret, redirectUrl);
// Check if we have previously stored a token.
fs.readFile(TOKEN_PATH, function (err, token) {
if (err) {
getNewToken(oauth2Client, callback);
} else {
oauth2Client.credentials = JSON.parse(token);
callback(oauth2Client);
}
});
}
/**
* Get and store new token after prompting for user authorization, and then
* execute the given callback with the authorized OAuth2 client.
*
* @param {google.auth.OAuth2} oauth2Client The OAuth2 client to get token for.
* @param {getEventsCallback} callback The callback to call with the authorized
* client.
*/
function getNewToken(oauth2Client, callback) {
var authUrl = oauth2Client.generateAuthUrl({
access_type: 'offline',
scope: SCOPES
});
console.log('Authorize this app by visiting this url: ', authUrl);
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
rl.question('Enter the code from that page here: ', function (code) {
rl.close();
oauth2Client.getToken(code, function (err, token) {
if (err) {
console.log('Error while trying to retrieve access token', err);
return;
}
oauth2Client.credentials = token;
storeToken(token);
callback(oauth2Client);
});
});
}
/**
* Store token to disk to be used in later program executions.
*
* @param {Object} token The token to store to disk.
*/
function storeToken(token) {
try {
fs.mkdirSync(TOKEN_DIR);
} catch (err) {
if (err.code != 'EEXIST') {
throw err;
}
}
fs.writeFile(TOKEN_PATH, JSON.stringify(token), (err) => {
if (err) throw err;
console.log('Token stored to ' + TOKEN_PATH);
});
}
/**
* Lists basic information about a channel.
*
* @param {google.auth.OAuth2} auth An authorized OAuth2 client.
*/
/*
function getChannel(auth) {
var service = google.youtube('v3');
service.channels.list({
auth: auth,
part: 'snippet,contentDetails,statistics',
forUsername: 'GoogleDevelopers'
}, function (err, response) {
if (err) {
console.log('The API returned an error: ' + err);
return;
}
var channels = response.data.items;
if (channels.length == 0) {
console.log('No channel found.');
} else {
console.log('This channel\'s ID is %s. Its title is \'%s\', and ' +
'it has %s views.',
channels[0].id,
channels[0].snippet.title,
channels[0].statistics.viewCount);
}
});
}
*/
// new code
const makeAuthCall = (auth) => {
//get views
youtube.videos.list(
{
auth: auth,
id: video_id,
part: 'snippet,statistics'
},
(err, response) => {
if (err) {
console.log(`The view count check failed ${err}`)
return;
}
if (response.data.items[0]) {
// update found
console.log('Found an update...');
updateVideoTitle(response.data.items[0], auth);
}
}
);
};
const updateVideoTitle = (video, auth) => {
let viewsText = video.statistics.viewCount.toLocaleString();
const newTitle = `Esse Vídeo tem ${viewsText} Visualizações`;
const oldTitle = video.snippet.title;
if (newTitle != oldTitle){
video.snippet.title = newTitle;
console.log(`Updating view count to ${viewsText}`);
youtube.videos.update(
{
auth: auth,
part: 'snippet,statistics',
resource: video,
},
(err, response) => {
console.log(response)
if (err) {
console.log(`The view count check failed ${err}`);
return;
}
if (response.data.items) {
console.log("Done");
}
}
);
} else {
console.log("No update needed");
}
}
})
This is a very broad question with a lot to unpack. Keep in mind that the differences below are explained very simplistically and there can be more steps involved.
Compute Engine/VM:
Step 1. Install the JavaScript runtime (Node.js) on the OS.
Step 2. Deploy your code and its dependencies.
Cloud Functions:
Step 1. Deploy your code and its dependencies.
Summary:
Cloud Functions removes the need to manage the operating system and
the runtime, so it might be easier for you. One way or another, you won't
escape the need to set up your dependencies.
For Practice:
These "quick start" articles will give an understanding of the setup process. You learn by doing with a "hello world" example.
For Dependencies:
There is an article that shows how to declare dependencies in Cloud Functions, and its sections explain the different ways you can load them. In the first example, where it says "The dependency is then imported in the function", the require('escape-html') in the code corresponds to the "escape-html": "^1.0.3" entry in package.json. This means you can work backwards from the require() calls in your own code and add each module to your dependency manifest. You may have to learn a bit more about the syntax by searching for how package.json and npm work in general.
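For your script, working backwards from the require() calls, the dependency manifest might look roughly like this sketch (the version numbers are placeholders; pin whatever versions you actually installed):
{
  "name": "update-youtube-title",
  "version": "1.0.0",
  "dependencies": {
    "googleapis": "^39.0.0",
    "node-cron": "^2.0.0"
  }
}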
For Authentication:
Assuming your code is properly written, the require('googleapis') at the top of your code means you should only need to add that dependency as described above, and your code should be good to go for authentication.
For Cron jobs:
In your code you are doing that with const cron = require("node-cron"); at the top. Like everything else mentioned about dependencies, it is just software that triggers your code. I'm not sure it will behave the same way in Cloud Functions, because Google has its own cron service (Cloud Scheduler) that works outside the Cloud Functions environment and triggers the function over the network. Keep in mind that this method could change your code significantly, so if you can test with the node-cron dependency first, do that.
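If you do end up moving to Cloud Functions with Google's scheduler, the sketch below shows the shape of the change: the cron.schedule wrapper goes away and the body of your job becomes the exported handler (checkAndUpdateTitle is a hypothetical name; the scheduler would hit the function's HTTP URL every 5 minutes):
// Sketch only: the scheduling moves out of the code and into the scheduler service.
exports.checkAndUpdateTitle = (req, res) => {
  // ...everything that currently sits inside cron.schedule("*/5 * * * *", () => { ... }),
  // i.e. load credentials.json, authorize, and call makeAuthCall(auth)...
  res.status(200).send('done');
};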
Disclaimer:
Please open more specific questions next time if you can. Stack Overflow is not for consultation or broad questions with many follow-ups; it's for coding answers to specific questions. Some may decide to close this post for that reason.
Hope this helps.
I am attempting to send an email via an API call (Swagger) using the NodeMailer package (version 2.7.2). Functionally speaking, everything basically works: the email is delivered as expected.
The only thing is, I don't get a usable response back in the Swagger controller that calls the nodemailer package's sendEmail function.
Here is the code for the nodeMailer function. This works (sends the email) and outputs the following to the console:
Attempting to send mail to: ["someemail@gmail.com"]
250 2.0.0 OK
1550718405 w10sm28574425pge.8 - gsmtp
'use strict';
const fs = require('fs');
var nodemailer = require('nodemailer');
var emailConfig = require('../configs/email.json');
/**
* @since AlphaRC7
* @desc Config is loaded for nodemailer via emailConfig.json,
* for more information: see https://nodemailer.com/smtp/
* @param emails is a comma separated string sent from the controller processing things beforehand
*
* @since AlphaRC8
* @param shareUrl is a string GUID
*/
exports.sendEmail = function (shareUrl, emails, pdfContent) {
return new Promise(function (req, resolve) {
var transporter = nodemailer.createTransport(emailConfig);
console.log(pdfContent.buffer);
// setup e-mail data with unicode symbols
var mailOptions = {
from: emailConfig.fromSenderEmail, // sender email address
to: emails, // list of receivers
subject: 'Your colleague shared a report with you!',
text: 'Hey there! Your colleague wants to collaborate with you! <br />' +
'Check here to visit: ' + shareUrl, // plaintext body'
html: 'Hey there! Your colleague wants to collaborate with you! <p>' +
'<b>Click here to visit: </b> <a href=' + shareUrl + '>' + shareUrl + '</a></p>',
attachments:[{
filename: 'report.pdf',
content: new Buffer(pdfContent.buffer, 'binary')
}]
};
console.log("Attempting to send mail to:");
console.log(emails);
return transporter.sendMail(mailOptions).then(function(info) {
console.log(info.response);
}).catch(function(err) {
console.log(err);
});
});
}
However, Swagger never receives the response in info.response from sendMail's callback. Here is the Swagger controller that calls the sendEmail function:
'use strict';
var utils = require('../utils/writer.js');
var email = require('../impl/EmailService.js');
var fs = require('fs');
/**
* This function simply instantiates the entry, so we don't need to pass
* it anything, just have an agreement on the security side.
*/
module.exports.sendEmail = function sendEmail (req, res, next) {
var shareUrl = req.swagger.params.shareUrl.value;
var emails = req.swagger.params.emails.value;
var pdfBlob = req.swagger.params.myblob.value;
email.sendEmail(shareUrl, emails, pdfBlob)
.then(function (response) {
console.log(response);
res.send(response);
utils.writeJson(res, response);
})
.catch(function (response) {
console.log(response);
res.send(response);
utils.writeJson(res, response);
});
};
The ".then" function is never reached from the controller, so Swagger just stalls out and never gets a response back (just stays stuck on loading):
Please let me know what I need to do to properly return the result from NodeMailer's callback to the function calling from the Swagger controller. I have tried returning the actual sendMail function as well as returning response.info, and neither triggers the code in the Swagger controller's .then() function.
I was able to solve my own question here. It turns out transporter.sendMail already returns a Promise, so returning a promise wrapped around another promise was (reasonably) not acting as I thought a promise should. By removing the offending wrapping "new Promise" code and returning nodemailer's built-in Promise directly, I got an appropriate response back in the controller file.
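For reference, a minimal sketch of the fix (mailOptions stays exactly as in the question; the point is simply to return the promise that transporter.sendMail already gives you instead of wrapping it):
exports.sendEmail = function (shareUrl, emails, pdfContent) {
  var transporter = nodemailer.createTransport(emailConfig);
  var mailOptions = { /* same options as above */ };
  console.log("Attempting to send mail to:");
  console.log(emails);
  // sendMail already returns a Promise, so just return it;
  // the controller's .then() receives info as the resolved value.
  return transporter.sendMail(mailOptions).then(function (info) {
    console.log(info.response);
    return info.response;
  });
};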
I have a MongoDB instance and two JavaScript services running on a Linux server. The first service, moscaService.js, listens to MQTT topics on the server, and records what is sent in a MongoDB collection. The second service, integrationService.js, runs every second, reading data on the same MongoDB collection and, if there's a new register (or more), sends it to Ubidots.
The problem is that both services connect to the same IP/port (localhost:27017), and if both of them are ever active simultaneously (say, moscaService.js is recording something and then integrationService.js tries to connect), there is a connection error and the service restarts.
Here are the connection parts of both services:
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://127.0.0.1:27017/myGateway';
//integrationService.js
var job1 = new CronJob('*/1 * * * * *', function() {
MongoClient.connect(url, function(err, db) {
if(err != null) {
logger.error({message: 'Connection error: ' + err});
process.exit(0);
} else {
executeService();
}
function executeService() {
// execution block
}
});
}, null, true, timeZone);
//moscaService.js
server.on('published', function(packet, client) {
//the packet is read here
MongoClient.connect(url, function(err, db) {
if(err != null) {
logger.error({message: 'Connection error: ' + err});
process.exit(0);
} else {
executeService();
}
function executeService() {
// execution block
}
});
});
What I need is a way to properly handle the err instead of just exiting the service, because if new messages are published while the service is restarting, they will be lost. Something like testing whether the port is open before connecting, or opening a different port.
I tried creating another MongoDB instance on a different port so that each service could talk to its own, but it looks like Mongo locks the database's data files, so more than one instance cannot serve the same database.
The code snippets here are just a small part; if anyone needs more parts to answer, just say so and I'll add them.
I made an alteration that solved this issue. I changed the code so that integrationService connects to MongoDB before starting the CronJob; that way it connects only once and keeps the connection alive.
Here's the connection part of the code:
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://127.0.0.1:27017/myGateway';
//integrationService.js
MongoClient.connect(url, function(err, db) {
var job1 = new CronJob('*/1 * * * * *', function() {
if(err != null) {
logger.error({message: 'Connection error: ' + err});
process.exit(0);
} else {
executeService();
}
function executeService() {
// execution block
}
}, null, true, timeZone); // end CronJob
}); // end MongoClient.connect
Since this has solved the problem, I've left the err handling as it was (although a more elegant way to treat it is still desirable).
Solving the problem on integrationService has solved it on moscaService as well, but I plan to make the same alteration on the second service too.
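One possible direction for that more elegant treatment (just a sketch, assuming logger, CronJob, timeZone and the execution block are defined as in the code above) would be to retry the connection instead of exiting:
//integrationService.js (sketch)
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://127.0.0.1:27017/myGateway';
function connectWithRetry(callback) {
  MongoClient.connect(url, function (err, db) {
    if (err != null) {
      logger.error({message: 'Connection error, retrying in 5s: ' + err});
      return setTimeout(function () { connectWithRetry(callback); }, 5000);
    }
    callback(db);
  });
}
connectWithRetry(function (db) {
  var job1 = new CronJob('*/1 * * * * *', function () {
    executeService(db); // reuse the single open connection on every tick
  }, null, true, timeZone);
  function executeService(db) {
    // execution block
  }
});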
I'm trying to follow Google's Node.js API quickstart to access Gmail.
When running their sample quickstart with node quickstart.js, I get the following error:
TypeError: Cannot read property 'client_secret' of undefined
at authorize (/home/user/example/quickstart.js:32:43)
at processClientSecrets (/home/example/bstick/quickstart.js:21:3)
at FSReqWrap.readFileAfterClose [as oncomplete] (fs.js:439:3)
I have downloaded the key, renamed it client_secret.json, and copied it to the working directory of quickstart.js.
node --version is 7.8.0, and package.json has "google-auth-library": "^0.10.0" and
"googleapis": "^19.0.0".
Source code (Google's example):
var fs = require('fs');
var readline = require('readline');
var google = require('googleapis');
var googleAuth = require('google-auth-library');
// If modifying these scopes, delete your previously saved credentials
// at ~/.credentials/gmail-nodejs-quickstart.json
var SCOPES = ['https://www.googleapis.com/auth/gmail.readonly'];
var TOKEN_DIR = (process.env.HOME || process.env.HOMEPATH ||
process.env.USERPROFILE) + '/.credentials/';
var TOKEN_PATH = TOKEN_DIR + 'gmail-nodejs-quickstart.json';
// Load client secrets from a local file.
fs.readFile('client_secret.json', function processClientSecrets(err, content) {
if (err) {
console.log('Error loading client secret file: ' + err);
return;
}
// Authorize a client with the loaded credentials, then call the
// Gmail API.
authorize(JSON.parse(content), listLabels);
});
/**
* Create an OAuth2 client with the given credentials, and then execute the
* given callback function.
*
* @param {Object} credentials The authorization client credentials.
* @param {function} callback The callback to call with the authorized client.
*/
function authorize(credentials, callback) {
var clientSecret = credentials.installed.client_secret;
var clientId = credentials.installed.client_id;
var redirectUrl = credentials.installed.redirect_uris[0];
var auth = new googleAuth();
var oauth2Client = new auth.OAuth2(clientId, clientSecret, redirectUrl);
// Check if we have previously stored a token.
fs.readFile(TOKEN_PATH, function(err, token) {
if (err) {
getNewToken(oauth2Client, callback);
} else {
oauth2Client.credentials = JSON.parse(token);
callback(oauth2Client);
}
});
}
/**
* Get and store new token after prompting for user authorization, and then
* execute the given callback with the authorized OAuth2 client.
*
* @param {google.auth.OAuth2} oauth2Client The OAuth2 client to get token for.
* @param {getEventsCallback} callback The callback to call with the authorized
* client.
*/
function getNewToken(oauth2Client, callback) {
var authUrl = oauth2Client.generateAuthUrl({
access_type: 'offline',
scope: SCOPES
});
console.log('Authorize this app by visiting this url: ', authUrl);
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
rl.question('Enter the code from that page here: ', function(code) {
rl.close();
oauth2Client.getToken(code, function(err, token) {
if (err) {
console.log('Error while trying to retrieve access token', err);
return;
}
oauth2Client.credentials = token;
storeToken(token);
callback(oauth2Client);
});
});
}
/**
* Store token to disk to be used in later program executions.
*
* @param {Object} token The token to store to disk.
*/
function storeToken(token) {
try {
fs.mkdirSync(TOKEN_DIR);
} catch (err) {
if (err.code != 'EEXIST') {
throw err;
}
}
fs.writeFile(TOKEN_PATH, JSON.stringify(token));
console.log('Token stored to ' + TOKEN_PATH);
}
/**
* Lists the labels in the user's account.
*
* @param {google.auth.OAuth2} auth An authorized OAuth2 client.
*/
function listLabels(auth) {
var gmail = google.gmail('v1');
gmail.users.labels.list({
auth: auth,
userId: 'me',
}, function(err, response) {
if (err) {
console.log('The API returned an error: ' + err);
return;
}
var labels = response.labels;
if (labels.length == 0) {
console.log('No labels found.');
} else {
console.log('Labels:');
for (var i = 0; i < labels.length; i++) {
var label = labels[i];
console.log('- %s', label.name);
}
}
});
}
It seems you selected a different application type while generating the OAuth credentials.
As per the steps,
your config file should look like this:
{"installed": //whatever
But if you selected Web, then your config file will look like this:
{"web":{ //whatever
So open your config file and, based on its top-level key, change this line:
var clientSecret = credentials.installed.client_secret;
// it should be credentials.<whatever-key>.client_secret
// Example: if it's web, then this line should be credentials.web.client_secret
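If you want the code to cope with either type of credential file, a small sketch:
// Works whether the file has a top-level "installed" or "web" key
var keys = credentials.installed || credentials.web;
var clientSecret = keys.client_secret;
var clientId = keys.client_id;
var redirectUrl = keys.redirect_uris[0];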
2020 Update:
The "Other" application type no longer exists. But the "Desktop" option provides the same result.
You have either generated the wrong type of credential (see Parth Ghiya's answer) or selected the wrong type of application.
Do the following:
Go to https://console.developers.google.com/apis/credentials
and click the Create credentials button.
Choose OAuth client ID option
Select application type: Other
(not Web application!)
Give it a name and click Create
Close popup (which shows client ID and client secret)
Click the download icon on your freshly created Credential in the OAuth 2.0 client IDs list
Move the downloaded .json file to your project folder
Rename the file to client_secret.json
Run node quickstart.js
You should be good to go.
I'm trying to build a simple tool that pings a bunch of URLs to monitor their status, and it updates a variable with each app's status.
I also have another file which I'd like to be able to execute at any time to get the current status of each app from that variable.
Here's my main file; you can see there are two exports: start and getStatuses.
index.js
'use strict';
const rest = require('restler');
const time = require('simple-time');
const seconds = time.SECOND;
// The list of apps to check whether they are running
var apps = {
myApp: {
url: 'http://myUrl.com',
status: null,
lastUpdatedAt: new Date()
}
};
/**
* Loop through and check the status of every app
*/
function checkAllStatuses() {
for (var name in apps) {
if (apps.hasOwnProperty(name)) {
var app = apps[name];
console.log('app = ', app);
checkAppStatus(name, app);
}
}
}
/**
* Checks the status of an app
*
* @param name - The name of the app
* @param app - The app that we're checking the status of
*/
function checkAppStatus(name, app) {
var req = rest.get(app.url);
req.on('complete', function(result, response) {
if(response.statusCode !== app.status) {
updateStatus(name, response.statusCode);
}
});
req.on('error', function(e) {
console.log('ERROR: ' + e.message);
});
req.on('timeout', function(data, response) {
console.log('Request timed out');
});
}
/**
* Updates the status of an app
*
* @param name - The name of the app to update the status of
* @param status - The status to update the app to
*/
function updateStatus(name, status) {
apps[name].status = status;
apps[name].lastUpdatedAt = new Date();
}
function getStatuses() {
return apps;
}
function start() {
// Check every 5 seconds
setInterval(checkAllStatuses, 5*seconds);
}
module.exports.start = start;
module.exports.getStatuses = getStatuses;
Then I have a file which starts the process:
start.js
'use strict';
const status = require('./index');
status.start();
Then I have a file that I want to execute to get the current status of the apps:
consume.js
'use strict';
const status = require('./index');
console.log(status.getStatuses());
The problem is that consume.js just displays exactly what's in the initial apps variable in index.js, which is:
{
myApp: {
url: 'http://myUrl.com',
status: null,
lastUpdatedAt: new Date()
}
};
while the process running the start() command is displaying an updated status that is NOT null.
How can I make it so consume.js can see the value of a variable that start.js is updating?
I'd like to avoid using a datastore if possible. The worst-case scenario is that I write to a file, or run Redis, Mongo, or some other datastore, but I'm trying to avoid that to keep this app as simple as possible.
You are using the same code, index.js, in both start.js and consume.js, but each file runs in its own Node process, so each one creates its own separate instance of the module.
That is, the apps variable is changing in the instance created by start.js, but nothing updates the apps variable in the instance that consume.js loads.
If you are not saving a history of statuses or saving the data to a datastore, what's the point of the start routine? You could just call checkAllStatuses and then return the results when you wish to consume the data.
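A minimal sketch of that direct approach, assuming index.js also exports checkAllStatuses (e.g. module.exports.checkAllStatuses = checkAllStatuses;):
// consume.js (sketch)
'use strict';
const status = require('./index');
// Kick off the checks in this process...
status.checkAllStatuses();
// ...then read the results once the HTTP requests have had time to complete.
// (A real version would use callbacks or promises instead of a fixed delay.)
setTimeout(function () {
  console.log(status.getStatuses());
}, 10 * 1000);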
Edit
Here's an example of combining the two files (start.js and consume.js) into one file. It also adds a sample socket.io implementation, since you stated that providing the statuses to clients via websockets was the eventual goal.
var app = require('http').createServer(handler)
var io = require('socket.io')(app);
var fs = require('fs');
//// Your status library
var status = require('./index');
//// Start getting statuses
status.start();
app.listen(80);
//
// This is just the default handler
// in the socket.io example
//
function handler (req, res) {
fs.readFile(__dirname + '/index.html',
function (err, data) {
if (err) {
res.writeHead(500);
return res.end('Error loading index.html');
}
res.writeHead(200);
res.end(data);
});
}
io.on('connection', function (socket) {
// Someone wants the list of statuses
// This uses socket.io acknowledgements
// to return the data. You may prefer to use
// `socket.emit` instead or an altogether different socket library.
socket.on('status_fetch', function (data, callback_fn) {
callback_fn( status.getStatuses() );
});
});
I would like to reuse a RabbitMQ channel in different node modules. Since the channel is created asynchronously, I am not sure what the best approach would be to "inject" this channel object into other modules.
If possible, I would like to avoid external dependencies like DI containers for this.
Below, you find my simplified code.
Thank you in advance for any advice.
web.js
require('./rabbitmq')(function (err, conn) {
...
// Start web server
var http = require('./http');
var serverInstance = http.listen(process.env.PORT || 8000, function () {
var host = serverInstance.address().address;
var port = serverInstance.address().port;
});
});
rabbitmq.js:
module.exports = function (done) {
...
amqp.connect(rabbitMQUri, function (err, conn) {
...
conn.createChannel(function(err, ch) {
...
// I would like to reuse ch in other modules
});
});
}
someController.js
module.exports.post = function (req, res) {
// Reuse/inject "ch" here, so I can send messages
}
You could always attach your channel handle to the global object so it can be accessed across modules:
// rabbitmq.js
global.ch = ch
// someController.js
global.ch // <- is the channel instance
I would prefer to inject the handle into each function, because I think it is more explicit and easier to reason about and unit test. You could do that with partial function application,
with lodash:
var post = function (req, res, rabbitMQchannel) {
// Reuse/inject "ch" here, so I can send messages
}
var ch = getYourChannel();
// post is a function object that accepts two params, and the
// third is bound to your channel
module.exports.post = _.partialRight(post, ch);
A third option is to have rabbitmq.js export a function for getting a channel. The channel could be a singleton, so there would only ever be one instance of it. Then any code that wants a channel handle would request one from that function, which avoids having to pass channels around through function arguments or globals.
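A minimal sketch of that third option, assuming the same amqplib callback API and the rabbitMQUri variable from the question (note this simple version doesn't handle concurrent first calls):
// rabbitmq.js - caches a single channel after the first call
var amqp = require('amqplib/callback_api');
var channel = null;
module.exports.getChannel = function (done) {
  if (channel) {
    return done(null, channel);
  }
  amqp.connect(rabbitMQUri, function (err, conn) {
    if (err) { return done(err); }
    conn.createChannel(function (err, ch) {
      if (err) { return done(err); }
      channel = ch;
      done(null, channel);
    });
  });
};
// someController.js
var rabbitmq = require('./rabbitmq');
module.exports.post = function (req, res) {
  rabbitmq.getChannel(function (err, ch) {
    if (err) { return res.sendStatus(500); } // assuming an Express-style res
    // ch is the shared channel; publish messages here
  });
};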