I have my own database of YouTube videos. So far it contains only the id of each video, the same id that appears in the video link.
I'm looking for a way to insert both the title and the description of each video into the database using the YouTube API and MySQL.
Using the mysql npm package I can connect to the database, but the asynchronous nature of it all confuses me a bit.
Here's my original (incomplete) code:
var request = require('request');
var mysql = require('mysql');
var connection = mysql.createConnection({
host : 'localhost',
user : 'root',
password : 'this is secret',
database : 'video_db'
});
connection.query('SELECT id FROM videos', function(err, rows, fields) {
if (err) throw err;
for (var i = 0; i < rows.length; i++) {
var APIkey = 'this is also secret';
var videoid = rows[i].id;
//gets data of a single video
var API_URL = 'https://www.googleapis.com/youtube/v3/videos'
+ '?part=snippet'
+ '&id=' + videoid
+ '&key=' + APIkey;
request(API_URL, function (error, response, body) {
if (!error && response.statusCode == 200) {
console.log(JSON.parse(body));
} else {
console.log(error);
};
});
};
});
Questions:
1.) To make it work, another connection.query would be necessary inside the request callback, but since that also runs asynchronously, I'm quite unsure about the outcome. What is the proper way to write elements of that response into the database? Should I use a different kind of approach? Was it a bad idea to put the whole logic inside the 'SELECT id FROM videos' callback?
2.) I tried the API links and they worked in the browser, but when the code itself runs, request throws an error with the following message:
{ [Error: connect ENOBUFS 216.58.214.202:443 - Local (undefined:undefined)]
code: 'ENOBUFS',
errno: 'ENOBUFS',
syscall: 'connect',
address: '216.58.214.202',
port: 443 }
What's the source of this problem? (If this is better suited as a separate question, I'm happy to remove it from this one, as it is less central to the original issue.)
Make your query safe: add "ORDER BY id DESC LIMIT 10" to your query; otherwise it reads every record in the table.
You're sending too many requests inside the for loop without waiting for any of them to finish, so you run out of buffers for the requests (hence the ENOBUFS error).
Better to do it like this:
var request = require('request');
var mysql = require('mysql');
var async = require ('async');
var connection = mysql.createConnection({
host : 'localhost',
user : 'root',
password : 'this is secret',
database : 'video_db'
});
var APIkey = "SECRET KEY";
var apiUrls = [];
connection.query('SELECT id FROM videos ORDER BY id DESC LIMIT 10', function(err, rows) {
    if (err) throw err;
    rows.forEach(function(row) {
        var API_URL = 'https://www.googleapis.com/youtube/v3/videos'
            + '?part=snippet'
            + '&id=' + row.id
            + '&key=' + APIkey;
        apiUrls.push(API_URL);
    });

    // only start the requests once all URLs have been collected
    async.eachSeries(apiUrls, function(apiUrl, next) {
        request(apiUrl, function (error, response, body) {
            if (error || response.statusCode != 200) {
                console.error(response.statusCode, error);
                return next();
            }
            console.log(JSON.parse(body));
            next();
        });
    });
});
Or, if you insist on looping over all of the records:
var request = require('request');
var mysql = require('mysql');
var async = require ('async');
var connection = mysql.createConnection({
host : 'localhost',
user : 'root',
password : 'this is secret',
database : 'video_db'
});
var APIkey = "SECRET KEY";
connection.query('SELECT id FROM videos', function(err, rows) {
if (err) throw err;
async.eachSeries(rows, function(row, next){
//gets data of a single video
var apiUrl = 'https://www.googleapis.com/youtube/v3/videos'
+ '?part=snippet'
+ '&id=' + row.id
+ '&key=' + APIkey;
request(apiUrl, function (error, response, body) {
if(error || response.statusCode != 200) {
console.error(response.statusCode, error);
return next();
}
console.log(JSON.parse(body));
next();
});
});
});
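Neither variant above actually writes the title and description back into MySQL, which was the original goal. As a rough sketch of that last step (it assumes the videos table has title and description columns, which is my guess at your schema, not something stated in the question), the success branch of the request callback in the second variant could become something like:

var data = JSON.parse(body);
if (!data.items || !data.items.length) {
    // unknown, deleted or private video: nothing to store
    return next();
}
var snippet = data.items[0].snippet;
connection.query(
    'UPDATE videos SET title = ?, description = ? WHERE id = ?',
    [snippet.title, snippet.description, row.id],
    function (err) {
        if (err) console.error(err);
        // move on to the next row only once this UPDATE has finished
        next();
    }
);

Because eachSeries waits for next(), only one HTTP request and one UPDATE are in flight at any time, which also keeps you well away from the ENOBUFS problem.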
Related
I have to request twice to get the data. Is there a problem with my code? Thanks.
This is my code:
var http = require('http');
var url = require('url');
var input = "";
const MongoClient = require('mongodb').MongoClient;
const url2 = "mongodb+srv://ramaty01:password#cluster0-hi4fv.mongodb.net/test?retryWrites=true&w=majority";
http.createServer(function(req, res){
if (req.url === '/favicon.ico') {
res.writeHead(200, {'Content-Type': 'image/x-icon'} );
res.end();
console.log('favicon requested');
return;
}
res.writeHead(200, {'Content-Type': 'text/html'});
res.write(
"<html><head><title> MongoDB and NodeJS</title></head>" +
"<body><h1> COMPANY STOCK STICKER</h1><form>" +
"<input type='radio' id='company' name='types' value='company' required>" +
"<label for='company'>Company Name</label><br>" +
"<input type='radio' id='stock' name='types' value='stock'>" +
"<label for='stock'>Stock Ticker</label><br><br>" +
"<label for='txt'>Input: </label>" +
"<input type='text' id='txt' name='txt'><br><br>" +
"<input type='submit' value='Submit'>" +
"</form><div id='output'></div></body></html>"
);
var qobj = url.parse(req.url, true).query;
//var items;
mong(qobj);
res.write(input);
res.end()
input= "";
}).listen(8080);
function mong(qobj){
MongoClient.connect(url2, { useUnifiedTopology: true }, function(err, db) {
if(err) { console.log("Connection err: " + err); return; }
var dbo = db.db("Company");
var coll = dbo.collection('companies');
var query;
var inp = qobj.txt;
if (qobj.types == "company") {
query = { "company_name" : inp};
} else if (qobj.types == "stock") {
query = { "stock_ticker" : inp};
}
var s = coll.find(query,{projection: {"company_name":1, "stock_ticker":1, "_id":0}}).stream();
s.on("data", function(item) {input = input + item.company_name + " " + item.stock_ticker;});
s.on("end", function() {console.log("end of data"); db.close();});
});
}
First of all, I recommend that you immediately change your MongoDB Atlas password, and that you be more careful the next time you post anything containing sensitive information online.
With that said, having worked with MongoDB Atlas for some time now, I've noticed that connecting normally takes a while, which introduces a noticeable lag the first time you fire up your server. That is probably what is happening here, and since I can't see anything in your code that makes the local server wait for that connection before serving requests to the browser, there is no guarantee about the order of events.
To test this hypothesis, wait a little before accessing the local server. A cleaner fix is to use an async function, or to chain a promise, so that the server only starts listening after the connection has been established.
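For instance, a minimal sketch of the promise variant (reusing a single collection handle instead of reconnecting on every request is my own addition; the <user>:<password> placeholders need your real, freshly changed credentials):

const http = require('http');
const MongoClient = require('mongodb').MongoClient;
const url2 = "mongodb+srv://<user>:<password>@cluster0-hi4fv.mongodb.net/test?retryWrites=true&w=majority";

// With no callback argument, MongoClient.connect returns a promise,
// so the server only starts listening once the connection is ready.
MongoClient.connect(url2, { useUnifiedTopology: true })
    .then(function (client) {
        const coll = client.db("Company").collection("companies");
        http.createServer(function (req, res) {
            // ...same handler as in the question, but reuse `coll` here and
            // only call res.end() inside the stream's "end" event...
            res.end("ok");
        }).listen(8080);
        console.log("Connected to Atlas, listening on 8080");
    })
    .catch(function (err) {
        console.log("Connection err: " + err);
    });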
I am using a query to a postgres db to store the result in a variable in a GET request on the server-side of an Express app.
My query returns the issueID I am looking for when I run it in psql, but I am having a hard time storing the response in a variable so I can send it to the client side.
My code looks like this:
const pg = require('pg');
const connection = 'postgres://aa2:aa2#localhost:5432/bugtrackingdb';
const client = new pg.Client(connection) ;
client.connect();
app.get('/tracking', function(req, res) {
var sql = "SELECT bugtracking.issues.issueID FROM bugtracking.issues WHERE bugtracking.issues.issueID = (SELECT MAX(bugtracking.issues.issueID) FROM bugtracking.issues);"
var issueID;
client.query(sql, function(err, result) {
if (err){
console.log("error in tracking");
}
issueID = JSON.stringify(result.rows[0]);
console.log("Tracking Result" + issueID);
//GET results
res.json(res.statusCode);
// res.json(issueID);
console.log("Selected Bug Tracking IssueID. Status Code: " + res.statusCode);
});
});
How do I get the data from the query to store the data in the issueID variable to send it to the client-side of my app?
I am using body-parser also.
EDIT:
It's almost as if the query isn't even executing because I am not able to get my console.log statements to print. This is my front-end code to help understand better:
$(window).load(function() {
$.ajax({
type: 'GET',
url: 'http://139.169.63.170:' + port + '/tracking',
dataType: 'json',
cache: true,
success: function (data) {
console.log("DEBUG DONE WITH CAPTURING tacking#: " + data);
if (data === Number(200)) {
issueID = issueID + 1;
$(".trackingID").val(issueID);
}
}
});
console.log("Window loads & Ajax runs???");
});
2nd EDIT:
Here are the results of running my query in psql. I am trying to get the number '33' to return from my get request:
bugtrackingdb=# SELECT bugtracking.issues.issueID FROM bugtracking.issues WHERE bugtracking.issues.issueID = (SELECT MAX(bugtracking.issues.issueID) FROM bugtracking.issues);
issueid
---------
33
(1 row)
After a few changes, this is what the back-end console.log statements print:
Tracking Result: [object Object]
Selected Bug Tracking IssueID. Status Code: 304
EDIT (Getting closer):
I stringified the result.rows[0] from the object returned from my query. I am now able to get the result to print to the server-side console.log which gives me the following results:
Tracking Result: {"issueid":"37"}
Selected Bug Tracking IssueID. Status Code: 304
status: 200
However, when I uncomment the line res.json(issueID) I get an error.
How do I send my JSON object back to the client side so that I can display it as needed?
app.get('/tracking', function(req, res) {
var sql = "SELECT bugtracking.issues.issueID FROM bugtracking.issues WHERE bugtracking.issues.issueID = (SELECT MAX(bugtracking.issues.issueID) FROM bugtracking.issues);"
var issueID;
client.query(sql, function(err, result) {
if (err){
console.log("error in tracking");
}
issueID = JSON.stringify(result.rows[0]);
console.log("Tracking Result" + issueID);
// TRY THIS
res.send(issueID);
    });
});
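A hedged sketch of both ends, in case it helps. On the server, send the row itself as JSON instead of the status code (the simplified query below is my own equivalent of the original; note that Postgres folds unquoted identifiers to lower case, so the key comes back as issueid):

app.get('/tracking', function (req, res) {
    var sql = "SELECT MAX(issueID) AS issueid FROM bugtracking.issues;";
    client.query(sql, function (err, result) {
        if (err) {
            console.log("error in tracking", err);
            return res.status(500).json({ error: "query failed" });
        }
        // result.rows[0] looks like { issueid: '37' }; no manual stringify needed
        res.json(result.rows[0]);
    });
});

On the client, read the field off the parsed object instead of comparing against the status code:

success: function (data) {
    // data.issueid arrives as a string such as "37"
    $(".trackingID").val(Number(data.issueid) + 1);
}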
I am having trouble getting Node to make a database call without proceeding before the database function has returned a value.
Here is the basic http server code:
var http = require('http');
http.createServer(function (request, response) {
response.writeHead(200, {
'Content-Type': 'text/plain',
'Access-Control-Allow-origin': '*' // implementation of CORS
});
response.end("ok");
;
}).listen(8080,'0.0.0.0');
Using the request.on('data') handler, I am able to decode JSON from requests and then make a database call:
request.on('data', function (chunk) {
var json = JSON.parse(chunk);
var id = parseInt(json["id"]);
response.end(callDatabase(id));
});
The database function goes something like this:
function callDatabase(id) {
var result;
var connection = mysql.createConnection(
{
host : '192.168.1.14',
user : 'root',
password : '',
database : 'test'
}
);
connection.connect();
var queryString = 'SELECT name FROM test WHERE id = 1';
connection.query(queryString, function(err, rows, fields) {
if (err) throw err;
for (var i in rows) {
result = rows[i].name;
}
});
connection.end();
return result;
}
However, testing proves that I am doing it wrong. I am aware that I probably want to be using the Node async module, which I have tried. I have also tried the waterfall method, as well as parallel and many other tutorials online. I feel that the request.on handler should run in parallel and the database call should be asynchronous, so that while Node is waiting for the response from the database server it is free to get on with other requests, keeping the queueing time to a minimum.
Please let me know if I have misunderstood any of the concepts of Node.js.
You are returning result and closing the connection before the query has returned its value from the db. Place that code inside the callback.
Fixing your code, it should look like this:
function callDatabase(id) {
var result;
var connection = mysql.createConnection(
{
host : '192.168.1.14',
user : 'root',
password : '',
database : 'test'
}
);
connection.connect();
var queryString = 'SELECT name FROM test WHERE id = 1';
connection.query(queryString, function(err, rows, fields) {
if (err) throw err;
for (var i in rows) {
result = rows[i].name;
}
connection.end();
return result;
});
}
Although, this will only solve part of the problem, since now you're still calling response.end(callDatabase(id)); before waiting for a response from the query.
In order to fix this, you need to pass in some kind of callback.
function callDatabase(id, callback) {
// the method code here...
connection.query(queryString, function(err, rows, fields) {
// code...
// instead of returning the result, invoke the callback!
callback(rows);
});
}
Now you can call it like this :
request.on('data', function (chunk) {
var json = JSON.parse(chunk);
var id = parseInt(json["id"]);
callDatabase(id, function(res) {
response.end(res);
});
});
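As a side note, if callbacks feel awkward, the same flow can be wrapped in a promise around the callback version of callDatabase above; this is just a sketch, and nothing about the database code itself changes:

function callDatabaseAsync(id) {
    return new Promise(function (resolve) {
        callDatabase(id, function (rows) {
            resolve(rows);
        });
    });
}

request.on('data', function (chunk) {
    var json = JSON.parse(chunk);
    var id = parseInt(json["id"], 10);
    callDatabaseAsync(id).then(function (rows) {
        // response.end expects a string or buffer, so serialise the rows first
        response.end(JSON.stringify(rows));
    });
});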
I am attempting to get data from a MySQL table using Node.js. I call the SQL routine, which is in another Node.js file; however, I cannot get my callback to return the data. I think the problem might be that I am calling an asynchronous routine from a Node.js file rather than from an Angular or regular JavaScript program. Below is the exports script I am calling.
exports.getVenueById = function(db, id) {
var http = require('http');
var mysql = require('mysql');
var query = "SELECT * FROM venues WHERE auto_increment = ? "
query = mysql.format(query, id);
console.log("query=" + query);
db.query(
query,
function(err, rows) {
if(err) console.log("error=" + err);
console.log("rows=" + rows[0]['auto_increment']);
res.json({success: true, response: rows});
//return rows[0];
});
}
I know this is working because I am writing the results to the console. The problem is the data never gets back to the calling node js routine below.
function getVenueData (id , callback) {
(function () {
venueData = venueTable.getVenueById(db, id);
if(venueData) {
callback();
console.log("callback in loop");
}
console.log("callback after loop");
});
}
getVenueData(id, gotVenueData);
The program immediately returns and displays the timeout message. I admit that I am not an expert in nodejs or its callback feature. I would appreciate any feedback as to how I can get the program to wait for the asynchronous data to return to the calling program.
function gotVenueData() {
console.log("gotVenueData");
}
setTimeout(function() { console.log("timeout for 10 seconds");} , 10000);
console.log("venuedata=" + venueData);
You're trying to return async data synchronously. Instead, add a callback to getVenueById:
exports.getVenueById = function (db, id, cb) {
var http = require('http');
var mysql = require('mysql');
var query = "SELECT * FROM venues WHERE auto_increment = ? ";
query = mysql.format(query, id);
console.log("query=" + query);
db.query(
query,
function (err, rows) {
if (err) console.log("error=" + err);
console.log("rows=" + rows[0]['auto_increment']);
cb({
success: true,
response: rows
});
// return rows[0];
    });
};
and use it as such:
venueTable.getVenueById(db, id, function(data) {
console.log(data);
});
One caveat: Traditionally the callback has the first parameter as the error (or null) and then the actual data. With that in mind:
exports.getVenueById = function (db, id, cb) {
var http = require('http');
var mysql = require('mysql');
var query = "SELECT * FROM venues WHERE auto_increment = ? ";
query = mysql.format(query, id);
console.log("query=" + query);
db.query(
query,
function (err, rows) {
if (err) { return cb(err); }
console.log("rows=" + rows[0]['auto_increment']);
cb(null, {
success: true,
response: rows
});
// return rows[0];
    });
};
and use it as such:
venueTable.getVenueById(db, id, function(err, data) {
if (err) { return console.log(err); }
console.log(data);
});
I have a Windows Phone application that registers users with Facebook, and I store some data in a table. However, a single user is being stored more than once in the table. I tried getting the fb-id and checking the table for a record with that fb-id before registering the new user again, but since the methods are asynchronous there is no guaranteed order, and the insert query always executes first. How can I resolve this?
client side (limited)
await App.MobileService.LoginAsync(MobileServiceAuthenticationProvider.Facebook);
Message = string.Format("User Authenticate - {0}", App.MobileService.CurrentUser.UserId);
//***** Get fb info
var userId = App.MobileService.CurrentUser.UserId;
var facebookId = userId.Substring(userId.IndexOf(':') + 1);
var client = new HttpClient();
var fbUser = await client.GetAsync("https://graph.facebook.com/" + facebookId);
var response = await fbUser.Content.ReadAsStringAsync();
var jo = JObject.Parse(response);
var FbidUser = jo.GetValue("id");
var userName = jo.GetValue("name");
var genero = jo.GetValue("gender");
But I should do this on the server side; how do I insert the info only once, i.e. check whether the record already exists in the table before inserting?
Server side Azure:
function insert(item, user, request)
{
item.UserName = "<unknown>"; // default
var identities = user.getIdentities();
var req = require('request');
if (identities.facebook)
{
var fbAccessToken = identities.facebook.accessToken;
var url = 'https://graph.facebook.com/me?access_token=' + fbAccessToken;
req(url, function (err, resp, body)
{
if (err || resp.statusCode !== 200)
{
console.error('Error sending data to FB Graph API: ', err);
request.respond(statusCodes.INTERNAL_SERVER_ERROR, body);
}
else
{
try
{
var userData = JSON.parse(body);
item.UserName = userData.name;
request.execute();
} catch (ex)
{
console.error('Error parsing response from FB Graph API: ', ex);
request.respond(statusCodes.INTERNAL_SERVER_ERROR, ex);
}
}
});
}
}
Based on the code above, I have a second question: in the Facebook developer center my application has access to email and photos, so I guess the userData variable contains this information? How do I access it, and how do I get at those properties and the rest of the information?
If all you want is to prevent two items from being inserted for users with the same name, the easiest way would be to not use the name at all, but instead use the FB id (after all, it's possible for two different people to have the same name). You can do that with the script below:
function insert(item, user, request)
{
item.UserId = user.userId;
var currentTable = tables.current;
currentTable.where({ UserId: user.userId }).read({
success: function(results) {
if (results.length > 0) {
// an item with that user id already exists in the table
request.respond(400,
{ error: 'item already in the table' });
} else {
// new user, can insert it here
request.execute();
}
}
});
}
Now, if you really want to use the user name as the "key" for your table, you can do something similar as well:
function insert(item, user, request)
{
item.UserName = "<unknown>"; // default
var identities = user.getIdentities();
var req = require('request');
var currentTable = tables.current;
if (identities.facebook)
{
var fbAccessToken = identities.facebook.accessToken;
var url = 'https://graph.facebook.com/me?access_token=' + fbAccessToken;
req(url, function (err, resp, body)
{
if (err || resp.statusCode !== 200)
{
console.error('Error sending data to FB Graph API: ', err);
request.respond(statusCodes.INTERNAL_SERVER_ERROR, body);
} else {
try {
var userData = JSON.parse(body);
item.UserName = userData.name;
currentTable.where({ UserName: item.UserName }).read({
success: function(results) {
if (results.length > 0) {
request.respond(statusCodes.BAD_REQUEST,
{ error: 'Name already in the table' });
} else {
request.execute();
}
}
});
} catch (ex) {
console.error('Error parsing response from FB Graph API: ', ex);
request.respond(statusCodes.INTERNAL_SERVER_ERROR, ex);
}
}
});
}
}
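Regarding your second question about email and photos: the script above only reads the name, but as far as I know the Graph API lets you request additional fields explicitly, provided your app has actually been granted the email permission. A sketch, where item.Email and item.PictureUrl are hypothetical column names of my own choosing:

var url = 'https://graph.facebook.com/me'
        + '?fields=id,name,email,picture'
        + '&access_token=' + fbAccessToken;
req(url, function (err, resp, body)
{
    if (err || resp.statusCode !== 200)
    {
        console.error('Error sending data to FB Graph API: ', err);
        return request.respond(statusCodes.INTERNAL_SERVER_ERROR, body);
    }
    var userData = JSON.parse(body);
    item.UserName = userData.name;
    item.Email = userData.email; // only present if the user granted the email permission
    // depending on the Graph API version, picture is either a plain URL
    // or an object shaped like { data: { url: ... } }
    item.PictureUrl = (userData.picture && userData.picture.data)
        ? userData.picture.data.url
        : userData.picture;
    request.execute();
});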