Sending data to psql from node - javascript

I have an Excel file that I managed to read in Node.js; the sheet has no header row, only 4 columns. The code I used is the following:
const { Client } = require("pg");
const xlsx = require("node-xlsx");

var data = xlsx.parse(__dirname + "/disease_data.xlsx");
var diseaseCode = [];
var disease = [];
var symptomCode = [];
var symptom = [];

const client = new Client({
  host: "localhost",
  user: "postgres",
  port: 5432,
  password: "dizertatie",
  database: "SymptomChecker",
});
client.connect();

for (var i = 0; i < data.length; i++) {
  var sheet = data[i];
  console.log(sheet["data"][i][1]);
  for (var j = 0; j < sheet["data"].length; j++) {
    // add the row to the rows array
    // diseaseCode.push(sheet["data"][j][0]);
    // Here is the problem
    client.query(`INSERT INTO diseases (diseaseCode, disease, symptomCode, symptom) VALUES ('${}')`)
  }
}
The problem is that I managed to push each column from the Excel sheet into a separate array, but I don't know how to insert the values so they stay matched up row by row. What can I do?
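Since each row in sheet.data is already an array in column order, one way to keep the values matched up (a minimal sketch, reusing the connection settings from the question and assuming each row holds exactly the four columns) is to skip the separate column arrays and insert row by row with pg's parameterized placeholders:

const { Client } = require("pg");
const xlsx = require("node-xlsx");

async function importDiseases() {
  const client = new Client({
    host: "localhost",
    user: "postgres",
    port: 5432,
    password: "dizertatie",
    database: "SymptomChecker",
  });
  await client.connect();

  const sheets = xlsx.parse(__dirname + "/disease_data.xlsx");
  for (const sheet of sheets) {
    for (const row of sheet.data) {
      // row is [diseaseCode, disease, symptomCode, symptom] in column order
      await client.query(
        "INSERT INTO diseases (diseaseCode, disease, symptomCode, symptom) VALUES ($1, $2, $3, $4)",
        row.slice(0, 4)
      );
    }
  }
  await client.end();
}

importDiseases().catch(console.error);

The placeholders also avoid the quoting problem the template literal runs into.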

Related

How can I iterate through multiple URLs to fetch JSON response in Google Apps Script?

I am working with a third-party API (from a company called Simpli.fi) in Google Apps Script to pull some data into a spreadsheet. I am able to authenticate my API call just fine and can pull all of my required data with one URL. The issue is that the URL to call this API is formatted as follows:
https://app.simpli.fi/api/organizations/{CLIENT_ID}/{SOME_ENDPOINT}
It works when I plug in one client id and one endpoint; however, I do not want to pull the data individually for each client and each data endpoint.
I want to pull data on all of my clients and also access multiple endpoints, such as "/audiences" or "/campaigns". I am hoping there is a way (similar to Promises in JavaScript) to iterate through multiple URLs and fetch all of the data from the API.
For now, I am simply focusing on pulling everything I want from the "/audiences" endpoint for all of my clients. I have set up an array read from the Google Sheet that contains all the client codes and plugged it into the URL with a for loop, which works just fine:
// iterate through all URL endpoints and client codes
var ss = SpreadsheetApp.getActiveSpreadsheet();
var sheet = ss.getSheetByName('formatting');
var range = sheet.getRange(['B2:B']).getValues();
var clients = range.filter(String);
var urlOneArray = [];
for (var i = 0; i < clients.length; i++) {
  var urlOne = [baseURL + clients[i] + '/audiences'];
  for (var j = 0; j < urlOne.length; j++) {
    urlOneArray = urlOne[j];
    Logger.log(urlOneArray);
  }
}
The above logs a list of each built out URL as desired.
After pushing all of the built-out URLs into the urlOneArray, I tried calling with UrlFetchApp.fetchAll:
for (i = 0; i < urlOneArray.length; i++) {
  var response = UrlFetchApp.fetchAll(urlOneArray[i], params);
  Utilities.sleep(500);
  Logger.log(response);
}
When trying to use this method, I receive this error:
"Cannot find method fetchAll(string,object). (line 35, file "Code")"
If there is a way to iterate through multiple URLs to gather all of the data from the API in one pull, I would really appreciate some pointers.
Here is the full script:
// authenticate API call
var X_USER_KEY = 'XXXX';
var X_APP_KEY = 'XXXX';

function simplifiService() {
  var baseURL = 'https://app.simpli.fi/api/organizations';

  // iterate through all URL endpoints and client codes
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName('formatting');
  var range = sheet.getRange(['B2:B']).getValues();
  var clients = range.filter(String);
  var urlOneArray = [];
  for (var i = 0; i < clients.length; i++) {
    var urlOne = [baseURL + clients[i] + '/audiences'];
    for (var j = 0; j < urlOne.length; j++) {
      urlOneArray = urlOne[j];
      Logger.log(urlOneArray);
    }
  }

  var params = {
    method: 'GET',
    headers: {
      "x-app-key": X_APP_KEY,
      "x-user-key": X_USER_KEY
    },
    muteHttpExceptions: true
  };

  for (i = 0; i < urlOneArray.length; i++) {
    var response = UrlFetchApp.fetchAll(urlOneArray[i], params);
    Utilities.sleep(500);
    Logger.log(response);
  }

  if (response.getResponseCode() === 200) {
    var data = JSON.parse(response);
    Logger.log(data);
  } else {
    Logger.log('Error: ' + response.getResponseCode());
  }

  getData(data);
}

// parse out JSON data
function getData(data) {
  var date = new Date();
  var geoFenceId = data.audiences;
  var geoFenceName = data.audiences[0].name;
  var ss = SpreadsheetApp.getActiveSpreadsheet();
  var sheet = ss.getSheetByName('Campaign Data');
  //sheet.appendRow([date, geoFenceId, geoFenceName]);
}
Issue:
Invalid syntax: UrlFetchApp.fetchAll(requests Object[]) accepts an array of request objects, while you're providing a string and an object as arguments.
Solution:
Valid syntax: create one request object per endpoint/client combination and pass the whole array to fetchAll().
Snippet:
function copyParams() { // shallow clone of the params object
  for (var i in params) {
    this[i] = params[i];
  }
}
var endPoints = ['/audiences', '/campaigns'];
var requests = [];
var url, obj;
clients.forEach(function(client) {
  endPoints.forEach(function(endPoint) {
    obj = new copyParams();
    url = baseURL + '/' + client[0] + endPoint;
    obj.url = url; // fetchAll() reads the url from each request object
    requests.push(obj);
  });
});
console.log(requests);
var responseArray = UrlFetchApp.fetchAll(requests);
console.log(responseArray);
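A possible follow-up sketch for consuming responseArray, assuming each endpoint returns a JSON body the way the original simplifiService() expects:

responseArray.forEach(function(response, i) {
  if (response.getResponseCode() === 200) {
    var data = JSON.parse(response.getContentText());
    Logger.log(requests[i].url + ' -> ' + JSON.stringify(data));
  } else {
    Logger.log('Error ' + response.getResponseCode() + ' for ' + requests[i].url);
  }
});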
References:
UrlFetchApp.fetchAll
Array#forEach

Copying values into a spreadsheet if the value does not exist already - Google Script

I'm trying to:
1. Go into a Drive folder and retrieve the spreadsheet IDs of the existing spreadsheets
2. Go into each spreadsheet and get some data
3. Copy that data into a target sheet
4. With the condition that the data does not exist there already
Steps 1-3 give me no issues, but I cannot correctly search for and match data that already exists.
This is my code so far. If I run it twice on the same data set, some data is copied when it should not be, since it already exists.
Any help, please?
function getReportData() {
  // Sources
  var reportFolder = DriveApp.getFolderById('ReportFolderID'); // Get Status reports folder
  var reportsList = reportFolder.getFiles(); // Returns a FileIterator object
  var spreadSheets = [];
  var targetSSheet = SpreadsheetApp.openById('TargetSheetID');
  var targetSheet = targetSSheet.getActiveSheet();
  var lastRow = targetSheet.getLastRow();
  var searchRange = targetSheet.getRange(2, 1, lastRow, 2);
  var searchRangeV = searchRange.getValues();
  var allStatuses = [];

  // Populate the spreadSheets list with the latest report sheet IDs
  while (reportsList.hasNext()) {
    var reports = reportsList.next(); // Object of type File
    spreadSheets.push(reports.getId());
  }

  // Loop through the list of report sheets
  for (i = 0; i < spreadSheets.length; i++) {
    var spreadSheet = SpreadsheetApp.openById(spreadSheets[i]);
    var activeSheet = spreadSheet.getActiveSheet();
    var individualStatus = [];
    // Gets project report data
    var projectName = activeSheet.getRange("B3:B4").getValue();
    var reportDate = activeSheet.getRange("I3:I4").getValue();
    var projectStatus = activeSheet.getRange("G9:i9").getValue();
    // Creates reports array
    individualStatus.push(reportDate, projectName, projectStatus);
    allStatuses.push(individualStatus);
  }

  // Cleans the status array of existing ones
  for (j = 0; j < allStatuses.length; j++) {
    var searchKey = allStatuses[j][0] + allStatuses[j][1];
    Logger.log(searchKey);
    for (k = 0; k < searchRangeV.length; k++) {
      var matchKey = searchRangeV[k][0] + searchRangeV[k][1];
      if (searchKey == matchKey) {
        allStatuses.splice(j, 1);
        break;
      } else {
        Logger.log(searchKey);
        Logger.log(matchKey);
      }
    }
  }

  // Copies the data to the target sheet
  for (var project = 0; project < allStatuses.length; project++) {
    // Gets the last row each time it goes through the loop
    var latestRow = targetSheet.getLastRow();
    var lastColumn = targetSheet.getLastColumn();
    for (var status = 0; status < allStatuses[project].length; status++) {
      targetSheet.getRange(latestRow + 1, status + 1, 1, 1).setValue(allStatuses[project][status]);
    }
  }
}
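One likely culprit is the splice inside the j loop: removing allStatuses[j] shifts the next element into slot j, which the loop then skips. A sketch of a filter-based alternative (assuming the duplicate key is still date + project name, stored in the same format in both sheets):

function filterNewStatuses(allStatuses, searchRangeV) {
  // Build a lookup of the keys that already exist in the target sheet
  var existingKeys = {};
  for (var k = 0; k < searchRangeV.length; k++) {
    existingKeys[searchRangeV[k][0] + searchRangeV[k][1]] = true;
  }
  // filter() never skips entries, unlike splicing while iterating
  return allStatuses.filter(function(status) {
    return !existingKeys[status[0] + status[1]];
  });
}

// Inside getReportData(), replacing the splice loop and the cell-by-cell copy:
// allStatuses = filterNewStatuses(allStatuses, searchRangeV);
// if (allStatuses.length > 0) {
//   targetSheet.getRange(targetSheet.getLastRow() + 1, 1, allStatuses.length, 3)
//              .setValues(allStatuses);
// }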

Trouble accessing javascript objects within array

I have some kind of error in my JavaScript that I can't seem to figure out. I am creating car objects out of a MySQL query result and storing these objects in an array (carArray).
The problem appears once I try to access these objects later on. In this example, I want to present the objects in a table.
function AddTableOfCars() {
  // Table headers
  var heading = new Array();
  heading[0] = "Merke";
  heading[1] = "Reg.nr.";
  heading[2] = "Sist endret";

  // Database connection
  var mysql = require('mysql');
  var connection = mysql.createConnection({
    host: 'db_host',
    user: 'db_user',
    password: 'db_password',
    database: 'db_name'
  });

  // Empty array to store cars
  var carArray = [];

  connection.connect();
  var stmt = 'SELECT `VehicleID`,`VehicleMake`,`LicenseNumber`,`IsActive`,`DateChanged` FROM `db_table`';
  connection.query(stmt, function (error, rows, fields) {
    if (error) console.log(error.code);
    else {
      // Loop through query result
      // Create car objects
      // Store car objects in array
      for (var i = 0; i < rows.length; i++) {
        var carObj = new Car(rows[i].VehicleID, rows[i].VehicleMake, rows[i].LicenseNumber, rows[i].IsActive, rows[i].DateChanged);
        carArray.push(carObj);
      }
    }
  });
  connection.end();

  // Table columns
  var table = document.getElementById('car-table');
  for (var i = 0; i < carArray.length; i++) {
    var row = table.insertRow(i);
    var cellMake = row.insertCell(0);
    var cellLicense = row.insertCell(1);
    var cellDateChanged = row.insertCell(2);
    cellMake.innerHTML = carArray[i].VehicleMake;
    cellLicense.innerHTML = carArray[i].LicenseNumber;
    cellDateChanged.innerHTML = carArray[i].DateChanged;
  }

  // Logs for debugging purposes
  console.log(carArray);
  console.log(carArray[0]);
}
My console.log(carArray); returns the following in the console:

[]
  0: Car
  1: Car
  length: 2

So far, that seems to add up. However, when I try to access one of the objects within the array using console.log(carArray[0]);, it returns undefined.
Does anyone have any pointers for me at this point? It's most likely just a tiny detail that I have missed, but I have been looking at my code for quite some time now and I am starting to see animals instead of code...
Any help would be greatly appreciated.
I think you might be experiencing some async issues: connection.query only runs its callback once the query returns, but the table-building loop and both console.log calls run immediately, while carArray is still empty (the console shows the Car objects later only because it logs a live reference to the array). Moving the table-building into the callback should sort it:
function AddTableOfCars() {
  let tableHeaders = ["Merke", "Reg.nr.", "Sist endret"];
  let mysql = require('mysql');
  let stmt = 'SELECT `VehicleID`,`VehicleMake`,`LicenseNumber`,`IsActive`,`DateChanged` FROM `db_table`';
  let connection = mysql.createConnection({
    host: 'db_host',
    user: 'db_user',
    password: 'db_password',
    database: 'db_name'
  });

  connection.connect();
  connection.query(stmt, function (error, rows) {
    if (error) {
      console.log(error.code);
    } else {
      let carArray = [];
      rows.forEach(row => {
        carArray.push(new Car(row.VehicleID,
                              row.VehicleMake,
                              row.LicenseNumber,
                              row.IsActive,
                              row.DateChanged));
      });
      // The rows only exist here, inside the callback, so build the table here too
      buildTable(carArray);
    }
  });
  connection.end();
}

function buildTable(carArray) {
  // Table columns
  var table = document.getElementById('car-table');
  carArray.forEach((car, i) => {
    var row = table.insertRow(i);
    var cellMake = row.insertCell(0);
    var cellLicense = row.insertCell(1);
    var cellDateChanged = row.insertCell(2);
    cellMake.innerHTML = car.VehicleMake;
    cellLicense.innerHTML = car.LicenseNumber;
    cellDateChanged.innerHTML = car.DateChanged;
  });
}
Note: I couldn't see your use of tableHeaders but I left it in anyway.
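For comparison, a sketch of the same flow with the promise-based mysql2 package (an assumption - the question uses the callback-based mysql package), which makes the ordering explicit: the table is built only after the rows have arrived.

async function addTableOfCars() {
  const mysql = require('mysql2/promise');
  const connection = await mysql.createConnection({
    host: 'db_host',
    user: 'db_user',
    password: 'db_password',
    database: 'db_name'
  });
  // await guarantees rows exist before the table is built
  const [rows] = await connection.query(
    'SELECT `VehicleID`,`VehicleMake`,`LicenseNumber`,`IsActive`,`DateChanged` FROM `db_table`'
  );
  await connection.end();
  const carArray = rows.map(row => new Car(row.VehicleID, row.VehicleMake,
    row.LicenseNumber, row.IsActive, row.DateChanged));
  buildTable(carArray);
}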

Node / MySQL - code: 'ER_PARSE_ERROR', when trying to insert ~800 records

I am working on a small idea to collect errors from pages, store them in a DB and then use a graph API to display the information visually.
There are 8 sites and each of them has 100 entries - so 800 rows per run.
I loop through each site and then sub-loop through its table of errors and collect them.
I got it working by issuing an insert query inside each of those sub-loops for all 800 entries, but I get some sort of memory leak from so many transactions and after a few minutes Node crashes because it exceeds its memory limit.
So I tried queuing all 800 entries into an array of arrays and then performing a multi-row insert at the end of every iteration, but I am getting ER_PARSE_ERROR.
var tabletojson = require('tabletojson');
var mysql = require("mysql");
var striptag = require("striptags");
var fs = require("fs");
var path = require('path');

var startCollector;
var iterations = 0;
var insertions = 0;
var duplicated = 0;
var datas = [];
var clients = ["ClientA", "ClientB", "ClientC", "ClientD", "ClientE", "ClientF", "ClientG", "ClientH"];
var appDir = path.dirname(require.main.filename);
var errorList = ["err1", "err2", "err3", "err4", "err5", "err6"];

var con = mysql.createPool({
  host: "localhost",
  user: "User",
  password: "Password",
  database: "errors"
});

function CollectErrors() {
  startCollector = new Date();
  for (var a = 0; a < clients.length; a++) {
    (function(a) {
      tabletojson.convertUrl("http://example.com" + clients[a] + "/page.php?limit=100", { stripHtmlFromCells: false }, function(response) {
        var rs = response[0];
        for (var l = rs.length - 1; l > -1; l--) {
          var newDate = formatDate(striptag(rs[l]["Date"]), striptag(rs[l]["Time"]));
          var user = getUser(striptag(rs[l]["User"]));
          var msg = striptag(rs[l]["Error"]);
          var splitError = rs[l]["Error"].split("<a href=\"");
          var link = getUrl(splitError[1]);
          var id = getId(link);
          var type = getType(striptag(splitError[0]));
          var temp = [newDate, link, type, user, clients[a], id, msg];
          datas.push(temp);
        }
      });
    })(a);
  }
  con.getConnection(function(err, connection) {
    connection.query("INSERT IGNORE INTO entries (time, url, type, author, client, uid, message) VALUES ?", [datas], function(err, rows) {
      console.log(err);
    });
    connection.release();
    datas = [];
  });
  setTimeout(CollectErrors, 10000);
}

function formatDate(date, time) {
  var newdate = date.split("/").reverse().join("-");
  var newtime = time + ":00";
  return newdate + " " + newtime;
}

function getUrl(uri) {
  return "http://example.com/" + uri.split("\">Details")[0];
}

function getId(url) {
  return decodeURIComponent((new RegExp('[?|&]' + "id" + '=' + '([^&;]+?)(&|#|;|$)').exec(url) || [null, ''])[1].replace(/\+/g, '%20')) || null;
}

function getType(error) {
  for (var a = 0; a < errorList.length; a++) {
    if (error.indexOf(errorList[a]) !== -1) {
      return errorList[a];
    }
  }
  return "Other";
}

function getUser(user) {
  if (user == "" || user == " " || user == null) {
    return "System";
  }
  return user;
}

CollectErrors();
I've tried mysql.createConnection too, but that gave me the same issue.
I've been stuck for the past 12 hours and I can't see what's wrong; I've even tried populating the datas array with plain strings but got the same error.
I've changed your code to use ES6 and the modules' features correctly.
Useful links: correct pooling with mysql, correct insert query, async/await, IIFE, enhanced object literals
const tabletojson = require('tabletojson'),
      mysql = require("mysql"),
      striptag = require("striptags"),
      fs = require("fs"),
      path = require('path');

let startCollector,
    iterations = 0,
    insertions = 0,
    duplicated = 0;
let datas = [];
const clients = ["ClientA", "ClientB", "ClientC", "ClientD", "ClientE", "ClientF", "ClientG", "ClientH"];
const appDir = path.dirname(require.main.filename);
const errorList = ["err1", "err2", "err3", "err4", "err5", "err6"];

const con = mysql.createPool({
  host: "localhost",
  user: "User",
  password: "Password",
  database: "errors"
});

// We'll use async/await from ES6
const collectErrors = async () => {
  // Up to here I've only changed syntax to ES6
  let startCollector = new Date();
  // We iterate through each client. for..of lets us use await inside the loop
  for (let client of clients) {
    // Please check that client holds the correct value. If not, change for..of back to your for loop and client to clients[a]
    const tbj = await tabletojson.convertUrl("http://example.com" + client + "/page.php?limit=100", {
      stripHtmlFromCells: false
    });
    const result = tbj[0];
    for (let rs of result) {
      // I can't check this part, but I hope your example was with correct values.
      let newDate = formatDate(striptag(rs["Date"]), striptag(rs["Time"]));
      let user = getUser(striptag(rs["User"]));
      let msg = striptag(rs["Error"]);
      let splitError = rs["Error"].split("<a href=\"");
      let link = getUrl(splitError[1]);
      let id = getId(link);
      let type = getType(striptag(splitError[0]));
      // ES6 enhanced object literal syntax
      datas.push({
        newDate,
        user,
        msg,
        id,
        splitError,
        link,
        type,
        temp: [newDate, link, type, user, client, id, msg]
      });
    }
  }
  // OK, here we have a fully populated datas array. And we want to save it.
  con.getConnection((err, connection) => {
    // Please notice, here I've changed your insert query to a prepared statement.
    connection.query("INSERT IGNORE INTO entries SET ?", datas, (err, rows) => {
      console.log(err);
      connection.release();
      datas = [];
    });
  });
  // I don't see why you need a timeout here, so I've left it commented.
  // setTimeout(collectErrors, 10000);
};

// Here your other methods go....

// And to call your async function we'll use an IIFE
(async () => {
  await collectErrors();
})();
Probably there may be errors with mysql insert, but that's not for sure. If occurred, please write in comments and I'll help you with that.
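On the original ER_PARSE_ERROR: the insert in CollectErrors() runs before the asynchronous tabletojson callbacks have pushed anything, so datas is empty and the VALUES ? placeholder expands to nothing, which MySQL rejects. A sketch of the bulk insert as its own step (assuming the rows keep the original [time, url, type, author, client, uid, message] shape and the insert is only called once all pages have been processed):

const insertRows = (rows) => new Promise((resolve, reject) => {
  if (rows.length === 0) return resolve(0); // an empty VALUES ? is exactly the parse error
  con.getConnection((err, connection) => {
    if (err) return reject(err);
    // node-mysql expands [rows] into a multi-row VALUES list
    connection.query(
      "INSERT IGNORE INTO entries (time, url, type, author, client, uid, message) VALUES ?",
      [rows],
      (queryErr, result) => {
        connection.release();
        if (queryErr) return reject(queryErr);
        resolve(result.affectedRows);
      }
    );
  });
});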

nested javascript queries in parse

I have the code below. Basically I have 3 nested Parse queries. One gets a number of "followers"; for each follower I get a number of "ideas", and for each idea I would like to get the idea creator's name (a user in the User table).
The first two nested queries work, but when I try to get the name of the user (the creator of the idea), that last nested query does NOT execute in order. It is skipped and then executed later in the code. Why is this happening?
var iMax = 20;
var jMax = 10;
var IdeaList = new Array();
var IdeaListCounter = 0;
var myuser = Parse.User.current();

var Followers = new Parse.Query("Followers");
Followers.equalTo("Source_User", { __type: "Pointer", className: "_User", objectId: myuser.id });
Followers.find({
  success: function(results) {
    for (var i = 0; i < results.length; i++) {
      var object = results[i];
      var Ideas = new Parse.Query("Ideas");
      Ideas.equalTo("objectId_User", { __type: "Pointer", className: "_User", objectId: object.get('Destination_User').id });
      Ideas.find({
        success: function(results2) {
          for (i = 0; i < iMax; i++) {
            IdeaList[i] = new Array();
            for (j = 0; j < jMax; j++) {
              IdeaList[i][j] = 0;
            }
          }
          for (var j = 0; j < results2.length; j++) {
            var object2 = results2[j];
            var ideausername2 = "";
            IdeaListCounter++;
            var ideausername = new Parse.Query("User");
            ideausername.equalTo("objectId", object2.get('objectId_User').id);
            ideausername.first({
              success: function(ideausernameresult) {
                ideausername2 = ideausernameresult.get("name");
              }
            });
            IdeaList[IdeaListCounter, 0] = object2.get('objectId_User').id + " " + ideausername2; // sourceuser
            IdeaList[IdeaListCounter, 1] = object2.get('IdeaText'); // text
            IdeaList[IdeaListCounter, 2] = object2.get('IdeaImage'); // image
            IdeaList[IdeaListCounter, 3] = object2.get('IdeaLikes'); // likes
            IdeaList[IdeaListCounter, 4] = object2.get('IdeaReport'); // reports
Your nested query is asynchronous: ideausername.first() only schedules the request, so the success callback that sets ideausername2 runs after the lines that build IdeaList have already executed with the empty string.
Check out the answer at the following for guidance:
Nested queries using javascript in cloud code (Parse.com)
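If the objectId_User column really is a Pointer to _User (which the equalTo constraint above suggests), a sketch of an alternative that avoids the third query entirely by asking Parse to include the linked user with each idea, and that only reads the results inside the callback where they exist:

var Ideas = new Parse.Query("Ideas");
Ideas.equalTo("objectId_User", object.get("Destination_User"));
Ideas.include("objectId_User"); // fetch the linked _User in the same request
Ideas.find().then(function(ideas) {
  ideas.forEach(function(idea) {
    var creator = idea.get("objectId_User"); // already a full Parse.User
    var creatorName = creator ? creator.get("name") : "";
    // build the IdeaList entries here, inside the callback, where the data is ready
  });
});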
