How to connect my data model with a view object? - javascript

I have made a model class for my view, shown in the code below (I'm using nw.js):
var sqlite3 = require("sqlite3").verbose();
var db = new sqlite3.Database('./../data.sqlite');

class store_model {
    constructor() {
        this.state = {
            _stmt: null
        };
    }

    readAll() {
        // sqlite3 passes the error first, then the row
        db.each("SELECT * FROM store", (err, row) => {
            if (err)
                console.log(err);
            else
                console.log(row);
        });
    }

    read(obj) {
        // Use a placeholder instead of concatenating obj.id into the SQL
        db.get("SELECT * FROM store WHERE ID = ?", obj.id, (err, row) => {
            if (err)
                console.log(err);
            else if (row != null)
                console.log(row);
        });
    }

    create(obj) {
        try {
            db.serialize(() => {
                this.state._stmt = db.prepare("INSERT INTO store (name, mail) VALUES (?, ?)");
                this.state._stmt.run(obj.name, obj.mail);
                this.state._stmt.finalize();
            });
        } catch (err) {
            console.log("There is an error " + err);
        }
    }

    update(obj) {
        db.serialize(() => {
            this.state._stmt = db.prepare("UPDATE store SET name = COALESCE(?, name), mail = COALESCE(?, mail) WHERE ID = ?");
            console.log(obj.name);
            try {
                this.state._stmt.run(obj.name, obj.mail, obj.id);
            } catch (err) {
                console.log(err);
            }
            this.state._stmt.finalize();
            console.log("updated ....");
        });
    }

    delete(obj) {
        this.state._stmt = db.prepare("DELETE FROM store WHERE ID = ?");
        this.state._stmt.run(obj.id);
        console.log("deleted");
    }
}
After making it, I want to pass the responses of some of my requests to my view in Vue.js. The problem I'm running into is that my requests run in asynchronous callbacks, so I can't pass the response directly to my view object because of the scope. I would like someone to guide me toward a way to pass the responses of my requests to my view object (my view object has an attribute to receive the data).
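One common pattern here is to have the model methods return Promises and assign the resolved rows to the view inside the Vue component. This is only a minimal sketch, assuming the same db handle as above; the #app element and the stores data property are illustrative names, not something from the original question:
// In the model: return a Promise instead of only logging inside the callback.
readAll() {
    return new Promise((resolve, reject) => {
        // db.all collects every row and hands them back as one array
        db.all("SELECT * FROM store", (err, rows) => {
            if (err) reject(err);
            else resolve(rows);
        });
    });
}

// In the Vue component (the `stores` data property is an assumption):
new Vue({
    el: '#app',
    data: { stores: [] },
    created() {
        new store_model().readAll()
            .then(rows => { this.stores = rows; })
            .catch(err => console.log(err));
    }
});
Once the data lives on the component, the template can render it directly.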

Related

How to check if value already exists in javascript JSON array?

var ComfyJS = require("comfy.js");
var fs = require('fs');

const dataBuffer = fs.readFileSync('database.json');
const dataJSON = dataBuffer.toString();
const scoreBoard = JSON.parse(dataJSON);

ComfyJS.onChat = (user, message, flags, self, extra) => {
    for (let i = 0; i < scoreBoard.length; i++) {
        if (scoreBoard[i].name == user) {
            console.log('The name already exist');
        } else {
            scoreBoard.push({ name: user, score: message });
        }
    }
    var data = JSON.stringify(scoreBoard);
    fs.writeFile('database.json', data, function (err) {
        if (err) {
            console.log('There has been an error saving your configuration data.');
            console.log(err.message);
            return;
        }
        console.log('Configuration saved successfully.');
    });
}
Hi, I'm new to coding and I'd like to build a Twitch bot that saves my data to a JSON file. ComfyJS.onChat triggers when somebody types something in chat, and I want to take their name and message (score) and save them to my database one by one, so if the name already exists in the JSON file I don't want to add it again. How should I fix it?
ComfyJS.onChat = (user, message, flags, self, extra) => {
    const exists = scoreBoard.find(fn => fn.name === user);
    if (exists) return;
    scoreBoard.push({
        name: user,
        score: message
    });
    var data = JSON.stringify(scoreBoard);
    fs.writeFile('database.json', data, function (err) {
        if (err) {
            console.log('There has been an error saving your configuration data.');
            console.log(err.message);
            return;
        }
        console.log('Configuration saved successfully.');
    });
}
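A hedged alternative, not part of the answer above: if the scoreboard only ever needs one entry per user, storing it as an object keyed by user name makes the existence check a plain property lookup and rules out duplicates by construction (the file would then hold an object rather than an array):
// Sketch assuming database.json contains an object like { "someUser": "12" }
const scores = JSON.parse(fs.readFileSync('database.json').toString());

ComfyJS.onChat = (user, message, flags, self, extra) => {
    if (scores[user] !== undefined) return; // name already exists
    scores[user] = message;
    fs.writeFile('database.json', JSON.stringify(scores), function (err) {
        if (err) {
            console.log('There has been an error saving your configuration data.');
            console.log(err.message);
            return;
        }
        console.log('Configuration saved successfully.');
    });
};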

Avoid duplicates when saving new data with mongoose

I am working on an application where I can save destinations to my Mongo DB. I would like to throw a custom error when trying to save a destination that already exists in the DB. Mongoose prevents that from happening, but I want clear and user-friendly error handling.
// post a new destination
router.post('/', (req, res) => {
    const newCity = new cityModel({
        name: req.body.name,
        country: req.body.country
    });
    newCity.save()
        .then(city => {
            res.send(city);
        })
        .catch(err => {
            res.status(500).send('Server error');
        });
});
Before saving a new destination, you can check whether the document already exists using the findOne method, and if it does you can return a custom error.
router.post("/", async (req, res) => {
const { name, country } = req.body;
try {
const existingDestination = await cityModel.findOne({name,country});
if (existingDestination) {
return res.status(400).send("Destionation already exists");
}
let newCity = new cityModel({ name, country });
newCity = await newCity.save();
res.send(city);
} catch (err) {
console.log(err);
res.status(500).send("Server error");
}
});
Note that I assumed a duplicate means the same name and country already exist. If that is not what you want, you can change the query passed to findOne.
Since you've created a unique index, when you try to write a duplicate the result would be:
WriteResult({
    "nInserted" : 0,
    "writeError" : {
        "code" : 11000,
        "errmsg" : "E11000 duplicate key error index: test.collection.$a.b_1 dup key: { : null }"
    }
})
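For reference, a unique compound index like the one assumed here could be declared on the Mongoose schema roughly as follows (a sketch only, since the original schema isn't shown; the schema and model names are hypothetical):
const mongoose = require('mongoose');

// Hypothetical schema; field names taken from the question.
const citySchema = new mongoose.Schema({
    name: String,
    country: String
});

// Allow each name/country pair only once; duplicate saves then fail with error code 11000.
citySchema.index({ name: 1, country: 1 }, { unique: true });

const cityModel = mongoose.model('City', citySchema);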
Your code :
Constants File :
module.exports = {
    DUPLICATE_DESTINATION_MSG: 'Destination already exists',
    DUPLICATE_DESTINATION_CODE: 4000
}
Code :
// post a new destination
const constants = require('path to constants File');

router.post('/', async (req, res) => {
    const newCity = new cityModel({
        name: req.body.name,
        country: req.body.country
    });
    try {
        let city = await newCity.save();
        res.send(city);
    } catch (error) {
        if (error.code == 11000) {
            return res.status(400).send(`Destination - ${req.body.name} with country ${req.body.country} already exists in system`);
        }
        /* In case your front end reads your error code and has its own set of
           custom business messages, form a response object with code/message and send it:
        if (error.code == 11000) {
            let respObj = {
                code: constants.DUPLICATE_DESTINATION_CODE,
                message: constants.DUPLICATE_DESTINATION_MSG
            };
            return res.status(400).send(respObj);
        } */
        res.status(500).send('Server error');
    }
});

Listing records with Airtable API

I have an Airtable base that I can retrieve records from (see code below), but I'd like to get the value for other fields besides just "Location". Using "console.log('Retrieved: ', record.get('Location'));", how do I modify this line to include in the output the field values for a field called "Size" in addition to the "Location" field? I tried "console.log('Retrieved: ', record.get('Location', 'Size'));", but that didn't work.
Here's an excerpt from my code:
// Lists 3 records in Bins
base('Bins').select({
    // Selecting the first 3 records in Grid view:
    maxRecords: 3,
    view: "Grid view"
}).eachPage(function page(records, fetchNextPage) {
    // This function (`page`) will get called for each page of records.
    records.forEach(function(record) {
        console.log('Retrieved: ', record.get('Location'));
    });
    // To fetch the next page of records, call `fetchNextPage`.
    // If there are more records, `page` will get called again.
    // If there are no more records, `done` will get called.
    fetchNextPage();
}, function done(err) {
    if (err) { console.error(err); return; }
});
OUTPUT
Retrieved 170000118
Retrieved 170000119
Retrieved 170000120
I found this repo helpful when I ran into situations like this. Its description: "A wrapper for common functions for accessing data on an airtable.com database. All queries return promises."
Here is how it works if you want to avoid using an extra npm package. Ultimately, the gist of it is to either use request or some sort of promise-fulfillment method to retrieve the records.
import Airtable from 'airtable'
import _ from 'lodash'

const ENDPOINT_URL = 'https://api.airtable.com'
let API_KEY // Can only set the API key once per program

export default class AirTable {
    constructor({apiKey, databaseRef}) {
        if (!API_KEY) {
            API_KEY = apiKey
            Airtable.configure({
                endpointUrl: ENDPOINT_URL,
                apiKey: API_KEY
            });
        }
        this.base = Airtable.base(databaseRef)
        this.get = {
            single: this.getSingleRecordFrom.bind(this),
            all: this.getAllRecordsFrom.bind(this),
            match: this.getAllMatchedRecordsFrom.bind(this),
            select: this.getRecordsSelect.bind(this)
        }
        this.insert = this.createRecord.bind(this)
        this.add = this.insert
        this.create = this.insert
        this.update = this.updateRecord.bind(this)
        this.set = this.update
        this.remove = this.deleteRecord.bind(this)
        this.delete = this.remove
        this.destroy = this.remove
        this.rem = this.remove
    }

    async createRecord({tableName, data}) {
        return new Promise((resolve, reject) => {
            this.base(tableName).create(data, (err, record) => {
                if (err) {
                    console.error(err)
                    reject()
                    return
                }
                console.log("Created " + record.getId())
                resolve(record)
            })
        })
    }

    async updateRecord({tableName, id, data}) {
        return new Promise((resolve, reject) => {
            this.base(tableName).update(id, data, (err, record) => {
                if (err) {
                    console.error(err)
                    reject()
                    return
                }
                console.log("Updated " + record.getId())
                resolve(record)
            })
        })
    }

    async deleteRecord({tableName, id, data}) {
        return new Promise((resolve, reject) => {
            this.base(tableName).destroy(id, (err, record) => {
                if (err) {
                    console.error(err)
                    reject()
                    return
                }
                console.log("Deleted " + record.getId())
                resolve(record)
            })
        })
    }

    async getSingleRecordFrom({tableName, id}) {
        console.log(tableName, id)
        return new Promise((resolve, reject) => {
            this.base(tableName).find(id, function(err, record) {
                if (err) {
                    console.error(err)
                    reject(err)
                }
                resolve(record)
            })
            // console.log(record);
        })
    }

    async getAllRecordsFrom(tableName) {
        return this.getRecordsSelect({tableName, select: {} })
    }

    async getAllMatchedRecordsFrom({tableName, column, value}) {
        return this.getRecordsSelect({tableName, select: {filterByFormula: `${column} = ${value}`} }) // TODO: validate input
    }

    async getRecordsSelect({tableName, select}) {
        return new Promise((resolve, reject) => {
            let out = []
            this.base(tableName).select(select).eachPage((records, fetchNextPage) => {
                // Flatten single entry arrays, need to remove this hacky shit.
                _.map(records, r => {
                    _.forOwn(r.fields, (value, key) => { // If array is single
                        if (_.isArray(value) && value.length == 1 && key != 'rooms') {
                            r.fields[key] = value[0]
                        }
                    });
                })
                out = _.concat(out, records)
                fetchNextPage();
            }, (err) => {
                if (err) {
                    console.error(err)
                    reject(err)
                } else {
                    // console.log(JSON.stringify(out, null, 4))
                    resolve(out)
                }
            })
        })
    }
}
Hope this makes sense. Also, building an API proxy that fetches a whole table, or even using Express to fetch record IDs as arrays, can work as well.
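For example, an Express proxy route on top of the wrapper above might look roughly like this (a sketch only; the module path, route path, app ID and port are made up for illustration):
import express from 'express'
import AirTable from './airtable-wrapper' // wherever the class above lives

const app = express()
const airtable = new AirTable({ apiKey: process.env.AIRTABLE_API_KEY, databaseRef: 'appXXXXXXXXXXXXXX' })

app.get('/bins', async (req, res) => {
    try {
        // getAllRecordsFrom takes the table name directly
        const records = await airtable.get.all('Bins')
        // Return just the ids and fields, or map to whichever shape the client needs
        res.json(records.map(r => ({ id: r.id, fields: r.fields })))
    } catch (err) {
        res.status(500).json({ error: 'Failed to fetch records' })
    }
})

app.listen(3000)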
You can use this line of code:
records.forEach(function(record) {
    console.log('Retrieved: ', record.get('Location') + ' ' + record.get('Size'));
});
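As a side note, if you want every field at once rather than one record.get call per field, the Airtable.js record object also exposes a fields property (an object keyed by field name), so something like this should work as well:
records.forEach(function(record) {
    console.log('Retrieved: ', record.get('Location'), record.get('Size'));
    // Or dump all fields for inspection:
    console.log(record.fields);
});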

How can I avoid logging into SalesForce databases on every router in Express.js?

I need to do this to log into the Salesforce database and pass a query. I will be passing a lot of queries across many Express.js routers, and it's a real pain to log in on every router. Please let me know if you know how I can avoid this.
var conn = new jsforce.Connection({
    oauth2: salesforce_credential.oauth2
});
var username = salesforce_credential.username;
var password = salesforce_credential.password;

// I want to avoid this login on every router
conn.login(username, password, function(err, userInfo) {
    if (err) {
        return console.error(err);
    }
    conn.query("SELECT id FROM Sourcing__c WHERE id = 'req.session.ref'", function(err, result) {
        if (err) {
            return console.error(err);
        }
        if (result.records.length === 0) {
            req.session.ref = "";
        }
        var body = {
            "Auth__c": req.user.id,
            "Stus__c": "Pending - New Hire",
            "Record": "012lvIAC",
            "Sourcing__c": req.session.ref
        };
        conn.sobject("SFDC_Employee__c").create(body, function(err, ret) {
            if (err || !ret.success) {
                return console.error(err, ret);
            }
            console.log("Created record id : " + ret.id);
            // ...
        });
    });
});
You may save the login status and check it every time you do a query. Here I add a property on conn (the instance of jsforce.Connection). I'm not sure, but I think there may be a method or property that shows the login status of the conn; you may dig into its documentation.
var conn = new jsforce.Connection({
    oauth2: salesforce_credential.oauth2
});
var username = salesforce_credential.username;
var password = salesforce_credential.password;
conn._isLogin = false;

// here in your route handler
if (!conn._isLogin) {
    conn.login(username, password, function(err, userInfo) {
        if (err) {
            return console.error(err);
        }
        conn._isLogin = true;
        doQuery(conn);
    });
} else {
    doQuery(conn);
}

function doQuery(conn) {
    conn.query("SELECT id FROM Sourcing__c WHERE id = 'req.session.ref'", function(err, result) {
        if (err) {
            return console.error(err);
        }
        if (result.records.length === 0) {
            req.session.ref = "";
        }
        var body = {
            "Auth__c": req.user.id,
            "Stus__c": "Pending - New Hire",
            "Record": "012lvIAC",
            "Sourcing__c": req.session.ref
        };
        conn.sobject("SFDC_Employee__c").create(body, function(err, ret) {
            if (err || !ret.success) {
                return console.error(err, ret);
            }
            console.log("Created record id : " + ret.id);
            // ...
        });
    });
}
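Another option, if you'd rather not repeat even the if (!conn._isLogin) check in each route, is to move the login into an Express middleware that runs before your routes. This is only a minimal sketch under the same assumptions as above (conn, username and password are in scope); it does not handle session expiry or re-login:
// Log in once, lazily, and share the connection with every route via req.
var loginPromise = null;

function ensureSalesforceLogin(req, res, next) {
    if (!loginPromise) {
        // The login is started only once; later requests reuse the same promise.
        loginPromise = new Promise(function(resolve, reject) {
            conn.login(username, password, function(err, userInfo) {
                if (err) return reject(err);
                resolve(conn);
            });
        });
    }
    loginPromise
        .then(function(connection) {
            req.sfConn = connection; // routes can now call req.sfConn.query(...)
            next();
        })
        .catch(next);
}

// Register it before your routes:
// app.use(ensureSalesforceLogin);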

Getting data from rethinkdb database, manipulating said data, then updating the database with the manipulated docs

I am looking to do a get, run a function on the results that does some manipulation by updating a field, and then put those documents back into the database. Really, my issue is being able to chain together multiple DB calls. I have been struggling with this for the past week or so. Any suggestions appreciated, thanks.
Here is what I have tried so far but I am receiving an error:
function geocode_cleanup(request, response, next) {
    r.table('dealer_locations').filter(r.row('geodata').match('No geodata found.'))
        .do(function(row) {
            var geodata = opencage_geocoder.geocode(row.Address, function(error, response) {
                if (error) {
                    console.log("Error.");
                    row.geodata = "No geodata found.";
                    row.active = true;
                } else if (response.length == 0) {
                    console.log("Empty response.");
                } else {
                    console.log("Success.");
                    console.log(response);
                    var latitude = response[0].latitude;
                    var longitude = response[0].longitude;
                    row.geodata = r.point(longitude, latitude);
                    row.active = true;
                }
            });
            return r.table('dealer_locations').update({
                geodata: geodata
            })
        }).run(conn, function(error, cursor) {
            response.setHeader("Content-Type", "application/json");
            if (error) {
                handleError(response, error);
            } else {
                cursor.toArray(function(error, results) {
                    if (error) {
                        handleError(response, error);
                    } else {
                        response.send(results);
                    }
                });
            }
            next();
        })
};
Also, this returns the desired results in the response, but the second DB action never happens, I think because I am still inside the same DB connection:
function geocode_cleanup(request, response, next) {
    var conn = request._rdbConn;
    r.table('dealer_locations').filter({geodata: "No geodata found."}).run(conn, function(error, cursor) {
        if (error) {
            handleError(response, error);
        } else {
            cursor.toArray(function(error, results) {
                if (error) {
                    handleError(response, error);
                } else {
                    var i = 1;
                    async.forEach(results, function(item, callback) {
                        var address = (item.Address + " " + item.City).toString();
                        opencage_geocoder.geocode(address, function(err, res) {
                            if (err) {
                                console.log(i);
                                console.log("Error.");
                                item.id = i;
                                item.geodata = "No geodata found.";
                                item.active = true;
                                i++;
                                callback();
                            } else if (res.length == 0) {
                                console.log(i);
                                console.log("Empty response.");
                                i++;
                                callback();
                            } else {
                                console.log(i);
                                console.log("Success.");
                                console.log(res);
                                var latitude = res[0].latitude;
                                console.log(i + " " + latitude);
                                var longitude = res[0].longitude;
                                console.log(i + " " + longitude);
                                item.id = i;
                                item.geodata = r.point(longitude, latitude);
                                item.active = true;
                                i++;
                                callback();
                            }
                        });
                    }, function() {
                        r.table('dealer_locations').insert(results, {
                            conflict: "replace"
                        }).run(request._rdbConn, function(error, results) {
                            if (error) {
                                console.log("Data not inserted!");
                            } else {
                                console.log("Data inserted!");
                            }
                        });
                        console.log("Done!");
                        response.send(results);
                    });
                }
            })
        }
    })
}
Here's a possible solution which uses promises to organize the code a little bit.
// Guarantee support for promises and provide the `promisify` function
var Promise = require('bluebird');
// Promisify the geocode function to make it easier to use
// (the context option keeps `this` bound to the geocoder object)
var geocode = Promise.promisify(opencage_geocoder.geocode, {context: opencage_geocoder});

function geocode_cleanup(request, response, next) {
    var conn = request._rdbConn;
    r
        .table('dealer_locations')
        .filter(r.row('geodata').match('No geodata found.'))
        .coerceTo('array')
        .run(conn)
        .then(function (rows) {
            // This promise will be resolved when all rows have been geocoded
            // We map the rows into an array of promises, which is what Promise.all takes
            return Promise.all(rows.map(function (row) {
                return geocode(row.Address)
                    .then(function (response) {
                        console.log("Success.");
                        var latitude = response[0].latitude;
                        var longitude = response[0].longitude;
                        row.geodata = r.point(longitude, latitude);
                        row.active = true;
                        // Return the row
                        return row;
                    });
            }));
        })
        .then(function (rows) {
            // Now that all `dealer_locations` have been geocoded, write them back
            return r
                .table('dealer_locations')
                .insert(rows, {conflict: "update", return_changes: true})
                .run(conn);
        })
        .then(function (results) {
            // Send the response
            response.setHeader("Content-Type", "application/json");
            response.send(results);
            return;
        })
        .catch(function (err) {
            return handleError(response, err);
        });
};
Some problems I noticed with your code:
1. Use of do
r.table('dealer_locations').filter(r.row('geodata').match('No geodata found.'))
    .do(function(row) {
        var geodata = opencage_geocoder.geocode ...
    })
In this code snippet, you use a JS function inside of do. You can't do that. Remember that what happens inside of do happens on the RethinkDB server (not in your Node.js server). Your RethinkDB server has no knowledge of your opencage_geocoder function, so this wouldn't work.
Whatever do returns must be a valid ReQL query or ReQL expression. You can't execute arbitrary JavaScript inside of it.
If you want to run JavaScript with your query results, you have to .run the query and then do whatever you want to do inside the callback or .then function. At that point, that code will get executed in JavaScript and not in your RethinkDB server.
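As a rough sketch of that shape, reusing the table and filter from the question:
// Run the ReQL part first; the JavaScript work happens only after .run
r.table('dealer_locations')
    .filter(r.row('geodata').match('No geodata found.'))
    .run(conn)
    .then(function (cursor) { return cursor.toArray(); })
    .then(function (rows) {
        // From here on this is plain JavaScript on your Node.js server,
        // so opencage_geocoder.geocode can safely be called for each row.
        rows.forEach(function (row) {
            // geocode row.Address here ...
        });
    });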
2. Use of update
return r.table('dealer_locations').update({
    geodata: geodata
})
The update method can only update a single document; you can't pass it an array of documents. In this scenario you would have needed to do r.table().get().update() in order for this to work, because you have to be referencing a single document when you update it.
If you have an array of documents that you want to update, you can use the forEach method.
r.table('hello')
    .merge({
        'new_property': 'hello!'
    })
    .forEach(function (row) {
        // Insert that property into the document
        return r.table('hello').get(row.id).update(row);
    })
You can also do this (which you are already doing):
r.table('hello')
    .merge({
        'new_property': 'hello!'
    })
    .do(function (rows) {
        // Insert that property into the document
        return r.table('hello')
            .insert(rows, {conflict: "update", return_changes: true});
    })
OK, I have a suggestion. This queries for the documents you're interested in, modifies them (on your app server, not in the db) and then reinserts them using the nifty conflict: 'update' option. It also uses promises because I think that's a bit cleaner.
function geocode_cleanup(request, response, next) {
    r.table('dealer_locations')
        .filter(r.row('geodata').match('No geodata found.'))
        .run(conn).then(function(cursor) {
            return cursor.toArray().then(function getGeocodes(rows) {
                return rows.map(function getGeocode(row) {
                    opencage_geocoder.geocode(row.Address, function(error, response) {
                        if (error) {
                            console.log("Error.");
                            row.geodata = "No geodata found.";
                            row.active = true;
                        } else if (response.length == 0) {
                            console.log("Empty response.");
                        } else {
                            console.log("Success.");
                            console.log(response);
                            var latitude = response[0].latitude;
                            var longitude = response[0].longitude;
                            row.geodata = r.point(longitude, latitude);
                            row.active = true;
                        }
                    });
                    return row;
                });
            });
        }).then(function doneGeocoding(modified_rows) {
            return r.table('dealer_locations')
                .insert(modified_rows, {conflict: "update", return_changes: true})('changes')
                .coerceTo('array')
                .run(conn);
        }).then(function finishResponse(changes) {
            response.setHeader("Content-Type", "application/json");
            response.send(changes);
            next();
        }).catch(function(err) {
            // handle errors here
        });
};
Caveat emptor, I haven't run this, so there may be syntax errors and things
