Save deletedAt Attribute of an Object in Sequelize with Paranoid Setting - javascript

I want to update a Sequelize object (with paranoid: true) and immediately delete it. The code below works, but it issues two UPDATE queries inside a transaction block.
const object = await Car.findOne({});
object.wheels = 3;
object.canRun = false;
await sequelize.transaction({ isolationLevel: Transaction.ISOLATION_LEVELS.REPEATABLE_READ }, async (transaction) => {
  await object.save({ transaction });
  await object.destroy({ transaction });
});
I've tried to make it a single update, but the code below doesn't work.
const object = await Car.findOne({});
object.wheels = 3;
object.canRun = false;
object.deletedAt = new Date();
await object.save({ paranoid: false });
Is there any way to do it with only 1 query?
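One thing worth trying (a hedged sketch, not verified against a specific Sequelize version) is to skip the instance save entirely and push all three columns, including deletedAt, through a single bulk update:
// Hypothetical single-UPDATE variant: write wheels, canRun and deletedAt together.
// paranoid: false tells Sequelize not to add its usual "deletedAt IS NULL" filter,
// so the soft-delete column can be written like any other attribute.
await Car.update(
  { wheels: 3, canRun: false, deletedAt: new Date() },
  { where: { id: object.id }, paranoid: false }
);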

Related

Can't update firebase collection field - Expected type 'ya', but it was: a custom Ia object

I am trying to make a barbershop web app where a customer can see a list of free appointments, and when they reserve a free appointment I want to delete that field from Firebase.
I have a collection which represents one barber.
This is how it looks in Firebase.
As you can see, radno_vrijeme is an object (a map in Firebase) which contains 6 arrays, and each array holds a list of free working hours.
In my function I am able to do everything except the last line, where I need to update the Firebase collection.
const finishReservation = async () => {
  try {
    const freeTimeRef = collection(db, `${barber}`);
    const q = query(freeTimeRef);
    const querySnap = await getDoc(q);
    querySnap.forEach(async (doc) => {
      const radnoVrijeme = doc.data().radno_vrijeme;
      // Find the index of the hour you want to delete
      const index = radnoVrijeme["Mon"].indexOf(hour);
      // Remove the hour from the array
      radnoVrijeme["Mon"].splice(index, 1);
      // Update the document in the collection
      console.log(radnoVrijeme);
      const radnoVrijemeMap = new Map(Object.entries(radnoVrijeme));
      await freeTimeRef.update({ radno_vrijeme: radnoVrijemeMap });
    });
  } catch (error) {
    console.log(error);
  }
};
I tried to pass it as a JSON-stringified object, but it didn't work. I always get this error:
"FirebaseError: Expected type 'ya', but it was: a custom Ia object"
When you are fetching multiple documents using a collection reference or a query, you must use getDocs():
const finishReservation = async () => {
  try {
    const freeTimeRef = collection(db, `${barber}`);
    const q = query(freeTimeRef);
    const querySnap = await getDocs(q);
    const updates = [];
    querySnap.forEach((d) => {
      const radnoVrijeme = d.data().radno_vrijeme;
      const index = radnoVrijeme["Mon"].indexOf(hour);
      radnoVrijeme["Mon"].splice(index, 1);
      // Firestore only accepts plain JavaScript objects as field values, so pass
      // the object itself rather than wrapping it in a Map.
      updates.push(updateDoc(d.ref, { radno_vrijeme: radnoVrijeme }));
    });
    await Promise.all(updates);
    console.log("Documents updated");
  } catch (error) {
    console.log(error);
  }
};
getDoc() is used to fetch a single document using a document reference.
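For contrast, a minimal sketch of both calls in the modular SDK (the document ID below is hypothetical):
import { collection, doc, getDoc, getDocs, query } from "firebase/firestore";

// getDoc() takes a document reference and resolves with a single DocumentSnapshot.
const singleSnap = await getDoc(doc(db, `${barber}`, "someDocumentId"));

// getDocs() takes a collection reference or query and resolves with a QuerySnapshot.
const multiSnap = await getDocs(query(collection(db, `${barber}`)));
console.log(singleSnap.exists(), multiSnap.size);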

Loop to create or update nodes in Neo4j

I am new to Neo4j, so I'm quite stuck with looping through some values.
I have a list of skill-to-skill strings:
let data = [
  'big_data, business_intelligence',
  'big_data, data_collection',
  'big_data, economic_growth',
  'big_data, economy'
]
And I want to create or update the relation between the left side and the right side:
for (let item of data) {
  CreateSkillToSkillRelation(item);
}
const CreateSkillToSkillRelation = async (relation) => {
  let mainSkill = relation.split(",")[0];
  let secundarySkill = relation.split(",")[1];
  try {
    // Check if the relationship exists
    let { records } = await session.run(
      "MATCH(main:SKILL {name:$mainSkill}) -[relation:SKILL_TO_SKILL]-> (secundary:SKILL {name:$secundarySkill}) RETURN relation",
      { mainSkill, secundarySkill }
    );
    let count =
      records[0]?._fields[0].properties.count > 0
        ? records[0]._fields[0].properties.count + 1
        : 1;
    // If count is greater than 1 then let's update the counter
    if (count > 1) {
      await session.run(
        "MATCH(main:SKILL {name:$mainSkill}) -[relation:SKILL_TO_SKILL]-> (secundary:SKILL {name:$secundarySkill}) SET relation.count = $count RETURN main, secundary",
        {
          mainSkill,
          secundarySkill,
          count,
        }
      );
    }
    // Otherwise the skill relation is not created, so let's create one
    else {
      await session.run(
        "CREATE(main:SKILL {name:$mainSkill}) -[:SKILL_TO_SKILL {count:$count}]-> (secundary:SKILL {name:$secundarySkill}) RETURN main, secundary",
        {
          mainSkill,
          secundarySkill,
          count,
        }
      );
    }
    await session.close();
  } catch (error) {
    console.log(error);
  }
};
But every time I run this I get the following error: Neo4jError: Queries cannot be run directly on a session with an open transaction; either run from within the transaction or use a different session.
Any idea how I can solve this?
for (let item of data) {
  CreateSkillToSkillRelation(item);
}
is not awaiting the promises you create, so you are basically trying to run all of these promises concurrently against a single session, which only supports a single concurrent transaction.
You should create a session in each call of CreateSkillToSkillRelation, or await each call to it while using a single session.
Note also that you close the session at the end of CreateSkillToSkillRelation, but only on success; I'd suggest moving await session.close(); into a finally block, as sketched below.
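A minimal sketch of that shape (assuming a driver instance is in scope, and leaving the original queries untouched):
const CreateSkillToSkillRelation = async (relation) => {
  const [mainSkill, secundarySkill] = relation.split(",").map((s) => s.trim());
  const session = driver.session(); // one session per call, so calls cannot collide
  try {
    // ... run the MATCH / SET / CREATE queries exactly as above ...
  } catch (error) {
    console.log(error);
  } finally {
    await session.close(); // closes on success and on error
  }
};

// Await each call so only one transaction runs at a time.
for (const item of data) {
  await CreateSkillToSkillRelation(item);
}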
The answer from #just_another_dotnet_dev is absolutely correct: you run asynchronous functions in a loop, and close the session in one of them.
The Cypher language is very rich, and you can use it to do everything you tried to do with a loop in JavaScript. Something like this, using UNWIND and MERGE:
const CreateSkillToSkillRelations = async (data) => {
  const session = driver.session();
  try {
    let { records } = await session.run(
      `UNWIND $data AS row
       WITH split(row, ',') AS rels
       WITH trim(rels[0]) AS mainSkill,
            trim(rels[1]) AS secundarySkill
       MERGE (main:SKILL {name: mainSkill})
         -[relation:SKILL_TO_SKILL]->
         (secundary:SKILL {name: secundarySkill})
       ON CREATE SET relation.count = 1
       ON MATCH SET relation.count = relation.count + 1
       RETURN main, relation, secundary`,
      { data }
    );
  } catch (error) {
    console.log(error);
  } finally {
    await session.close()
  }
};
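Called once with the whole list, this replaces the JavaScript loop with a single round trip; for example:
// One query creates or updates every SKILL_TO_SKILL relationship in the list.
await CreateSkillToSkillRelations(data);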

How to correctly use 'async, await and promises' in nodejs, while allocating values to a variable returned from a time-consuming function?

Problem Statement:
Our aim is to allocate values to the array ytQueryAppJs, which are returned from a time-consuming function httpsYtGetFunc().
The values in ytQueryAppJs need to be used many times later in the code, so the array needs to be filled before the code proceeds further.
There are many other arrays like ytQueryAppJs (one of them is ytCoverAppJs) that need to be allocated values the same way as ytQueryAppJs.
The values in ytCoverAppJs in turn require the values from ytQueryAppJs, so a solution with clean code would be highly appreciated.
(I am an absolute beginner. I have never used async, await or promises and I'm unaware of the correct way to use it. Please guide.)
Flow (to focus on):
The user submits a queryValue in index.html.
An array ytQueryAppJs is logged in the console, based on the query.
Expected Log in Console (similar to):
Current Log in Console:
Flow (originally required by the project):
User submits query in index.html.
The values of the arrays ytQueryAppJs, ytCoverAppJs, ytCoverUniqueAppJs, ytLiveAppJs, and ytLiveUniqueAppJs get logged in the console, based on the query.
Code to focus on, from 'app.js':
// https://stackoverflow.com/a/14930567/14597561
function compareAndRemove(removeFromThis, compareToThis) {
  return (removeFromThis = removeFromThis.filter(val => !compareToThis.includes(val)));
}

// Declaring variables for the function 'httpsYtGetFunc'
let apiKey = "";
let urlOfYtGetFunc = "";
let resultOfYtGetFunc = "";
let extractedResultOfYtGetFunc = [];

// This function GETs data, parses it, pushes required values in an array.
async function httpsYtGetFunc(queryOfYtGetFunc) {
  apiKey = "AI...MI"
  urlOfYtGetFunc = "https://www.googleapis.com/youtube/v3/search?key=" + apiKey + "&part=snippet&q=" + queryOfYtGetFunc + "&maxResults=4&order=relevance&type=video";
  let promise = new Promise((resolve, reject) => {
    // GETting data and storing it in chunks.
    https.get(urlOfYtGetFunc, (response) => {
      const chunks = []
      response.on('data', (d) => {
        chunks.push(d)
      })
      // Parsing the chunks
      response.on('end', () => {
        resultOfYtGetFunc = JSON.parse((Buffer.concat(chunks).toString()))
        // console.log(resultOfYtGetFunc)
        // Extracting useful data, and allocating it.
        for (i = 0; i < (resultOfYtGetFunc.items).length; i++) {
          extractedResultOfYtGetFunc[i] = resultOfYtGetFunc.items[i].id.videoId;
          // console.log(extractedResultOfYtGetFunc);
        }
        resolve(extractedResultOfYtGetFunc);
      })
    })
  })
  let result = await promise;
  return result;
}

app.post("/", function(req, res) {
  // Accessing the queryValue, user submitted in index.html. We're using body-parser package here.
  query = req.body.queryValue;
  // Fetching top results related to user's query and putting them in the array.
  ytQueryAppJs = httpsYtGetFunc(query);
  console.log("ytQueryAppJs:");
  console.log(ytQueryAppJs);
});
Complete app.post method from app.js:
(For better understanding of the problem.)
app.post("/", function(req, res) {
// Accessing the queryValue user submitted in index.html.
query = req.body.queryValue;
// Fetcing top results related to user's query and putting them in the array.
ytQueryAppJs = httpsYtGetFunc(query);
console.log("ytQueryAppJs:");
console.log(ytQueryAppJs);
// Fetching 'cover' songs related to user's query and putting them in the array.
if (query.includes("cover") == true) {
ytCoverAppJs = httpsYtGetFunc(query);
console.log("ytCoverAppJs:");
console.log(ytCoverAppJs);
// Removing redundant values.
ytCoverUniqueAppJs = compareAndRemove(ytCoverAppJs, ytQueryAppJs);
console.log("ytCoverUniqueAppJs:");
console.log(ytCoverUniqueAppJs);
} else {
ytCoverAppJs = httpsYtGetFunc(query + " cover");
console.log("ytCoverAppJs:");
console.log(ytCoverAppJs);
// Removing redundant values.
ytCoverUniqueAppJs = compareAndRemove(ytCoverAppJs, ytQueryAppJs);
console.log("ytCoverUniqueAppJs:");
console.log(ytCoverUniqueAppJs);
}
// Fetching 'live performances' related to user's query and putting them in the array.
if (query.includes("live") == true) {
ytLiveAppJs = httpsYtGetFunc(query);
console.log("ytLiveAppJs:");
console.log(ytLiveAppJs);
// Removing redundant values.
ytLiveUniqueAppJs = compareAndRemove(ytLiveAppJs, ytQueryAppJs.concat(ytCoverUniqueAppJs));
console.log("ytLiveUniqueAppJs:");
console.log(ytLiveUniqueAppJs);
} else {
ytLiveAppJs = httpsYtGetFunc(query + " live");
console.log("ytLiveAppJs:");
console.log(ytLiveAppJs);
// Removing redundant values.
ytLiveUniqueAppJs = compareAndRemove(ytLiveAppJs, ytQueryAppJs.concat(ytCoverUniqueAppJs));
console.log("ytLiveUniqueAppJs:");
console.log(ytLiveUniqueAppJs);
}
// Emptying all the arrays.
ytQueryAppJs.length = 0;
ytCoverAppJs.length = 0;
ytCoverUniqueAppJs.length = 0;
ytLiveAppJs.length = 0;
ytLiveUniqueAppJs.length = 0;
});
Unfortunately you can't use async/await directly with the http module when making requests. You can install and use the axios module instead. In your case it will be something like this:
const axios = require('axios');

// Declaring variables for the function 'httpsYtGetFunc'
let apiKey = "";
let urlOfYtGetFunc = "";
let resultOfYtGetFunc = "";
let extractedResultOfYtGetFunc = [];

// This function GETs data, parses it, pushes required values in an array.
async function httpsYtGetFunc(queryOfYtGetFunc) {
  apiKey = "AI...MI"
  urlOfYtGetFunc = "https://www.googleapis.com/youtube/v3/search?key=" + apiKey + "&part=snippet&q=" + queryOfYtGetFunc + "&maxResults=4&order=relevance&type=video";
  const promise = axios.get(urlOfYtGetFunc)
    .then(response => {
      // do your data manipulations here; the API's JSON body is on response.data
    })
    .catch(err => {
      // decide what happens on error
    });
}
Or with async/await:
const data = await axios.get(urlOfYtGetFunc);
// Your data variable will be the axios response; the API's body is on its .data property
If you still want to catch errors with async/await, you can use try/catch:
try {
  const data = await axios.get(urlOfYtGetFunc);
} catch (err) {
  // In case of error do something
}
I have just looked at the code, and I think the issue is how you are handling the async code in the request handler. You are not awaiting the result of the call to httpsYtGetFunc in the handler body, so it returns before the promise has finished, which is why you get Promise { <pending> }.
Another issue is that the array extractedResultOfYtGetFunc is never re-initialised, so you may access indexes that don't exist. The method to add an item to an array is push.
To fix this you need to restructure your code slightly. A possible solution is something like this:
// Declaring variables for the function 'httpsYtGetFunc'
let apiKey = "";
let urlOfYtGetFunc = "";
let resultOfYtGetFunc = "";
let extractedResultOfYtGetFunc = [];

// This function GETs data, parses it, pushes required values in an array.
function httpsYtGetFunc(queryOfYtGetFunc) {
  apiKey = "AI...MI";
  urlOfYtGetFunc =
    "https://www.googleapis.com/youtube/v3/search?key=" +
    apiKey +
    "&part=snippet&q=" +
    queryOfYtGetFunc +
    "&maxResults=4&order=relevance&type=video";
  return new Promise((resolve, reject) => {
    // GETting data and storing it in chunks.
    https.get(urlOfYtGetFunc, (response) => {
      const chunks = [];
      response.on("data", (d) => {
        chunks.push(d);
      });
      // Parsing the chunks
      response.on("end", () => {
        // Initialising the array
        extractedResultOfYtGetFunc = [];
        resultOfYtGetFunc = JSON.parse(Buffer.concat(chunks).toString());
        // console.log(resultOfYtGetFunc)
        // Extracting useful data, and allocating it.
        for (let i = 0; i < resultOfYtGetFunc.items.length; i++) {
          // Adding the element to the array
          extractedResultOfYtGetFunc.push(resultOfYtGetFunc.items[i].id.videoId);
          // console.log(extractedResultOfYtGetFunc);
        }
        resolve(extractedResultOfYtGetFunc);
      });
    });
  });
}
app.post("/", async function (req, res) {
query = req.body.queryValue;
// Fetching top results related to user's query and putting them in the array.
ytQueryAppJs = await httpsYtGetFunc(query);
console.log("ytQueryAppJs:");
console.log(ytQueryAppJs);
});
Another option would be to use axios.
The code for this would just be:
app.post("/", async function (req, res) {
query = req.body.queryValue;
// Fetching top results related to user's query and putting them in the array.
try{
ytQueryAppJs = await axios.get(url); // replace with your URL
console.log("ytQueryAppJs:");
console.log(ytQueryAppJs);
} catch(e) {
console.log(e);
}
});
Using axios is quicker because you don't need to write promise wrappers around everything, which is otherwise required since Node's HTTP(S) libraries don't support promises out of the box.

Firebase async scheduler issue

I'm new to both Firebase and async JavaScript. I'm trying to create a scheduled task in Firebase to fetch a bunch of RSS URLs from a collection, parse them, and store the results inside another collection, but I'm getting Error: 4 DEADLINE_EXCEEDED: Deadline exceeded at Object.callErrorFromStatus when it executes.
const refreshRSS = functions.pubsub.schedule('every 30 mins').onRun(async context => {
  let newRSS = addRSS();
  return await newPodcasts;
});

addRSS = async () => {
  const newRSSFeed = new Array();
  let rssURLs = await db.collection('rssURLs').get();
  rssURLs.forEach(async rssURLsObject => {
    rss = rssURLsObject.data();
    let rssData = await parser.parseURL(rss.url);
    newRSSFeed.push(db.collection('rss').doc(encodeURIComponent(rss.url))
      .set(podcast));
  })
  return newRSSFeeds;
}
I also tried returning Promise.all(newRSS) inside refreshRSS, but it throws another error stating it is not iterable.
I'm not sure what exactly Firebase expects as the return value.
When you have to resolve promises inside a loop, map the snapshot's docs to an array of promises and await them all together with Promise.all. Something like this should work:
const refreshRSS = functions.pubsub.schedule('every 30 mins').onRun(async (context) => {
  // Await the writes and return the result so Cloud Functions waits for them.
  const newRSS = await addRSS();
  return newRSS;
});

addRSS = async () => {
  const rssURLs = await db.collection('rssURLs').get();
  // Map each document to a promise so all feeds are fetched and written in parallel.
  const promises = rssURLs.docs.map(async (rssURLsObject) => {
    const rss = rssURLsObject.data();
    const rssData = await parser.parseURL(rss.url);
    // Store the parsed feed for this URL in the 'rss' collection.
    return db.collection('rss').doc(encodeURIComponent(rss.url)).set(rssData);
  });
  // Wait for every write before returning.
  return Promise.all(promises);
}

Updating a MongoDB document from two different clients using nodejs native driver

I have a collection of users which I'd like to update regularly using several different APIs (where each has its own rate limits etc).
So I have a few cron jobs with the following similar structure:
const cursor_i = db.collection('users').find();
while (await cursor_i.hasNext()) {
  let user = await cursor_i.next();
  user.field_1 = 'Some New Stuff';
  const write_result = await db.collection('newusers').save(user);
  if (!write_result.result.ok) {
    console.log(write_result);
  }
}
The problem is that when a document is updated at the same time by more than one updater, only the last save() call takes effect.
To clarify, consider the following code:
const cursor_1 = db.collection('users').find();
const cursor_2 = db.collection('users').find();
let user_cursor_1 = await cursor_1.next(); // user_cursor_1 has the first user in the collection
let user_cursor_2 = await cursor_2.next(); // user_cursor_2 has the first user in the collection
user_cursor_1.new_field_1 = 'ok';
const write_result = await db.collection('users').save(user_cursor_1);
if (!write_result.result.ok) {
  console.log(write_result);
}
// first user in collection now has a new field named new_field_1 with the value 'ok'
user_cursor_2.new_field_2 = 'ok';
const write_result_2 = await db.collection('newusers').save(user_cursor_2);
if (!write_result_2.result.ok) {
  console.log(write_result);
}
// first user in collection now has a new field named new_field_2 with the value 'ok' but DOES NOT have new_field_1 anymore
And so the first user in the collection has been updated twice, but ends up reflecting only the second update.
I can think of a few ways to avoid this by implementing locks myself, but I'd guess MongoDB must have something built in to handle these cases.
You should find the users again after updating through the first cursor, like this:
const cursor_1 = db.collection("users").find();
let user_cursor_1 = await cursor_1.next(); // user_cursor_1 has the first user in the collection
user_cursor_1.new_field_1 = "ok";
const write_result = await db.collection("users").save(user_cursor_1);
if (!write_result.result.ok) {
  console.log(write_result);
}
const cursor_2 = db.collection("users").find();
let user_cursor_2 = await cursor_2.next(); // user_cursor_2 has the first user in the collection
user_cursor_2.new_field_2 = "ok";
const write_result_2 = await db.collection("newusers").save(user_cursor_2);
if (!write_result_2.result.ok) {
  console.log(write_result);
}
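As a side note (a sketch, not taken from the answer above): the usual way to let two writers touch the same document without clobbering each other's fields is to send partial updates with update operators such as $set, instead of save()ing the whole document back:
// Each updater sets only its own field, so concurrent updates to the same
// document don't overwrite each other's changes.
await db.collection('users').updateOne(
  { _id: user_cursor_1._id },
  { $set: { new_field_1: 'ok' } }
);

await db.collection('users').updateOne(
  { _id: user_cursor_2._id },
  { $set: { new_field_2: 'ok' } }
);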
