MongoError: FieldPath field names may not start with '$' - javascript

I'm following the Udemy course by Brad Schiff — Learn JavaScript Full-Stack from Scratch — and on section 7-7, Live Search Feature, I'm having problems with searching for posts. The problem appeared after the installation of axios.
I tried with the exact code from the course but still see the same error: MongoError: FieldPath field names may not start with '$'
The code with the search feature:
import axios from "axios";
import DOMPurify from "dompurify";
// Live-search overlay controller: opens/closes the search overlay and fires
// debounced POST /search requests as the user types.
export default class search {
  constructor() {
    this.injectHTML();
    this.headerSearchIcon = document.querySelector(".header-search-icon");
    this.overlay = document.querySelector(".search-overlay");
    this.closeIcon = document.querySelector(".close-live-search");
    this.inputField = document.querySelector("#live-search-field");
    this.resultsArea = document.querySelector(".live-search-results");
    this.loaderIcon = document.querySelector(".circle-loader");
    this.typingWaitTimer;
    this.previousValue = "";
    this.events();
  } // FIX: this closing brace was missing in the original paste, which made
  // everything below part of the constructor and a syntax error.

  // Registers all DOM event listeners.
  events() {
    this.inputField.addEventListener("keyup", () => this.keyPressHandler());
    this.closeIcon.addEventListener("click", () => this.closeOverlay());
    this.headerSearchIcon.addEventListener("click", (e) => {
      e.preventDefault();
      this.openOverlay();
    });
  }

  // Debounce: only send a request 750ms after the user stops typing,
  // and only when the value actually changed and is non-empty.
  keyPressHandler() {
    let value = this.inputField.value;
    if (value == "") {
      clearTimeout(this.typingWaitTimer);
      this.hideLoaderIcon();
      this.hideResultsArea();
    }
    if (value != "" && value != this.previousValue) {
      clearTimeout(this.typingWaitTimer);
      this.showLoaderIcon();
      this.hideResultsArea();
      this.typingWaitTimer = setTimeout(() => this.sendRequest(), 750);
    }
    this.previousValue = value;
  }

  // POSTs the current search term and renders the server's matching posts.
  // NOTE(review): methods referenced above (injectHTML, openOverlay,
  // closeOverlay, show/hideLoaderIcon, hideResultsArea, renderResultsHTML)
  // are not in this snippet — presumably truncated from the paste.
  sendRequest() {
    axios
      .post("/search", { searchTerm: this.inputField.value })
      .then((response) => {
        console.log(response.data);
        this.renderResultsHTML(response.data);
      })
      .catch(() => {
        alert("Hello, the request failed.");
      });
  }
}
The other code in Post.js file
// Runs a MongoDB aggregation built from caller-supplied stages plus a
// standard author $lookup/$project, then resolves with cleaned post objects.
// visitorId is used to flag posts owned by the current visitor.
Post.reusablePostQuery = function (uniqueOperations, visitorId) {
  return new Promise(async function (resolve, reject) {
    try {
      let aggOperations = uniqueOperations.concat([
        {
          $lookup: {
            from: "users",
            localField: "author",
            foreignField: "_id",
            as: "authorDocument",
          },
        },
        {
          $project: {
            title: 1,
            body: 1,
            createdDate: 1,
            authorId: "$author",
            author: { $arrayElemAt: ["$authorDocument", 0] },
          },
        },
      ]);
      let posts = await postsCollection.aggregate(aggOperations).toArray();
      // clean up author property in each post object
      posts = posts.map(function (post) {
        post.isVisitorOwner = post.authorId.equals(visitorId);
        post.authorId = undefined;
        post.author = {
          username: post.author.username,
          avatar: new User(post.author, true).avatar,
        };
        return post;
      });
      resolve(posts);
    } catch (err) {
      // FIX: previously a database/aggregation error inside this async
      // executor left the promise pending forever (unhandled rejection).
      reject(err);
    }
  });
};
// Resolves with a single post by its ObjectID string, or rejects when the id
// is malformed or no post matches. visitorId flags ownership on the result.
Post.findSingleById = function (id, visitorId) {
  return new Promise(async function (resolve, reject) {
    // Guard against non-string or invalid ObjectID input before hitting the DB.
    if (typeof id != "string" || !ObjectID.isValid(id)) {
      reject();
      return;
    }
    try {
      let posts = await Post.reusablePostQuery(
        [{ $match: { _id: new ObjectID(id) } }],
        visitorId
      );
      if (posts.length) {
        resolve(posts[0]);
      } else {
        reject();
      }
    } catch (err) {
      // FIX: a failing query previously left this promise pending forever;
      // also removed a leftover debug console.log of the found post.
      reject(err);
    }
  });
};
// Newest-first list of all posts written by the given author.
Post.findByAuthorId = function (authorId) {
  const operations = [
    { $match: { author: authorId } },
    { $sort: { createdDate: -1 } },
  ];
  return Post.reusablePostQuery(operations);
};
// Full-text search over posts, sorted by relevance (text score).
// Rejects when searchTerm is not a string.
Post.search = function (searchTerm) {
  return new Promise(async (resolve, reject) => {
    if (typeof searchTerm == "string") {
      try {
        let posts = await Post.reusablePostQuery([
          { $match: { $text: { $search: searchTerm } } },
          // FIX: project the text-search score handling before the $sort;
          // without this stage the driver raises
          // "MongoError: FieldPath field names may not start with '$'".
          { $project: { _id: true, score: false } },
          { $sort: { score: { $meta: "textScore" } } },
        ]);
        resolve(posts);
      } catch (err) {
        reject(err);
      }
    } else {
      reject();
    }
  });
};
the code in postController.js file
exports.search = function (req, res) {
Post.search(req.body.searchTerm)
.then((posts) => {
res.json(posts);
})
.catch(() => {
res.json([]);
});
};

I was also facing this issue for days, and after looking through different answers and quite a bit of documentation, I figured out that you need to make a slight change in order to make this live search work.
The code for the search function in the Post.js file should be this:
// Full-text search over posts, sorted by relevance.
// The extra $project stage is the fix for
// "MongoError: FieldPath field names may not start with '$'".
Post.search = function (searchTerm) {
return new Promise(async (resolve, reject) => {
// Only accept string input; anything else rejects immediately.
if (typeof searchTerm == "string") {
// sort by search score (or relevancy to search string)
let posts = await Post.reusablePostQuery([
{ $match: { $text: { $search: searchTerm } } },
// Project stage inserted before the $meta sort — required here so the
// driver accepts sorting on the text score.
{ $project: { _id: true, "score": false } },
{ $sort: { score: { $meta: "textScore" } } },
]);
resolve(posts);
} else {
reject();
}
});
};
Try this and it will work fine.

Related

How to speed up Sequelize findAndCountAll?

I have pagination made with Sequelize
router.js
// POST /getall — paginated candidate search with Sequelize.
// Filters by job level / experience / job type (via includes) and by
// free-text candidate name and position (via iLike clauses).
router.post('/getall', async (req, res) => {
  try {
    const { page, limit, order_by, order_direction } = req.query;
    const { candidate, position, filters } = req.body;

    const include = [
      {
        model: SortedLvl,
        where: {
          lvl: filters.jobLvl.name,
          months: { [Op.gte]: filters.jobMinExp.value },
        },
      },
      {
        model: SortedLastJob,
        where: { jobposition: filters.jobType.name },
      },
      {
        model: SortedSkills,
      },
    ];

    let search = {};
    const order = [];

    // Build an OR filter matching every whitespace/comma-separated term
    // against first_name or last_name, case-insensitively.
    let filterCandidate = {};
    if (candidate) {
      // FIX: `t` was an implicit global; String.split always yields strings,
      // so the old `typeof el == 'number'` branch was dead code; and the
      // other branch doubled the wildcards (`%%term%%`), which iLike treats
      // the same as `%term%` anyway.
      const terms = candidate.split(/[ ,]+/);
      const nameClauses = [];
      terms.forEach((term) => {
        nameClauses.push(
          { first_name: { [Op.iLike]: `%${term}%` } },
          { last_name: { [Op.iLike]: `%${term}%` } }
        );
      });
      filterCandidate = { [Op.or]: nameClauses };
    }

    let filterPosition = {};
    if (position) {
      filterPosition = { position: { [Op.iLike]: `%${position}%` } };
    }

    // FIX: plain objects have no .length, so the original
    // `filterCandidate.length > 0` check was always false and the filters
    // were silently never applied.
    if (Object.keys(filterCandidate).length > 0 || Object.keys(filterPosition).length > 0) {
      search = { where: { ...filterCandidate, ...filterPosition } };
    }

    if (order_by && order_direction) {
      order.push([order_by, order_direction]);
    }

    // NOTE(review): this transform is defined but intentionally NOT passed to
    // paginate() to preserve the existing response shape; pass it as the
    // seventh argument if formatted records are wanted.
    const transform = (records) =>
      records.map((record) => ({
        id: record.id,
        name: record.name,
        date: moment(record.createdAt).format('D-M-Y H:mm A'),
      }));

    const products = await paginate(Candidate, page, limit, search, order, include);
    return res.json({
      success: true,
      data: products,
    });
  } catch (error) {
    console.log('Failed to fetch products', error);
    return res.status(500).send({
      success: false,
      message: 'Failed to fetch products',
    });
  }
});
paginate.js
/**
 * Generic findAndCountAll pagination helper.
 *
 * @param {object} model      Sequelize model (anything exposing findAndCountAll).
 * @param {number|string} pageSize  1-based page number (defaults to 1).
 * @param {number|string} pageLimit Rows per page (defaults to 10).
 * @param {object} search     Extra query options, typically `{ where: ... }`.
 * @param {Array} order       Sequelize order tuples.
 * @param {Array} [include]   Sequelize include definitions.
 * @param {Function} [transform] Optional mapper applied to the result rows.
 * @param {Array} [attributes]   Optional attribute whitelist.
 * @param {object} [settings]    Unused; kept for signature compatibility.
 * @returns {Promise<object>} { previousPage, currentPage, nextPage, total, limit, data }
 * @throws Propagates any findAndCountAll error to the caller.
 */
const paginate = async (model, pageSize, pageLimit, search = {}, order = [], include, transform, attributes, settings) => {
  const limit = parseInt(pageLimit, 10) || 10;
  const page = parseInt(pageSize, 10) || 1;

  // Build the query options; `distinct` keeps count correct with includes.
  let options = {
    offset: getOffset(page, limit),
    limit: limit,
    distinct: true,
    include: include,
  };
  if (Object.keys(search).length) {
    options = { ...options, ...search };
  }
  if (attributes && attributes.length) {
    options.attributes = attributes;
  }
  if (order && order.length) {
    options.order = order;
  }

  // FIX: the original referenced an undefined `count` (ReferenceError at
  // runtime) and, when a transform was supplied, overwrote `data` with the
  // mapped array and then read the now-missing `data.rows`. It also swallowed
  // errors and resolved with undefined; errors now propagate to the caller.
  const { count, rows } = await model.findAndCountAll(options);
  const data = typeof transform === 'function' ? transform(rows) : rows;

  return {
    previousPage: getPreviousPage(page),
    currentPage: page,
    nextPage: getNextPage(page, limit, count),
    total: count,
    limit: limit,
    data: data,
  };
};

// 0-based row offset for a 1-based page number.
const getOffset = (page, limit) => {
  return (page * limit) - limit;
};

// Next page number, or null when `page` is already the last page.
const getNextPage = (page, limit, total) => {
  if ((total / limit) > page) {
    return page + 1;
  }
  return null;
};

// Previous page number, or null on the first page.
const getPreviousPage = (page) => {
  if (page <= 1) {
    return null;
  }
  return page - 1;
};
module.exports = paginate;
The first problem is that on queries with included models it sometimes responds with the wrong count,
and it is also very slow.
Please, help me with solving this issue
Thank you
I tried use separate: true, required: true and etc - I just get empty arrays on included
In your Sequelize request there are lots of JSON queries. MySQL can't directly index your JSON fields. You can create a reference for your query property as shown below, and after that you can add an index on the reference column.
Then change your query to use the generated reference column. You can also create MySQL indexes on the columns used by your other requests.
-- Expose the JSON property as a generated column so MySQL can index it.
ALTER TABLE [table_name] ADD COLUMN email VARCHAR(255)
GENERATED ALWAYS as (properties->>"$.[jsonObjectName].[jsonProperty]");
-- FIX: the index must be built on the generated column itself (`email`),
-- not on the raw JSON property, which is not indexable directly.
ALTER TABLE [table_name] ADD INDEX email_idx (email) USING BTREE;

Rxdb infinitely pulling in replicateRxCollection

I'm working with rxdb and I have pull and push handlers for the backend I have used supabase
I have setup the code for replication as follows:
replication.ts
import { RxDatabase } from "rxdb";
import { RxReplicationPullStreamItem } from "rxdb/dist/types/types";
import { replicateRxCollection } from "rxdb/plugins/replication";
import { Subject } from "rxjs";
import { supabaseClient, SUPABASE_URL } from "src/config/supabase";
import { DbTables } from "src/constants/db";
import {
blockPullHandler,
blockPushHandler,
} from "./repilicationhandlers/block";
import { CheckpointType, RxBlockDocument, RxBlocksCollections } from "./types";
// Starts RxDB replication of the `blocks` collection against Supabase:
// a realtime subscription feeds the pull stream, while blockPullHandler /
// blockPushHandler perform the batched transfers.
export async function startReplication(
database: RxDatabase<RxBlocksCollections>
) {
// Stream of live server-side changes fed into the replication's pull side.
const pullStream$ = new Subject<
RxReplicationPullStreamItem<RxBlockDocument, CheckpointType>
>();
supabaseClient
.from(DbTables.Block)
.on("*", (payload) => {
console.log("Change received!", payload);
const doc = payload.new;
// Forward the changed row together with a checkpoint built from it.
// NOTE(review): every realtime event advances the checkpoint; if `updated`
// is not strictly increasing, the pull handler can keep re-fetching the
// same rows — worth confirming when chasing the endless-pull symptom.
pullStream$.next({
checkpoint: {
id: doc.id,
updated: doc.updated,
},
documents: [doc] as any,
});
})
.subscribe((status: string) => {
console.log("STATUS changed");
console.dir(status);
if (status === "SUBSCRIBED") {
// Once the channel is live, ask RxDB to re-run a full pull cycle.
pullStream$.next("RESYNC");
}
});
const replicationState = await replicateRxCollection({
collection: database.blocks,
replicationIdentifier: "supabase-replication-to-" + SUPABASE_URL,
// Rows are soft-deleted via this column instead of a `_deleted` field.
deletedField: "archived",
pull: {
handler: blockPullHandler as any,
stream$: pullStream$.asObservable(),
batchSize: 10,
},
push: {
batchSize: 1,
handler: blockPushHandler as any,
},
});
// Surface replication errors in the console for debugging.
replicationState.error$.subscribe((err) => {
console.error("## replicationState.error$:");
console.log(err);
});
return replicationState;
}
blockPullHandler:
// Pull handler: fetches up to `batchSize` rows whose `updated` timestamp is
// strictly greater than the last checkpoint's, ordered oldest-first.
export const blockPullHandler = async (
lastCheckpoint: any,
batchSize: number
) => {
// No checkpoint yet -> start from the beginning of time.
const minTimestamp = lastCheckpoint ? lastCheckpoint.updated : 0;
console.log("Pulling data", batchSize, lastCheckpoint);
const { data, error } = await supabaseClient
.from(DbTables.Block)
.select()
.gt("updated", minTimestamp)
.order("updated", { ascending: true })
.limit(batchSize);
if (error) {
console.log(error);
throw error;
}
const docs: Array<Block> = data;
// NOTE(review): if the checkpoint does not advance between calls (e.g.
// `updated` values repeat, or the stream keeps replaying the same event),
// the same batch is re-delivered on every pull — this matches the
// "endless pulling" symptom; confirm `updated` is unique and monotonic.
return {
documents: docs,
// Always reported as exhausted; paging relies solely on the checkpoint.
hasMoreDocuments: false,
checkpoint:
docs.length === 0
? lastCheckpoint
: {
id: lastOfArray(docs).id,
updated: lastOfArray(docs).updated,
},
};
};
blockPushHandler:
// Push handler: writes exactly one local change to Supabase and returns the
// server's current row for any conflict (an empty array means success).
export const blockPushHandler = async (
rows: RxReplicationWriteToMasterRow<RxBlockDocumentType>[]
) => {
// push.batchSize is configured as 1, so more than one row is a bug.
if (rows.length !== 1) {
throw new Error("# pushHandler(): too many push documents");
}
const row = rows[0];
const oldDoc: any = row.assumedMasterState;
const doc: Block = row.newDocumentState;
console.log(row, oldDoc, doc);
// insert
// No assumed master state -> the client believes this row is brand new.
if (!row.assumedMasterState) {
const { error } = await supabaseClient.from(DbTables.Block).insert([doc]);
console.log("Error 1", error);
if (error) {
// we have an insert conflict
// Return the server's version so RxDB can run conflict resolution.
const conflictDocRes: any = await supabaseClient
.from(DbTables.Block)
.select()
.eq("id", doc.id)
.limit(1);
return [conflictDocRes.data[0]];
} else {
return [];
}
}
// update
console.log("pushHandler(): is update");
// Optimistic concurrency: the update only matches while the server still
// holds the revision the client assumed it had.
const { data, error } = await supabaseClient
.from(DbTables.Block)
.update(doc)
.match({
id: doc.id,
replicationRevision: oldDoc.replicationRevision,
});
console.log("Error 2", error);
if (error) {
console.log("pushHandler(): error:");
console.log(error);
console.log(data);
throw error;
}
console.log("update response:");
console.log(data);
// NOTE(review): assumes a revision mismatch yields `data.length === 0`;
// some supabase-js versions return null data unless `.select()` is chained
// after update — confirm against the client version in use.
if (data.length === 0) {
// we have an updated conflict
const conflictDocRes: any = await supabaseClient
.from(DbTables.Block)
.select()
.eq("id", doc.id)
.limit(1);
return [conflictDocRes.data[0]];
}
return [];
};
The issue is that when I start the application the pull handler is called correctly, but it never stops: it keeps sending continuous requests one after another, even after it has fetched the documents and even though I set hasMoreDocuments to false. It keeps sending requests and running the replicator. Is there something wrong with my configuration?
database.ts:
// Creates the RxDB database (Dexie/IndexedDB storage) with `blocks` and
// `documents` collections, plus hooks that maintain `replicationRevision`
// so the push handler's optimistic-concurrency check works.
export const createDatabase = async () => {
const database = await createRxDatabase({
name: "sundaedb",
storage: getRxStorageDexie(),
});
await database.addCollections({
blocks: {
schema: blockSchema as any,
conflictHandler: conflictHandler as any,
},
documents: {
schema: documentSchema as any,
conflictHandler: conflictHandler as any,
},
});
// Seed the revision string on first insert.
database.blocks.preInsert((docData) => {
docData.replicationRevision = createRevision(
database.hashFunction,
docData as any
);
return docData;
}, false);
// Bump the revision height on remove so replication treats the delete as a
// newer write than the state the server holds.
database.blocks.preRemove((docData) => {
console.log(" PRE REMOVE !!");
console.log(JSON.stringify(docData, null, 4));
const oldRevHeight = parseRevision(docData.replicationRevision).height;
docData.replicationRevision =
oldRevHeight + 1 + "-" + database.hashFunction(JSON.stringify(docData));
console.log(JSON.stringify(docData, null, 4));
return docData;
}, false);
// Same revision bump on every save/update.
database.blocks.preSave((docData) => {
const oldRevHeight = parseRevision(docData.replicationRevision).height;
docData.replicationRevision =
oldRevHeight + 1 + "-" + database.hashFunction(JSON.stringify(docData));
return docData;
}, false);
return database;
};

How to fix the typeerror in react and typescript?

my code is like below,
itemDetails: async (parent, args, { dataSources: { someAPI } }) => {
const source = get(parent, 'source');
const id = get(parent, 'id');
if (id) {
let output;
if (source === 'source1') {
const data = await someAPI.getItem(id);
output = {
id: data.id,
name: data.name,
}
}
if (source === 'source2') {
const data = await someAPI.getItem(id);
output = {
id: data.id,
name: data.name,
}
}
if (output) {
return {
id: output.id,
name: output.name,
}
} else {
return {};
}
} else {
return {};
}
},
The above code works, but if I return the output within each if condition as in the code below, it throws an error saying that
the type { id?: any, name?: any } cannot be assigned to the type { id?: undefined, name?: undefined }.
itemDetails: async (parent, args, { dataSources: { someAPI } }) => {
const source = get(parent, 'source');
const id = get(parent, 'id');
if (id) {
if (source === 'source1') {
const data = await someAPI.getItem(id);
return { //returning here
id: data.id,
name: data.name,
}
}
if (source === 'source2') {
const data = await someAPI.getItem(id);
return { //returning here
id: data.id,
name: data.name,
}
}
} else { //no id
return {};
}
},
not sure what the problem is. could someone help me with this. thanks.

Mongoose Validation error while adding a record to array

I am getting the error 'hospital_doctor_details validation failed: Path hospitalName is required' while adding new treatment details to an existing hospital record.
Since the hospital record with the hospital name used in the API has already been created, Mongoose should not throw this error. As I am relatively new to MongoDB, I am clueless about why this error is thrown.
Any help in identifying the root cause will be really appreciated.
var hospitalName = req.params.hospitalname;
var hospitalCity = req.params.hospitalcity;
var hospitalCountry = req.params.hospitalcountry;
var procedureName = req.body["procedureName"];
var doctorName = req.body["doctorName"];
var treatmentFound = false;
var doctorFound = false;
//create doctor promise
const doctorPromise = new Promise((resolve, reject) =>
counterSchema.getNext('Treatment.$.doctor.$.doctorId', collection, function (id) {
//code
));
//create department promise
const departmentPromise = new Promise((resolve, reject) =>
//code
));
//create procedure promise
const procedurePromise = new Promise((resolve, reject) =>
//code
));
//Create hospital check promise
const dbHospitalCheckPromise = new Promise((resolve, reject) =>
//code
});
//Create dbcheck promise
const dbTreatmentCheckPromise = new Promise((resolve, reject) =>
//code
})
Promise.all([dbHospitalCheckPromise, dbTreatmentCheckPromise, departmentPromise, procedurePromise, doctorPromise])
.then(([, searchResult , departmentID, procedureID, doctorID ]) => {
var procedureFound = searchResult.split('|')[0];
procedureFound = procedureFound == "true";//convert to boolean
var docFound = searchResult.split('|')[1];
docFound = docFound== "true";//convert to boolean
if (procedureFound == true && docFound == false && doctorName != null) {
hospitalModel.findOneAndUpdate({
"hospitalName": hospitalName, "hospitalContact.City": hospitalCity, "hospitalContact.country": hospitalCountry, "Treatment": {
$elemMatch: { "name": procedureName }
}},
{
"$push": {
"Treatment.$.doctor": {
"doctorId": doctorID,
"doctorName": req.body["doctorName"],
"profilepicdir": req.body["profilePicDirectory"],
"medinovitadoctorRating": parseInt(req.body["medinovitaDoctorRating"]),
"speciality": {
"specialityName": req.body["specialityName"]
}
}
}
},
{ returnOriginal: false, upsert: true }, function (err, doc) {
if (err) {
logger.error("Error while updating record : - " + err.message);
} else if (doc === null) {
logger.error("Error while updating record : - Unable to update database");
}
}).
then(function () {
hospitalSchema.save(function (error, data) {
if (error) {
console.log("Save failed")
logger.error("Error while inserting record : - " + error.message);
return res.json({ "Message": error.message });
}
else {
console.log("saved")
return res.json({ "Message": "Data got inserted successfully" });
}
});
})
.catch(function (err) {
return res.json({ "Message": err.message });
});
}
My schema looks like below,
hospitalID: 10001,
hospitalName: 'Renai MediCity',
__v: 0,
updated_at: 2017-08-18T17:34:53.784Z,
Treatment:
[ { name: 'Root Canal',
costUpperBound: 10000,
costLowerBound: 8000,
departmentId: 10001,
procedureid: 10001,
departmentName: 'Dental',
_id: 599725530c126c1efc43dc52,
doctor:[ {
profilepicdir:"smdir1",
doctorName:"Dr.vp2",
doctorId:10002,_id:5997255c0c126c1efc43dc57
}] },
],

Declaring multiple relayjs connections

I have to two GraphQLObjectType
// A-type GraphQL node: exposes the Mongo `_id` both as a Relay global id
// and as a raw GraphQLID, plus the basic profile fields.
const AType = new GraphQLObjectType({
  name: 'A', // FIX: the missing comma after this property was a syntax error
  fields: () => ({
    id: globalIdField('A', obj => obj._id),
    Id: {
      type: GraphQLID,
      resolve: obj => obj._id
    },
    email: { type: GraphQLString },
    name: { type: GraphQLString },
    status: { type: GraphQLString },
    description: { type: GraphQLString }
  }),
  interfaces: [nodeInterface]
});
and another type, BType. I am using AType inside BType as the type of one of its fields:
// B-type node: embeds the applicant's A-type record plus a static demo field.
const BType = new GraphQLObjectType({
name: 'BType',
fields: {
id: globalIdField('BType'),
ApplicantsDetails: {
type: AType,
// The parent object is expected to carry the applicant document under
// `applicantsId` (matching the Dash() response shape shown below).
resolve: obj => {
return obj.applicantsId;
}
},
respo_2: {
type: GraphQLString,
resolve: obj => "I have declared a ObjectType inside another"
}
},
interfaces: [nodeInterface]
});
and the main type, from which I am returning a promise; when I resolve(d), the result should be delivered to the BType connection:
// Relay connection type whose nodes are BType — used by the
// EmployerDashBoardConnection field below.
const { connectionType: EmployerDashBoardConnectionType} =
connectionDefinitions({
name: 'EmployerDash',
nodeType: BType
});
above is the connection
// Connection field backed by the async Dash() lookup.
EmployerDashBoardConnection: {
type: EmployerDashBoardConnectionType,
description: 'Employer DashBoard Details',
args: connectionArgsWithJobId,
resolve: (_, args, auth) => {
return new Promise((resolve, reject) => {
Dash(_, args, auth, function (err, d) {
if (err) {
reject(err);
} else {
// NOTE(review): `d` is resolved as-is, but a connection field must
// resolve to a connection object ({ edges, pageInfo }) built from a
// LIST of BType-shaped items (e.g. via connectionFromArray) —
// resolving the raw Dash() object is why edges come back null.
resolve(d);
}
});
});
}
}
/* Introduce your new fields here */
}
the response from Dash() function call is
{
applicantsId:
{ _id: 5878548b51179817f48eb1f1,
email: '123#gmail.com',
password: '$2a$10$lDpfl7kL4i/8VPij8aypmeeiD1794g1afACUxca397LdlErMgWa.S',
__v: 0,
name: 'Alpaina',
status: 'Unemployed',
isJobSeeker: true,
to: 2017-01-13T04:16:11.755Z }
}
it only prints null
for using edges you need to pass an array to resolve function
// Fixed resolver: wraps the single Dash() result in an array and converts it
// to a Relay connection before resolving.
resolve: (_, args, auth) => {
return new Promise((resolve, reject) => {
Dash(_, args, auth, function (err, d) {
if (err) {
reject(err);
} else {
// if d is object then
const a = [];
a.push(d);
// connectionFromArray (graphql-relay) builds { edges, pageInfo }.
resolve(connectionFromArray(a, args));
}
});
});
}
this code is to only solve your current problem
NOTE: while using relay connections you should always resolve a list [ ]
There are two problems in your code:
First problem:
The node type of EmployerDashBoardConnectionType is BType. So, the items returned in resolve() function of field EmployerDashBoardConnection should have the same properties - that's not the case in your code.
The structure of BType is:
{
id
ApplicantsDetails {
id,
Id,
email,
name,
status,
description,
}
respo_2,
}
whereas you are passing the following object, which totally does not match.
{
applicantsId: {
_id: 5878548b51179817f48eb1f1,
email: '123#gmail.com',
password: '$2a$10$lDpfl7kL4i/8VPij8aypmeeiD1794g1afACUxca397LdlErMgWa.S',
__v: 0,
name: 'Alpaina',
status: 'Unemployed',
isJobSeeker: true,
to: 2017-01-13T04:16:11.755Z
}
}
Second problem:
This is why you're getting null for edges. Your resolve() function:
// The problematic resolver: resolves the raw Dash() payload instead of a
// Relay connection object, so the query's edges come back null.
resolve: (_, args, auth) => {
return new Promise((resolve, reject) => {
Dash(_, args, auth, function (err, d) {
if (err) {
reject(err);
} else {
resolve(d);
}
});
});
}
where you return nothing. You can use connectionFromArray from graphql-relay npm module:
// Corrected resolver using connectionFromArray; note that `d` must be a LIST
// of items matching BType's top-level fields for the edges to resolve.
resolve: (_, args, auth) => {
return new Promise((resolve, reject) => {
Dash(_, args, auth, function (err, d) {
if (err) {
reject(err);
} else {
resolve(connectionFromArray(d, args));
}
});
});
}
The value of d must be a list where each item should have top level properties id, ApplicantsDetails, respo_2.

Categories