I have this code in App.js:
const getPlayers = async () => {
  const players = await API.getPlayers();
  setPlayers(players);
};
getPlayers();
This code is in my API.js file:
const getPlayers = async () => {
  return getJson(
    fetch(SERVER_URL + 'users', { credentials: 'include' })
  ).then(json => {
    return json.map((user) => {
      return {
        id: user.id,
        name: user.name,
        rank: user.rank
      };
    });
  });
};
This code is in my server.js file:
app.get('/api/players',
  (req, res) => {
    riddleDao.getPlayers()
      .then(players => {
        res.json(players);
      })
      .catch((err) => res.status(500).json(err));
  });
and finally, this is in my DataAccessObject.js file:
exports.getPlayers = () => {
  return new Promise((resolve, reject) => {
    const sql = 'SELECT * FROM users';
    db.all(sql, [], (err, rows) => {
      if (err) {
        reject(err);
        return;
      }
      const players = rows.map(row => {
        return {
          id: row.id,
          name: row.name,
          rank: row.rank
        };
      });
      resolve(players);
    });
  });
};
but I am getting an error.
I am expecting to get an array of objects in App.js when I call the getPlayers() function, and the objects in the array should have the id, name, and rank of the players in my db table.
I think you've got "users" in your fetch URL when it should be "players".
fetch(SERVER_URL + 'users', { credentials: 'include'})
should be
fetch(SERVER_URL + 'players', { credentials: 'include'})
Your API endpoint differs from the URL you are sending requests to. In
app.get('/api/players',
you are listening on "players", but in
fetch(SERVER_URL + 'users', { credentials: 'include'})
you are fetching "users".
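Assuming SERVER_URL already contains the '/api/' prefix that server.js mounts its route under (that constant is not shown in the question), the corrected API.js is just a one-word change:
const getPlayers = async () => {
  return getJson(
    // 'players' must match the app.get('/api/players', ...) route in server.js
    fetch(SERVER_URL + 'players', { credentials: 'include' })
  ).then(json => json.map((user) => ({
    id: user.id,
    name: user.name,
    rank: user.rank
  })));
};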
Related
I have written this function to do an update on a DynamoDB table:
const updateTask = async (req, res) => {
  try {
    const { existingTaskText, updatedTaskText } = req.body;
    console.log(existingTaskText, updatedTaskText);
    UPDATE({
      TableName: "todos",
      Key: { task: existingTaskText },
      UpdateExpression: "set task = :task",
      ExpressionAttributeValues: { ":task": updatedTaskText },
    });
    res.status(200).json({ data: "this is controller" });
  } catch (error) {
    res.status(400).json({ message: error.message });
  }
};
This is calling UPDATE:
const UPDATE = async (payload) => {
  try {
    console.log(payload);
    const updateDoc = await dbClient
      .update({
        TableName: payload.TableName,
        Key: payload.Key,
        UpdateExpression: payload.UpdateExpression,
        ExpressionAttributeNames: payload.ExpressionAttributeNames,
        ReturnValues: "UPDATED_NEW",
      })
      .promise();
    console.log(updateDoc);
  } catch (error) {
    console.log(error);
  }
};
When I am testing this in Postman, I am getting this error:
ValidationException: Invalid UpdateExpression: An expression attribute value used in expression is not defined; attribute value: :task
This is the payload being passed, from the log:
{
  TableName: 'todos',
  Key: { task: 'see its done' },
  UpdateExpression: 'set task = :task',
  ExpressionAttributeValues: { ':task': 'edited' }
}
I made the common functions below for update, get, and creating a table; you can use the same approach.
const AWS = require('aws-sdk');
AWS.config.update({
  region: "us-east-1",
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});
const dynamoDB = new AWS.DynamoDB();
const documentClient = new AWS.DynamoDB.DocumentClient();
const Dynamo = {
  async get(id, TableName) {
    const params = {
      TableName,
      Key: {
        id,
      },
    };
    const data = await documentClient.get(params).promise();
    if (!data || !data.Item) {
      throw Error(`There was an error fetching the data for ID of ${id} from ${TableName}`);
    }
    console.log(data);
    return data.Item;
  },
  async getall(TableName) {
    const params = {
      TableName: TableName,
    };
    const data = await documentClient.scan(params).promise();
    // scan returns Items (plural); the original checked data.Item and referenced an undefined ID
    if (!data || !data.Items) {
      throw Error(`There was an error fetching the data from ${TableName}`);
    }
    console.log(data);
    return data.Items;
  },
  async getMany(params) {
    const data = await documentClient.scan(params).promise();
    console.log(data);
    if (!data || !data.Items) {
      throw Error(`There was an error fetching the data`);
    }
    return data.Items;
  },
  async write(data, TableName) {
    console.log('write dynamo', data, TableName);
    if (!data.id) {
      throw Error('no ID on the data');
    }
    const params = {
      TableName,
      Item: data,
    };
    const res = await documentClient.put(params).promise();
    if (!res) {
      throw Error(`There was an error inserting ID of ${data.id} in table ${TableName}`);
    }
    console.log('res of write dynamo ', res);
    return data;
  },
  async createTable(TableName) {
    // probe the table with a scan; if that fails, create the table.
    // the original wrapped this in a Promise whose resolve was never
    // called, so callers could never await the table creation.
    return documentClient
      .scan({ TableName })
      .promise()
      .catch(() =>
        dynamoDB
          .createTable({
            AttributeDefinitions: [
              {
                AttributeName: "id",
                AttributeType: "S",
              },
            ],
            KeySchema: [
              {
                AttributeName: "id",
                KeyType: "HASH",
              },
            ],
            BillingMode: "PAY_PER_REQUEST",
            TableName: TableName,
          })
          .promise()
          .then(data => console.log("Success!", data))
          .catch(console.error)
      );
  },
};
module.exports = Dynamo;
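A quick usage sketch of these helpers (the 'todos' table name and the id value here are placeholders, not from the original post):
const run = async () => {
  // read one item by its id, then scan the whole table
  const item = await Dynamo.get('some-id', 'todos');
  const allItems = await Dynamo.getall('todos');
  // write() expects the item to carry its own id
  await Dynamo.write({ id: 'some-id', task: 'edited' }, 'todos');
};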
When you call the dbClient.update method, you are declaring the parameter ExpressionAttributeNames. It should be ExpressionAttributeValues. That is why the error message says an expression attribute value used in the expression is not defined.
So you can try changing the dbClient.update call in this way:
const updateDoc = await dbClient
  .update({
    TableName: payload.TableName,
    Key: payload.Key,
    UpdateExpression: payload.UpdateExpression,
    ExpressionAttributeValues: payload.ExpressionAttributeValues,
    ReturnValues: "UPDATED_NEW",
  })
  .promise();
Here, as your UpdateExpression uses an expression attribute value (:task), you have to set 'ExpressionAttributeValues' instead of 'ExpressionAttributeNames'.
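Putting the pieces together, an untested sketch of the corrected helper and controller could look like this; note it also awaits UPDATE, so a failed update reaches the controller's catch block instead of always answering 200:
const UPDATE = async (payload) => {
  return dbClient
    .update({
      TableName: payload.TableName,
      Key: payload.Key,
      UpdateExpression: payload.UpdateExpression,
      // this was ExpressionAttributeNames in the original
      ExpressionAttributeValues: payload.ExpressionAttributeValues,
      ReturnValues: "UPDATED_NEW",
    })
    .promise();
};

const updateTask = async (req, res) => {
  try {
    const { existingTaskText, updatedTaskText } = req.body;
    const updateDoc = await UPDATE({
      TableName: "todos",
      Key: { task: existingTaskText },
      UpdateExpression: "set task = :task",
      ExpressionAttributeValues: { ":task": updatedTaskText },
    });
    res.status(200).json({ data: updateDoc });
  } catch (error) {
    res.status(400).json({ message: error.message });
  }
};
One caveat: if task is the table's partition key, DynamoDB will still reject "set task = :task" with "Cannot update attribute task. This attribute is part of the key"; renaming a key attribute requires deleting the old item and putting a new one.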
The current URL is http://localhost:3000/products/60e03e16229da058968f33c3, but I want to mask it as http://localhost:3000/products/product-1.
This is the getStaticPaths code that I have (it uses Apollo Client to fetch the data):
const GET_ALL_QUERY = gql`
  {
    products {
      id
    }
  }
`;
export async function getStaticPaths() {
  const paths = await client
    .query({
      query: GET_ALL_QUERY,
    })
    .then((data) => {
      const productsData = data.data.products;
      if (productsData.length) {
        return productsData.map((res) => {
          return { params: { id: res.id } };
        });
      }
    })
    .catch((err) => Promise.reject(new Error(err)));
  return {
    paths,
    fallback: false,
  };
}
export async function getStaticProps({ params }) {
  try {
    const res = await client.query({
      query: GET_ONE_QUERY,
      variables: { id: params.id },
    });
    return {
      props: res.data.product,
    };
  } catch (err) {
    throw new Error(err);
  }
}
If I use
<Link href={`/products/${product_id}`} as={`/products/${product_name}`}>
</Link>
it doesn't seem to work: it looks for a /products/product-1 page and returns "page not found".
Also, in server.js I have:
const server = express();
server.get("/products/:id", (req, res) => {
  const actualPage = "/products";
  const queryParams = { id: req.params.id };
  console.dir("req.params.id = " + JSON.stringify(req.params.id));
  app.render(req, res, actualPage, queryParams);
});
I suspect this could be affecting the dynamic path.
I am not able to figure out a solution. I have searched extensively, but all resources refer to using only the id. Is this possible in Next.js?
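The 'as' prop only masks the URL during client-side navigation; Next.js still has to resolve the visible path against a real route, and since getStaticPaths only generated id-based paths, /products/product-1 has no page to match. One common approach (a sketch, assuming every product exposes a unique, URL-safe name and the GraphQL API can look a product up by it) is to make the name itself the dynamic segment, e.g. pages/products/[slug].js:
// pages/products/[slug].js: GET_ALL_QUERY is assumed to also return `name`
export async function getStaticPaths() {
  const { data } = await client.query({ query: GET_ALL_QUERY });
  return {
    paths: data.products.map((p) => ({ params: { slug: p.name } })),
    fallback: false,
  };
}

export async function getStaticProps({ params }) {
  // GET_ONE_BY_NAME_QUERY is hypothetical; the API must support lookup by name
  const res = await client.query({
    query: GET_ONE_BY_NAME_QUERY,
    variables: { name: params.slug },
  });
  return { props: res.data.product };
}
The Link then needs no 'as' at all: <Link href={`/products/${product_name}`}>.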
I have a list of posts, each containing a userId. When fetching n posts, I want to loop through them, fetch each post's user data, and append it to the post.
But before the loop resolves, the function returns with undefined. After that, the post data gets listed, but I want the post data to be fetched first.
I am new to promises and async. If there are any other solutions that I can use, please let me know.
I am using Sails.js.
fetchPosts: async (req, res) => {
  let feeds = [];
  posts = await Posts.find({
    skip: offset,
    limit: limit,
    sort: sort,
  });
  if (posts) {
    /**
     * LOOPING THROUGH FETCHED POST LOOP TO GET
     * USER DATA, LIKE, SHARE, FOLLOW, BOOKMARKS
     */
    const functionWithPromise = (post) => {
      // a function that returns a promise
      console.log(feeds);
      return Promise.resolve(post);
    };
    const anotherAsyncFunction = async (post) => {
      return functionWithPromise(post);
    };
    const getUser = async (userId, post) => {
      return new Promise(async (resolve, reject) => {
        const user = await Account.findOne({ id: userId });
        if (user) {
          post = {
            ...post,
            user: {
              id: user.id,
              uName: user.uName,
              provider: user.provider,
              dpURL: user.dpURL,
              provider: user.provider,
            },
          };
          resolve(post);
        } else {
          reject(null);
        }
      });
    };
    const anAsyncFunction = async (post) => {
      if (post.isAdminPost) {
        post = {
          ...post,
          user: {
            id: "5f3b8bf00dc3f12414b7f773", // this is the userid of admin#dopaminetalks.com in the `Admin` model
            uName: "DTOfficial",
            provider: "LOCAL",
            dpURL: "/dpURL/86a73b80-babc-4caa-a84c-762f6e9c1b36.png",
          },
        };
        feeds = [...feeds, post];
        return anotherAsyncFunction(feeds);
      } else {
        getUser(post.userId, post).then((post) => {
          feeds = [...feeds, post];
          return anotherAsyncFunction(feeds);
        });
      }
    };
    const getData = async () => {
      return Promise.all(posts.map((post) => anAsyncFunction(post)));
    };
    getData().then((data) => {
      console.log(data);
      return res.json({
        status: true,
        msg: "Posts Fetched",
        data: data,
      });
    });
  }
},
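For what it's worth, the core bug is in the else branch of anAsyncFunction: it never returns the getUser(...).then(...) promise, so Promise.all resolves before the user lookups finish. A much flatter sketch of the same handler (same Posts/Account models and the same offset/limit/sort variables assumed) that awaits every lookup before responding:
fetchPosts: async (req, res) => {
  const posts = await Posts.find({ skip: offset, limit: limit, sort: sort });

  const ADMIN_USER = {
    id: "5f3b8bf00dc3f12414b7f773",
    uName: "DTOfficial",
    provider: "LOCAL",
    dpURL: "/dpURL/86a73b80-babc-4caa-a84c-762f6e9c1b36.png",
  };

  // map every post to a promise and wait for all of them before responding
  const feeds = await Promise.all(
    posts.map(async (post) => {
      if (post.isAdminPost) {
        return { ...post, user: ADMIN_USER };
      }
      const user = await Account.findOne({ id: post.userId });
      return user
        ? { ...post, user: { id: user.id, uName: user.uName, provider: user.provider, dpURL: user.dpURL } }
        : post;
    })
  );

  return res.json({ status: true, msg: "Posts Fetched", data: feeds });
},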
I'm writing an Angular 6 + Express.js app and I'm now stuck on the following problem: when multiple requests are made at the same time, sometimes (especially when there are more than 4 requests) all of them respond with 404 or even get cancelled. Is there a problem with the way I handle requests in Express, or should I add some tweaks for concurrent requests?
Requests:
let requests = [];
files.forEach((file) => {
  if (file.type.toLowerCase().includes('zip')) {
    requests.push(this.imagesService.uploadArchive(file).pipe(first()));
  } else {
    requests.push(this.imagesService.saveImage(file).pipe(first()));
  }
});
forkJoin(requests).subscribe(
  (res) => res.forEach(response => {
    this.onSave.emit(response);
  }),
  (error) => {
    console.error(error);
  },
  () => {
    this.close.emit();
  }
);
Express route handlers:
router.post('/images',
  formidable({
    encoding: 'utf-8',
    uploadDir: path.resolve(__dirname, '..', '..', 'uploads'),
    multiples: true,
    keepExtensions: true
  }),
  (req, res, next) => {
    const image = req.fields;
    const data = req.files;
    image.path = data.image.path;
    const file = fs.createReadStream(image.path);
    saveImage(image).then(
      result => {
        if (result) {
          res.status(200).send(result);
        } else {
          console.error("Cannot save image");
          res.status(400).send("Cannot save image");
        }
      }).catch(e => console.error(e.stack));
  });
UPDATE
router.post('/archives',
  formidable({
    encoding: 'utf-8',
    uploadDir: path.resolve(__dirname, '..', '..', 'uploads'),
    multiples: true,
    keepExtensions: true
  }),
  (req, res, next) => {
    const data = req.files;
    let promises = [];
    fs.readFile(data.archive.path, async (err, archive) => {
      if (err) throw err;
      await extractImagesFromZip(archive, data.archive.path).then((images) =>
        images.forEach((image) => {
          promises.push(
            saveImage(image).then(
              result => {
                if (result) {
                  result.path = result.path.split('/').pop();
                  return result;
                } else {
                  console.error("Cannot save image " + image.name);
                  fs.unlink(image.path, () => {});
                }
              }).catch(e => {
                fs.unlink(image.path, () => {});
                console.error(e.stack);
              })
          );
        })
      );
      Promise.all(promises)
        .then((result) => {
          if (result.length > 0) {
            res.status(200).send(result);
          } else {
            res.status(400).send("None images were saved");
          }
        }).catch((error) => {
          console.log(error.stack);
          res.status(400).send("None images were saved");
        });
    });
  }
);
export const extractImagesFromZip = (file, link) => {
  let promises = [];
  var zip = new JSZip();
  return zip.loadAsync(file)
    .then((archive) => {
      Object.values(archive.files).filter(
        f =>
          ['.jpg', '.jpeg', '.png'].some((suffix) => f.name.toLowerCase().endsWith(suffix))
          && ![...f.name.toLowerCase().split('/')].pop().startsWith('.')
          && !f.dir
      ).forEach(f => promises.push(zip.file(f.name).async('nodebuffer').then((content) => {
        const ext = f.name.split('.').pop().toLowerCase();
        var dest = path.resolve(__dirname, '..', '..') + '/uploads/upload_'
          + crypto.randomBytes(Math.ceil(1322)).toString('hex').slice(0, 32).toLowerCase()
          + '.' + ext;
        return new Promise((res, rej) => {
          fs.writeFile(dest, content, (err) => {
            if (err) rej(err);
            res(new Promise((resolve, reject) => {
              fs.readFile(dest, (erro, data) => {
                if (erro) reject(erro);
                if (data) resolve({
                  name: f.name,
                  type: 'image/' + (ext === 'jpg' ? 'jpeg' : ext),
                  path: dest
                });
              });
            }));
          });
        });
      })));
      fs.unlink(link, () => {});
      return Promise.all(promises);
    });
}
export const saveImage = (image) => {
  return database.raw(
    "INSERT INTO images (name, type, path) " +
    "VALUES (?, ?, ?) " +
    "RETURNING name, type, path, id",
    [image.name, image.type, image.path]
  ).then(data => data.rows[0]).catch(e => console.error(e.stack));
};
UPDATE 2
Everything works fine if the user and the server are on localhost (regardless of whether the server runs behind nginx or not), but the problem appears when the server is remote.
The following code worked:
public async uploadFiles(files: File[]) {
  of(files)
    .pipe(
      concatMap(files =>
        files.map(file => {
          return this.imagesService
            .saveImage(file)
            .pipe(
              catchError((error, caught) => {
                console.error(error);
                return empty();
              })
            );
        })
      ),
      concatAll(),
      toArray(),
      map(res => console.log(res))
    )
    .subscribe();
}
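For context on why the sequential version behaves better: forkJoin subscribes to every upload observable at once, so all the HTTP requests hit the server simultaneously, whereas the of(...).pipe(concatMap(...), concatAll()) chain subscribes to the uploads one at a time, so the remote server never sees more than one upload in flight. That difference fits the symptom of failures appearing only against the remote server.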
I have been trying to insert a large array of JSON objects (about 400-1000) into MongoDB using Mongoose + Express.js. With about 50 items, insertMany works great without problems, but if the data is more than about 100 items it gives me an error.
Departed.insertMany(results)
  .then(dep => {
    console.log(dep)
    res.sendStatus(201)
  })
  .catch(err => {
    console.log(err)
  })
In the Morgan console I got the following:
creation { active: true,
  _id: 5b73e8af19722d1689d863b0,
  name: 'TEST DATA 241',
  map: '',
  created_at: 2018-08-15T08:47:43.196Z,
  updated_at: 2018-08-15T08:47:43.196Z,
  __v: 0 }
insert read 453
(node:5769) [DEP0079] DeprecationWarning: Custom inspection function on Objects via .inspect() is deprecated
Also, on the client side (Chrome DevTools, network tab) the status was:
(failed)
net::ERR_EMPTY_RESPONSE
I have read that Mongo's insertMany() has a limit of about 1000, and I am using Mongo version 4.0. I even chunked the large JSON into several arrays and tried to insert those, but I still got the same results. The actual snippets are:
router.post('/xls', upload.single('file'), async (req, res, next) => {
  try {
    if (req.body && req.file) {
      console.log('req', req.file)
      const segments = req.file.originalname.split('.')
      let exceltojson = segments[segments.length - 1] === 'xlsx' ? xlsx : xls
      exceltojson(
        {
          input: req.file.path,
          output: 'output.json'
        },
        async (err, result) => {
          if (err) console.log(err)
          const section = await Section.create({
            name: req.body.section,
            map: req.body.map
          })
          const results = await result.map(item => {
            return {
              branch: req.body.branch,
              section: String(section._id),
              ...item
            }
          })
          await console.log('creation', section)
          console.log('insert read', results.length)
          if (results.length >= 100) {
            console.log('more than 100')
            const data = _.chunk(results, 100)
            data.forEach(async chunk => {
              console.log('foreach')
              Departed.insertMany(chunk)
                .then(dep => {
                  console.log(dep)
                  res.sendStatus(201)
                })
                .catch(err => {
                  console.log(err)
                })
            })
          }
        }
      )
    }
  } catch (error) {
    next(error)
  }
})
Your problem is not related to any insertMany limit. You have a race condition in your code where you don't wait for all chunks to be inserted before sending the status back:
data.forEach(async chunk => {
  console.log('foreach')
  Departed.insertMany(chunk)
    .then(dep => { // this will be called as soon as one of the inserts finishes
      console.log(dep)
      res.sendStatus(201)
    })
    .catch(err => {
      console.log(err)
    })
})
Change this into something like (untested):
Promise.all(data.map(chunk => Departed.insertMany(chunk)))
  .then(dep => { // this will be called when all inserts finish
    console.log(dep)
    res.sendStatus(201)
  })
  .catch(err => {
    console.log(err)
  })
Another alternative is to use the bulkWrite API, which is faster than sending multiple independent operations, because with bulkWrite() there is only one round trip to MongoDB:
router.post('/xls', upload.single('file'), async (req, res, next) => {
  try {
    if (req.body && req.file) {
      console.log('req', req.file)
      const segments = req.file.originalname.split('.')
      let exceltojson = segments[segments.length - 1] === 'xlsx' ? xlsx : xls
      exceltojson(
        {
          input: req.file.path,
          output: 'output.json'
        },
        async (err, result) => {
          if (err) console.log(err)
          const section = await Section.create({
            name: req.body.section,
            map: req.body.map
          })
          let chunk = []
          // a plain for..of loop so we can await inside it
          // (await is not allowed in a non-async forEach callback)
          for (const item of result) {
            chunk.push({
              insertOne: {
                document: {
                  branch: req.body.branch,
                  section: String(section._id),
                  ...item
                }
              }
            })
            // flush every 500 operations and start a fresh chunk
            if (chunk.length === 500) {
              const blkResult = await Departed.bulkWrite(chunk)
              console.log(blkResult)
              chunk = []
            }
          }
          // write whatever is left over, then respond exactly once
          if (chunk.length > 0) {
            const dep = await Departed.bulkWrite(chunk)
            console.log(dep)
          }
          res.sendStatus(201)
        }
      )
    }
  } catch (error) {
    next(error)
  }
})