Insert data from an API into PostgreSQL with JavaScript

I'm using an npm package (tcmb-doviz-kuru) to get currency data from an API, and I'm trying to insert it into my database. However, I couldn't map the data to insert the values. Here is my code:
var tcmbDovizKuru = require('tcmb-doviz-kuru');
var pg = require('pg');

function cb(error, data) {
  if (error) {
    console.log('error', error);
  }
  insertToDatabase(data);
}

function insertToDatabase(data) {
  var pgClient = new pg.Client(connectionString);
  pgClient.connect();
  const text = 'INSERT INTO currency.kur("date", forexBuying, forexSelling, banknoteBuying, banknoteSelling, unit, isim, currencyCode) VALUES(CURRENT_TIMESTAMP, $1, $2, $3, $4, $5, $6, $7)';
  const values = data['tarihDate']['currency'];
  pgClient.query(text, values)
    .then(res => {
      console.log("inserted");
    })
    .catch(e => console.error("error"));
}

tcmbDovizKuru(cb);
and an example of the data from the API (data['tarihDate']['currency']):
[
  {
    attributes: { crossOrder: '0', kod: 'USD', currencyCode: 'USD' },
    unit: 1,
    isim: 'ABD DOLARI',
    currencyName: 'US DOLLAR',
    forexBuying: 8.2981,
    forexSelling: 8.313,
    banknoteBuying: 8.2922,
    banknoteSelling: 8.3255,
    crossRateUSD: null,
    crossRateOther: null
  },
  {
    attributes: { crossOrder: '1', kod: 'AUD', currencyCode: 'AUD' },
    unit: 1,
    isim: 'AVUSTRALYA DOLARI',
    currencyName: 'AUSTRALIAN DOLLAR',
    forexBuying: 6.1339,
    forexSelling: 6.1739,
    banknoteBuying: 6.1057,
    banknoteSelling: 6.211,
    crossRateUSD: 1.3496,
    crossRateOther: null
  }
]
How can I insert each of these values?

The data you are inserting is an object, but the $1, $2, ... placeholders you are using expect an array.
See What does the dollar sign ('$') mean when in the string to .query?
So you would need to map each entry of data['tarihDate']['currency'] to an array of values.
To insert the USD entry from your example data:
const usdInfo = data['tarihDate']['currency'][0];
const valuesArray = [
  usdInfo.forexBuying,
  usdInfo.forexSelling,
  usdInfo.banknoteBuying,
  usdInfo.banknoteSelling,
  usdInfo.unit,
  usdInfo.isim,
  usdInfo.attributes.currencyCode
];

pgClient.query(text, valuesArray)
  .then(res => {
    console.log("inserted", res);
  })
  .catch(e => console.error("error", e));
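To insert every entry rather than just the USD one, something along these lines should work (a sketch that reuses the text query and pgClient from above; note that currencyCode sits under attributes in the sample data):
// Sketch: insert each currency entry; assumes `pgClient` and `text` from above.
const currencies = data['tarihDate']['currency'];

Promise.all(currencies.map(cur => {
  // build the parameter array in the same order as the $1..$7 placeholders
  const values = [
    cur.forexBuying,
    cur.forexSelling,
    cur.banknoteBuying,
    cur.banknoteSelling,
    cur.unit,
    cur.isim,
    cur.attributes.currencyCode
  ];
  return pgClient.query(text, values);
}))
  .then(() => console.log('all rows inserted'))
  .catch(e => console.error('insert error', e));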


Failed to get item from dynamoDB using GetItemCommand (JS)

I am new to DynamoDB and trying to perform some basic operations to learn the subject.
I have successfully created a table using the AWS SDK (so there is no credentials issue) like this:
const newTable = async () => {
  //* it's working!!!
  try {
    const params = {
      AttributeDefinitions: [
        {
          AttributeName: 'email',
          AttributeType: 'S',
        },
        {
          AttributeName: 'password',
          AttributeType: 'S',
        },
      ],
      KeySchema: [
        {
          AttributeName: 'email',
          KeyType: 'HASH',
        },
        {
          AttributeName: 'password',
          KeyType: 'RANGE',
        },
      ],
      ProvisionedThroughput: {
        ReadCapacityUnits: 5,
        WriteCapacityUnits: 5,
      },
      TableName,
      StreamSpecification: {
        StreamEnabled: false,
      },
    };
    const command = new CreateTableCommand(params);
    const data = await client.send(command);
    console.log(data);
  } catch (err) {
    console.log(err);
  }
};
I inserted a new item into the table using the AWS console, and now I'm trying to access it using the SDK as follows:
const getItem = async () => {
  try {
    const params = {
      TableName,
      Key: {
        email: { S: 'ofer#email.com' },
      },
    };
    const command = new GetItemCommand(params);
    const response = await client.send(command);
    console.log(response);
  } catch (err) {
    console.error(err);
  }
};
When I try to run the code, the following error is received: "ValidationException: The provided key element does not match the schema"
I couldn't figure out where my mistake is.
Since you have a composite key, both HASH and RANGE keys need to be specified when getting an item. Both email and password in your case.
For the primary key, you must provide all of the attributes. For example, with a simple primary key, you only need to provide a value for the partition key. For a composite primary key, you must provide values for both the partition key and the sort key.
https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_GetItem.html#DDB-GetItem-request-Key
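For example, a sketch of the corrected request with both key attributes supplied (the password value here is just a hypothetical placeholder):
const getItem = async () => {
  try {
    const params = {
      TableName,
      Key: {
        email: { S: 'ofer#email.com' },
        password: { S: 'some-password' }, // hypothetical placeholder; the RANGE key must be supplied too
      },
    };
    const command = new GetItemCommand(params);
    const response = await client.send(command);
    console.log(response.Item);
  } catch (err) {
    console.error(err);
  }
};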
As a side note, you are unlikely to want to make a password a RANGE key.

JavaScript, fetch, API

I want to do the following: get a random name with fetch from https://swapi.dev/api/people/, which I did, and I can see it in my HTML page. Then I also want to get a random planet; for that I need to access the homeworld key and return the link. Before returning the link I format it to get a random URL, and from that one I also have to show the name of the planet on my page. The first fetch works fine, at least I think so, but the third .then() is not working, or at least I don't know how to access the information from the homeworld URL. This is my first time trying fetch(), and it would be nice if you could tell me where I went wrong in the code, and maybe suggest different solutions that aren't too complicated. Thanks!
let randomNumber = Math.floor(Math.random() * 9);
const fetchPromise = fetch("https://swapi.dev/api/people/");
let test;
let test2;
let planets = document.querySelector('#age');

fetchPromise
  .then((response) => {
    if (!response.ok) {
      throw new Error(`Http error: ${response.status}`);
    }
    return response.json();
  })
  .then((json) => {
    console.log(json.results[randomNumber].name);
    showRandomUserData(json);
    test = json.results[0].homeworld;
    test = test.slice(0, -2);
    // console.log(test + randomNumber + "/");
    // console.log(test + "/" + randomNumber + "/");
    test = test + randomNumber + "/";
    return fetch(test);
    // return fetch("https://swapi.dev/api/planets/2/");
  })
  .then(response => response.json())
  .then(json => {
    test2 = json.name;
    console.log(test2);
    planets.innerHTML = test2;
  });

showRandomUserData = (randomUser) => {
  document.querySelector("#name").innerHTML =
    randomUser.results[randomNumber].name;
};
Solved
Here's a simple solution that uses fetch() to grab data from both those URLs and then inserts all the people and the one planet that is returned into your web page:
function myFetch(...args) {
  return fetch(...args).then(response => {
    if (!response.ok) {
      throw new Error(`fetch failed with status ${response.status}`);
    }
    return response.json();
  });
}

Promise.all([
  myFetch("https://swapi.dev/api/people/"),
  myFetch("https://swapi.dev/api/planets/2/")
]).then(([people, planet]) => {
  const peopleDiv = document.getElementById("people");
  let peopleHTML = "";
  for (let p of people.results) {
    peopleHTML += `<div>${p.name}</div>`;
  }
  peopleDiv.innerHTML = peopleHTML;

  const planetDiv = document.getElementById("planets");
  let planetHTML = `<div>${planet.name}</div>`;
  planetDiv.innerHTML = planetHTML;
}).catch(err => {
  console.log(err);
});
<div id="people"></div>
<hr>
<div id="planets"></div>
As for using the results, the people URL returns a structure that looks like this:
{
  count: 82,
  next: 'https://swapi.dev/api/people/?page=2',
  previous: null,
  results: [
    {
      name: 'Luke Skywalker',
      height: '172',
      mass: '77',
      hair_color: 'blond',
      skin_color: 'fair',
      eye_color: 'blue',
      birth_year: '19BBY',
      gender: 'male',
      homeworld: 'https://swapi.dev/api/planets/1/',
      films: [Array],
      species: [],
      vehicles: [Array],
      starships: [Array],
      created: '2014-12-09T13:50:51.644000Z',
      edited: '2014-12-20T21:17:56.891000Z',
      url: 'https://swapi.dev/api/people/1/'
    },
    {
      name: 'C-3PO',
      height: '167',
      mass: '75',
      hair_color: 'n/a',
      skin_color: 'gold',
      eye_color: 'yellow',
      birth_year: '112BBY',
      gender: 'n/a',
      homeworld: 'https://swapi.dev/api/planets/1/',
      films: [Array],
      species: [Array],
      vehicles: [],
      starships: [],
      created: '2014-12-10T15:10:51.357000Z',
      edited: '2014-12-20T21:17:50.309000Z',
      url: 'https://swapi.dev/api/people/2/'
    }
  ]
}
So, you have people.results which is an array and you can access people.results[n] to get an item from that array. That item will be an object which has properties like .name, .height, etc...
The specific planet URL you show returns a single planet object like this:
{
  name: 'Alderaan',
  rotation_period: '24',
  orbital_period: '364',
  diameter: '12500',
  climate: 'temperate',
  gravity: '1 standard',
  terrain: 'grasslands, mountains',
  surface_water: '40',
  population: '2000000000',
  residents: [
    'https://swapi.dev/api/people/5/',
    'https://swapi.dev/api/people/68/',
    'https://swapi.dev/api/people/81/'
  ],
  films: [
    'https://swapi.dev/api/films/1/',
    'https://swapi.dev/api/films/6/'
  ],
  created: '2014-12-10T11:35:48.479000Z',
  edited: '2014-12-20T20:58:18.420000Z',
  url: 'https://swapi.dev/api/planets/2/'
}
So, you access properties on that object as in planet.name.
Notice that the people results are paged. There are 82 total results, but only 10 come in this first result. The rest come with results for other pages such as https://swapi.dev/api/people/?page=2.
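If you need every person rather than just the first page, one way (a sketch that reuses the myFetch helper above) is to keep following the next URL until it comes back null:
// Sketch: collect all people by following the paged `next` links.
async function fetchAllPeople() {
  let url = "https://swapi.dev/api/people/";
  const all = [];
  while (url) {
    const page = await myFetch(url); // reuses the helper defined above
    all.push(...page.results);
    url = page.next; // null on the last page, which ends the loop
  }
  return all;
}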
Similar to the answer above, but using async/await to avoid callback hell. If you can, try using this approach.
The recommendation in that answer by jfriend00 to use Promise.all instead of separate fetch calls is excellent, as it enables the fetches to happen in parallel.
A sandbox to test and try:
const fetchData = async (...args) => {
  try {
    const response = await fetch(...args);
    return response.json();
  } catch (err) {
    throw new Error(`fetch failed with status ${err?.message}`);
  }
};

const updateDOM = (people, planet) => {
  document.getElementById("people").innerHTML =
    people.results.reduce((s, p) => s + `<div>${p.name}</div>`, "");
  document.getElementById("planets").innerHTML = `<div>${planet.name}</div>`;
};

const populateData = async () => {
  try {
    const [people, planet] = await Promise.all([
      fetchData("https://swapi.dev/api/people/"),
      fetchData("https://swapi.dev/api/planets/2/"),
    ]);
    // do stuff with 'people' or 'planet', for example:
    // const firstPersonsHomeworld = people.results[0].homeworld;
    // console.log(firstPersonsHomeworld);
    // or
    // const planetName = planet.name;
    // console.log(planetName);
    updateDOM(people, planet);
  } catch (err) {
    // errorHandler(err);
    console.error(err);
  }
};

// start app
populateData();

Pushing to array with multiple values using Node.js - discord.js

So I have an object with an empty fields array:
const logsEmbed = {
  color: '15c9c9',
  title: '',
  fields: [],
  timestamp: new Date()
};
I want the data I receive to look something like:
{
  name: headline1,
  value: value1 \n value2 \n value3,
},
{
  name: headline2,
  value: value1 \n value2 \n value3,
}
What I am getting:
{
  name: headline1,
  value: value1,
},
{
  name: headline1,
  value: value2,
}
Basically, I want to get one unique headline with all the values under it, and then go to the next headline and get all the values there.
The data in the database looks like:
1|Headline1|Value1|2022-04-22 18:05:52.473 +00:00|2022-04-22 18:05:52.473 +00:00
2|Headline1|Value2|2022-04-22 18:15:40.061 +00:00|2022-04-22 18:15:40.061 +00:00
3|Headline1|Value3|2022-04-22 18:16:45.313 +00:00|2022-04-22 18:16:45.313 +00:00
4|Headline2|Value4|2022-04-22 18:19:13.985 +00:00|2022-04-22 18:19:13.985 +00:00
5|Headline2|Value5|2022-04-22 18:19:36.230 +00:00|2022-04-22 18:19:36.230 +00:00
Currently I am pushing values to this array through a for loop, using data from the database via Sequelize. Of course that is not getting me where I want to go, but I hope someone can help me on my way.
const raids = await logs_table.findAll({ attributes: ['raid'], group: ['raid'] });
for (const raid of raids) {
  const logs = await logs_table.findAll({ attributes: ['raid', 'link'], where: { raid: raid.raid } });
  for (const log of logs) {
    logsEmbed.fields.push({ name: log.raid, value: log.link, inline: false });
  }
}
I think this is what you are looking for:
const raids = await logs_table.findAll({
  attributes: ['raid'],
  group: ['raid'],
});

for (const raid of raids) {
  const logs = await logs_table.findAll({
    attributes: ['raid', 'link'],
    where: { raid: raid.raid },
  });
  logs.forEach((log) => {
    const matchIndex = logsEmbed.fields.findIndex(
      (field) => field.name === log.raid
    );
    if (matchIndex < 0)
      logsEmbed.fields.push({ name: log.raid, value: log.link });
    else logsEmbed.fields[matchIndex].value += ` \n ${log.link}`;
  });
}
Note that performing the DB request (logs_table.findAll) twice is probably a mistake; you should be able to retrieve all the data in a single request.
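As a rough sketch of that single-request idea (assuming the same logs_table model and logsEmbed object from above), you could fetch every row once and group the links by raid in JavaScript:
// Sketch: one query, then group links under each raid headline.
const logs = await logs_table.findAll({ attributes: ['raid', 'link'] });

const grouped = {};
for (const log of logs) {
  grouped[log.raid] = grouped[log.raid] ? `${grouped[log.raid]} \n ${log.link}` : log.link;
}

for (const [name, value] of Object.entries(grouped)) {
  logsEmbed.fields.push({ name, value, inline: false });
}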

Flexsearch export and import document index issue

I'm trying to build an index using FlexSearch and Node.js and store it on a local disk, as it takes quite a bit of time to build. The export seems to work, but when trying to import the files again into a new document index I get this error:
TypeError: Cannot read property 'import' of undefined
at Q.t.import (/opt/hermetic/hermetic/server/node_modules/flexsearch/dist/flexsearch.bundle.js:33:330)
at Object.retrieveIndex (/opt/hermetic/hermetic/server/build/search.js:86:25)
at Object.search (/opt/hermetic/hermetic/server/build/search.js:96:32)
at init (/opt/hermetic/hermetic/server/build/server.js:270:27)
I'm running Node.js version 14 and FlexSearch version 0.7.21. Below is the code I am using:
import fs from 'fs';
import Flexsearch from 'flexsearch';

const createIndex = async () => {
  const { Document } = Flexsearch;
  const index = new Document({
    document: {
      id: 'id',
      tag: 'tag',
      store: true,
      index: [
        'record:a',
        'record:b',
        'tag',
      ],
    },
  });
  index.add({ id: 0, tag: 'category1', record: { a: '1 aaa', b: '0 bbb' } });
  index.add({ id: 1, tag: 'category1', record: { a: '1 aaa', b: '1 bbb' } });
  index.add({ id: 2, tag: 'category2', record: { a: '2 aaa', b: '2 bbb' } });
  index.add({ id: 3, tag: 'category2', record: { a: '2 aaa', b: '3 bbb' } });
  console.log('search', index.search('aaa'));
  await index.export((key, data) => fs.writeFile(`./search_index/${key}`, data, err => !!err && console.log(err)));
  return true;
};

const retrieveIndex = async () => {
  const { Document } = Flexsearch;
  const index = new Document({
    document: {
      id: 'id',
      tag: 'tag',
      store: true,
      index: [
        'record:a',
        'record:b',
        'tag',
      ],
    },
  });
  const keys = fs
    .readdirSync('./search_index', { withFileTypes: true }, err => !!err && console.log(err))
    .filter(item => !item.isDirectory())
    .map(item => item.name);
  for (let i = 0, key; i < keys.length; i += 1) {
    key = keys[i];
    const data = fs.readFileSync(`./search_index/${key}`, 'utf8');
    index.import(key, data);
  }
  return index;
};

await createIndex();
const index = await retrieveIndex();
console.log('cached search', index.search('aaa'));
I was trying to find a way to export the index properly too, originally trying to put everything into one file. While it worked, I didn't really like the solution.
That brought me to your SO question; I've checked your code and managed to find out why you get that error.
Basically, the export is a sync operation, while you also (randomly) use async. To avoid the issue, you need to remove all async code and only use the sync fs operations. For my solution, I also create the Document store only once and then just fill it via retrieveIndex(), rather than creating a new Document() per function.
I also added a .json extension to guarantee that fs reads the file properly, and for sanity purposes - after all, it's JSON that is stored.
So thanks for giving me the idea to store each key as a file #Jamie Nicholls 🤝
import fs from 'fs';
import { Document } from 'flexsearch';

const searchIndexPath = '/Users/user/Documents/linked/search-index/';

let index = new Document({
  document: {
    id: 'date',
    index: ['content']
  },
  tokenize: 'forward'
});

const createIndex = () => {
  index.add({ date: "2021-11-01", content: 'asdf asdf asd asd asd asd' });
  index.add({ date: "2021-11-02", content: 'fobar 334kkk' });
  index.add({ date: "2021-11-04", content: 'fobar 234 sffgfd' });
  index.export(
    (key, data) => fs.writeFileSync(`${searchIndexPath}${key}.json`, data !== undefined ? data : '')
  );
};

createIndex();

const retrieveIndex = () => {
  const keys = fs
    .readdirSync(searchIndexPath, { withFileTypes: true })
    .filter(item => !item.isDirectory())
    .map(item => item.name.slice(0, -5));
  for (let i = 0, key; i < keys.length; i += 1) {
    key = keys[i];
    const data = fs.readFileSync(`${searchIndexPath}${key}.json`, 'utf8');
    index.import(key, data ?? null);
  }
};

const searchStuff = () => {
  retrieveIndex();
  console.log('cached search', index.search('fo'));
};

searchStuff();
After looking into this further, the feature is not currently available for document-type searches. See this issue on GitHub for more information.

Convert DynamoDb stringset into JSON

This is one of the fields of StringSet type that is returned from DynamoDB:
permissions:
  Set {
    wrapperName: 'Set',
    values: [
      'BannerConfigReadOnly',
      'CampaignBannerCreate',
      'CampaignPromoCreate',
      'CampaignReadOnly',
      'MasterplanReadOnly',
      'SegmentCreate',
      'SegmentDownload',
      'SegmentUpload'
    ],
    type: 'String'
  }
Now, I am using the aws.DynamoDB.Converter.unmarshall function to get it into this format:
permissions: [
  'BannerConfigReadOnly',
  'CampaignBannerCreate',
  'CampaignPromoCreate',
  'CampaignReadOnly',
  'MasterplanReadOnly',
  'SegmentCreate',
  'SegmentDownload',
  'SegmentUpload'
]
But this is what I get:
{}
Any ideas what I may be doing wrong?
This is my code:
const aws = require('aws-sdk');
const documentClient = new aws.DynamoDB.DocumentClient();

documentClient.scan(params, (err, data) => {
  if (err) {
    reject(err);
  } else {
    let processedItems = [...data.Items];
    var test = aws.DynamoDB.Converter.unmarshall(processedItems[0].permissions);
    console.log(`test is ${JSON.stringify(test)}`);
  }
});
processedItems[0] is this:
{
  email: 'abc#gmail.com',
  tenant: 'Canada',
  permissions:
    Set {
      wrapperName: 'Set',
      values: [
        'BannerConfigReadOnly',
        'CampaignBannerCreate',
        'CampaignPromoCreate',
        'CampaignReadOnly',
      ],
      type: 'String'
    }
}
That data is already unmarshalled since you are using the DocumentClient. Consider just using processedItems[0].permissions.values to get the values of the set.
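For example, something like this should give you a plain array without going through Converter.unmarshall (a sketch based on the scan callback above):
documentClient.scan(params, (err, data) => {
  if (err) {
    console.error(err);
  } else {
    let processedItems = [...data.Items];
    // The DocumentClient already unmarshalls items; the set exposes its members on `.values`.
    const permissions = processedItems[0].permissions.values;
    console.log(permissions); // ['BannerConfigReadOnly', 'CampaignBannerCreate', ...]
  }
});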
