I have a URL that returns a JSON object.
I need a button with an onClick function that will call that API, get the JSON object, convert it to CSV, and allow the user to download it to their local machine.
The CSV file should be structured as follows:
"Header 1","Header 2","Header 3","Header 4","Header 5","Header 6","Header 7","Header 8","Header 9","Header 10","Header 11","Header 12"
"B-E7BE5602-2F9B-E3","11608501","Active","2023-06-29","1","0","1","ID","OPEN","Yes","Yes","FLOWER"
"B-480A8929-57D5-97","11608502","Active","2023-06-29","1","0","1","ID","OPEN","No","No","FLOWER"
This is the JSON I get from the API:
{
"items": {
"recordsFiltered": 2,
"data": [{
"numOfIds": 1,
"productId": null,
"askOrgId": "Yes",
"orderId": 11608501,
"orgSelectionType": "FLOWER",
"batchCode": "B-E7BE5602-2F9B-E3",
"IDType": "OPEN",
"batchId": 413,
"creationDate": "2022-06-29",
"isOnline": "Yes",
"productName": null,
"batchProductArray": [{
"ID": 663255,
"TYPE": "PRODUCT",
"NAME": "SOME NAME"
}
],
"numOfUsedIDs": 0,
"redemptionMethod": "ID",
"askSSN": "No",
"askEmployeeId": "Yes",
"batchStatus": "Active",
"productType": null,
"expirationDate": "2023-06-29"
}, {
"numOfIds": 1,
"productId": null,
"askOrgId": "No",
"orderId": 11608502,
"orgSelectionType": "LEAF",
"batchCode": "B-480A8929-57D5-97",
"IDType": "OPEN",
"batchId": 414,
"creationDate": "2022-06-29",
"isOnline": "Yes",
"productName": null,
"batchProductArray": [{
"ID": 663255,
"TYPE": "PRODUCT",
"NAME": "Other Name"
}
],
"numOfUsedIDs": 0,
"redemptionMethod": "ID",
"askSSN": "No",
"askEmployeeId": "No",
"batchStatus": "Active",
"productType": null,
"expirationDate": "2023-06-29"
}
],
"draw": 1,
"recordsTotal": 2
}
}
I tried the code below, but it tells me that my JSON is undefined:
function downloadJSONAsCSV(endpoint) {
// Fetch JSON data from the endpoint
fetch(endpoint)
.then(response => response.json())
.then(jsonData => {
// Convert JSON data to CSV
let csvData = jsonToCsv(jsonData);
// Create a CSV file and allow the user to download it
let blob = new Blob([csvData], { type: 'text/csv' });
let url = window.URL.createObjectURL(blob);
let a = document.createElement('a');
a.href = url;
a.download = 'data.csv';
document.body.appendChild(a);
a.click();
})
.catch(error => console.error(error));
}
function jsonToCsv(jsonData) {
let csv = '';
// Get the headers
let headers = Object.keys(jsonData[0]);
csv += headers.join(',') + '\n';
// Add the data
jsonData.forEach(function(row) {
let data = headers.map(header => row[header]).join(',');
csv += data + '\n';
});
return csv;
}
Plus, I believe the code above will not produce the CSV in the format I need.
I believe the problem with your code is that you try to convert the root node of the JSON data to CSV instead of the data array. To fix this, change jsonToCsv(jsonData) to jsonToCsv(jsonData.items.data). Additionally, wrap each value in JSON.stringify inside the data-mapping function so the fields come out quoted. I've made the necessary changes to your code and attached them below:
function downloadJSONAsCSV(endpoint) {
// Fetch JSON data from the endpoint
fetch(endpoint)
.then(response => response.json())
.then(jsonData => {
// Convert JSON data to CSV
let csvData = jsonToCsv(jsonData.items.data); // Add .items.data
// Create a CSV file and allow the user to download it
let blob = new Blob([csvData], { type: 'text/csv' });
let url = window.URL.createObjectURL(blob);
let a = document.createElement('a');
a.href = url;
a.download = 'data.csv';
document.body.appendChild(a);
a.click();
})
.catch(error => console.error(error));
}
function jsonToCsv(jsonData) {
let csv = '';
// Get the headers
let headers = Object.keys(jsonData[0]);
csv += headers.join(',') + '\n';
// Add the data
jsonData.forEach(function (row) {
let data = headers.map(header => JSON.stringify(row[header])).join(','); // Add JSON.stringify statement
csv += data + '\n';
});
return csv;
}
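If you also need the quoted fields and a specific column order, as in your sample CSV, one possible sketch is a column-driven variant of jsonToCsv. The columns list below is only a placeholder; fill it with the JSON field names you actually want, in order:
// Placeholder column list - replace with the JSON field names you want, in order
const columns = ['batchCode', 'orderId', 'batchStatus', 'expirationDate'];
function jsonToCsvOrdered(rows) {
    // Quote every value and escape embedded double quotes
    const quote = value => '"' + String(value ?? '').replace(/"/g, '""') + '"';
    const header = columns.map(quote).join(',');
    const lines = rows.map(row => columns.map(col => quote(row[col])).join(','));
    return [header, ...lines].join('\n');
}
You would then call jsonToCsvOrdered(jsonData.items.data) in place of jsonToCsv.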
My file "mainfile.json" in blob storage has the following content :
[
{ "name": "abc", "id": "01", "location": "random" },
{ "month": "Jan", "project": "50%", "training": "50%" },
]
The data I'm trying to add to it is this:
{"month": "Feb", "project":"60%", "training":"40%"}
I want it to be something like this:
[
{ "name": "abc", "id": "01", "location": "random" },
{ "month": "Jan", "project": "50%", "training": "50%" },
{"month": "Feb", "project":"60%", "training":"40%"}
]
I'm using the @azure/storage-blob SDK to do this, and here's my code below:
const blobServiceClient = require("./getCred");
const fs = require("fs");
async function appendBlob() {
const containerClient =
blobServiceClient.getContainerClient("containername");
//gets the main content from a blob
const blobClient = containerClient.getBlobClient("mainfile.json");
//the new appended content gets written into this blob
const blockBlobClient = containerClient.getBlockBlobClient("data.json");
// the data that needs to be appended
const data = fs.readFileSync("new-data.json", "utf-8"); // readFileSync is synchronous and takes no callback
// Get blob content from position 0 to the end
// In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody
const downloadBlockBlobResponse = await blobClient.download();
const downloaded = (
await streamToBuffer(downloadBlockBlobResponse.readableStreamBody)
).toString();
const append = await appendingFile(downloaded, data);
const uploadBlobResponse = await blockBlobClient.upload(
append,
append.length
);
console.log(
`Uploaded block blob to testing.json successfully`,
uploadBlobResponse.requestId
);
// [Node.js only] A helper method used to read a Node.js readable stream into a Buffer
async function streamToBuffer(readableStream) {
return new Promise((resolve, reject) => {
const chunks = [];
readableStream.on("data", (data) => {
chunks.push(data instanceof Buffer ? data : Buffer.from(data));
});
readableStream.on("end", () => {
resolve(Buffer.concat(chunks));
});
readableStream.on("error", reject);
});
}
async function appendingFile(content, toBeAdded) {
return new Promise((resolve, reject) => {
let temp = content.concat(toBeAdded);
console.log(temp);
resolve(temp);
reject(new Error("Error occurred"));
});
}
}
But I get the following as the output:
[
{
"name": "KK",
"id": "01",
"location": "chennai"
},
{
"month": "December",
"project": "50%",
"training": "50%"
}
]
{
"month": "January",
"adaptive-cards": "50%",
"azure-func-app": "50%"
}
My entire approach could be wrong as I'm new to coding. Please help me with this. Thanks in advance.
There's nothing wrong with your code and it is working properly.
The issue is with your understanding of blob storage. An Azure storage blob (any kind of blob - block, append, or page) does not know that you are trying to add an element to a JSON array. To blob storage, it is simply a chunk of bytes.
What you need to do is read the blob into a string, use JSON.parse to turn that string into an array, and push the new data into that array. Once you have the updated array, convert it back to a string with JSON.stringify and re-upload that string (i.e. overwrite the blob).
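A minimal sketch of that approach, reusing the names from your snippet (streamToBuffer is your existing helper; the container and file names are the ones from your question):
async function appendToJsonBlob(newEntry) {
    const containerClient = blobServiceClient.getContainerClient("containername");
    const blobClient = containerClient.getBlockBlobClient("mainfile.json");
    // Download the current blob content and parse it into an array
    const downloadResponse = await blobClient.download();
    const existing = JSON.parse(
        (await streamToBuffer(downloadResponse.readableStreamBody)).toString()
    );
    // Add the new element and serialize the whole array again
    existing.push(newEntry);
    const updated = JSON.stringify(existing, null, 2);
    // Overwrite the blob with the updated JSON string
    await blobClient.upload(updated, Buffer.byteLength(updated));
}
Calling appendToJsonBlob({ month: "Feb", project: "60%", training: "40%" }) would then produce the array you described. Note that JSON.parse is strict, so the trailing comma in your example file content would need to be removed first.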
I have a problem with my code: I can't find a way to insert the id of the recipes I already have into a new request so I can display each recipe's calories in the HTML. All I want is to use a dynamic id in the API URL instead of the static 634091, because a new set of recipes (with new ids) is generated on every page refresh, so a request with a hard-coded id no longer matches and the calories are not displayed. If it is not clear I can provide additional information; thank you so much for your time.
js:
setTimeout(function () {
const api_url_calories =
"https://api.spoonacular.com/recipes/634091/nutritionWidget.json";
// Defining async function
async function getapi(url) {
// Storing response
const response = await fetch(url);
// Storing data in form of JSON
var data = await response.json();
console.log(data);
data.calories.forEach((obj) => {
/*
create new DOM elements
*/
let div = document.createElement("div");
let calories = document.createElement("p");
div.append(calories);
/*
assign content from api data
*/
calories.innerHTML = obj.calories;
/*
add to DOM
*/
displayRecipes.append(div);
});
// if (response) {
// console.log("data here");
// }
}
getapi(api_url_calories);
}, 100);
HTML:
<body>
<div id="displayRecipes"></div>
</body>
The response from the API for the calories information is this:
{
"calories": "316",
"carbs": "49g",
"fat": "12g",
"protein": "3g",
"bad": [
{
"name": "Calories",
"amount": "316",
"indented": false,
"percentOfDailyNeeds": 15.84
},
JS from the recipes request where the id comes from:
const api_url =
"https://api.spoonacular.com/recipes/random?number=3";
// Defining async function
async function getapi(url) {
// Storing response
const response = await fetch(url);
// Storing data in form of JSON
var data = await response.json();
console.log(data);
data.recipes.forEach((obj) => {
/*
create new DOM elements
*/
let div = document.createElement("div");
let h1 = document.createElement("h1");
let image = new Image();
let cuisines = document.createElement("p");
let id = document.createElement("p");
div.append(h1);
div.append(image);
div.append(cuisines);
div.append(id);
/*
assign content from api data
*/
h1.innerHTML = obj.title;
image.src = obj.image;
for (let i = 0; i < 100; i++) {
cuisines.innerHTML = obj.cuisines;
}
// cuisines.innerHTML = obj.cuisines[0];
id.innerHTML = obj.id;
/*
add to DOM
*/
displayRecipes.append(div);
});
// if (response) {
// console.log("data here");
// }
}
// Calling that async function
getapi(api_url);
This is what the recipes data looks like; it has an id key.
"recipes": [
{
"vegetarian": false,
"vegan": false,
"glutenFree": false,
"dairyFree": false,
"veryHealthy": false,
"cheap": false,
"veryPopular": false,
"sustainable": false,
"weightWatcherSmartPoints": 1,
"gaps": "no",
"lowFodmap": false,
"aggregateLikes": 11,
"spoonacularScore": 21.0,
"healthScore": 1.0,
"creditsText": "Foodista.com – The Cooking Encyclopedia Everyone Can Edit",
"license": "CC BY 3.0",
"sourceName": "Foodista",
"pricePerServing": 12.65,
"extendedIngredients": [
{
"id": 1123,
"aisle": "Milk, Eggs, Other Dairy",
"image": "egg.png",
"consistency": "solid",
"name": "eggs",
"nameClean": "egg",
"original": "3 eggs, slightly beaten",
"originalString": "3 eggs, slightly beaten",
"originalName": "eggs, slightly beaten",
"amount": 3.0,
"unit": "",
"meta": [
"slightly beaten"
],
"metaInformation": [
"slightly beaten"
],
"measures": {
"us": {
"amount": 3.0,
"unitShort": "",
"unitLong": ""
},
"metric": {
"amount": 3.0,
"unitShort": "",
"unitLong": ""
}
}
},
You can achieve this with concatenation:
const recipe_id = "YOUR_DESIRED_ID";
const api_url_calories =
"https://api.spoonacular.com/recipes/" + recipe_id + "/nutritionWidget.json";
I am trying to create a bubble chart that shows how many comments have been made in each file. However, my API doesn't provide all of the information at a single endpoint; it's more like a repository where I have to fetch data from several different endpoints. How should I approach this kind of thing?
const getData = async () => {
console.log("Processing");
const request = await fetch("http://25.102.238.73/api/repos/RxJava/");
const data = await request.json();
return data;
};
console.log(getData());
var popCanvas = document.getElementById("myChart");
Chart.defaults.global.defaultFontFamily = "Lato";
Chart.defaults.global.defaultFontSize = 18;
// y = Line of code in the file
// x = Line of comments in the file
var popData = {
datasets: [{
label: ['How many comments have been made in the code?'],
data: [{
x:getData(),
y:getData(),
r:getData()
},
],
backgroundColor: "#FF9966"
}]
};
var bubbleChart = new Chart(popCanvas, {
type: 'bubble',
data: popData
});
</script>
There are 2 problems in this code.
Problem 1: mixing async and sync code in one place. Here is an example snippet (for testing I removed the fetch and replaced it with its data, because your fetch does not work in a snippet here).
Compare the results of DATA1 and DATA2. The first one is sync, the second is async. You can see that the sync variant does not work, so you need the async one (using .then).
const getData = async() => {
//console.log("Processing");
//const request = await fetch("http://46.101.95.202/api/repos/RxJava/");
//const data = await request.json();
const data = {
"key": "RxJava", "name": "RxJava", "language": "java",
"path": "/RxJava_c", "links": { "self": "/api/repos/RxJava/" },
"contents": [{
"path": "/RxJava_c/docs", "name": "docs",
"type": "directory",
"follow": "/api/repos/RxJava?data_path=/RxJava_c/docs"
},
{
"path": "/RxJava_c/gradle", "name": "gradle",
"type": "directory",
"follow": "/api/repos/RxJava?data_path=/RxJava_c/gradle"
},
{
"path": "/RxJava_c/src", "name": "src",
"type": "directory",
"follow": "/api/repos/RxJava?data_path=/RxJava_c/src"
}
]};
return data;
};
console.log("DATA1 =", getData());
getData().then((mydata) => {
console.log("DATA2 =", mydata);
});
Problem 2: even if we change the sync code to async, the data itself is not in a shape Chart.js can use. Look:
const getData = async() => {
//console.log("Processing");
//const request = await fetch("http://46.101.95.202/api/repos/RxJava/");
//const data = await request.json();
const data = {
"key": "RxJava", "name": "RxJava", "language": "java",
"path": "/RxJava_c", "links": { "self": "/api/repos/RxJava/" },
"contents": [{
"path": "/RxJava_c/docs", "name": "docs",
"type": "directory",
"follow": "/api/repos/RxJava?data_path=/RxJava_c/docs"
},
{
"path": "/RxJava_c/gradle", "name": "gradle",
"type": "directory",
"follow": "/api/repos/RxJava?data_path=/RxJava_c/gradle"
},
{
"path": "/RxJava_c/src", "name": "src",
"type": "directory",
"follow": "/api/repos/RxJava?data_path=/RxJava_c/src"
}
]};
return data;
};
getData().then((mydata) => {
//console.log(mydata);
var popCanvas = document.getElementById("myChart");
Chart.defaults.global.defaultFontFamily = "Lato";
Chart.defaults.global.defaultFontSize = 18;
// y = Line of code in the file
// x = Line of comments in the file
var popData = {
datasets: [{
label: ['How many comments have been made in the code?'],
data: [{
x: mydata,
y: mydata,
r: mydata
},
],
backgroundColor: "#FF9966"
}]
};
var bubbleChart = new Chart(popCanvas, {
type: 'bubble',
data: popData
});
});
<script src="https://cdn.jsdelivr.net/npm/chart.js#2.8.0"></script>
<canvas id="myChart">
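To actually turn the repository API into bubble-chart points, one possible approach is to follow each item's follow link and map the responses into {x, y, r} objects. This is only a sketch: the linesOfComments and linesOfCode fields are assumptions about what your per-file endpoints return, and the follow URLs are assumed to resolve against the same host.
const buildPoints = async () => {
    const repo = await (await fetch("http://25.102.238.73/api/repos/RxJava/")).json();
    // Fetch every child endpoint in parallel
    const children = await Promise.all(
        repo.contents.map((item) => fetch(item.follow).then((r) => r.json()))
    );
    // Map each response into the {x, y, r} shape Chart.js bubble charts expect
    // (linesOfComments / linesOfCode are assumed field names - adjust to your API)
    return children.map((file) => ({
        x: file.linesOfComments,
        y: file.linesOfCode,
        r: 10
    }));
};
buildPoints().then((points) => {
    new Chart(document.getElementById("myChart"), {
        type: "bubble",
        data: {
            datasets: [{
                label: "How many comments have been made in the code?",
                data: points,
                backgroundColor: "#FF9966"
            }]
        }
    });
});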
I am trying to replace/change the values in an object, but I can't work out how it's done, or if it can even be done.
I'm trying to add https://SiteName.com to the start of each of the values, so that each one becomes something like https://SiteName.com\/asset\/image\/map\/map-grass.svg
var assets = [{
"name": "\/asset\/image\/map\/map-grass.svg",
"url": "\/asset\/image\/map\/map-grass.svg"
}, {
"name": "\/asset\/image\/map\/map-stone.svg",
"url": "\/asset\/image\/map\/map-stone.svg"
}]
Object.keys(assets).forEach(key => {
const val = assets[key];
console.log(val)
});
Try this:
var assets = [{
"name": "\/asset\/image\/map\/map-grass.svg",
"url": "\/asset\/image\/map\/map-grass.svg"
}, {
"name": "\/asset\/image\/map\/map-stone.svg",
"url": "\/asset\/image\/map\/map-stone.svg"
}]
let url = "https://SiteName.com";
Object.keys(assets).forEach(key => {
const val = assets[key];
val.name = url + val.name;
val.url = url + val.url;
});
console.log(assets)
You need a nested loop (or forEach) here - one to go over the elements of the assets array, and then, for each object in there, go over all its properties:
var assets = [{
"name": "\/asset\/image\/map\/map-grass.svg",
"url": "\/asset\/image\/map\/map-grass.svg"
}, {
"name": "\/asset\/image\/map\/map-stone.svg",
"url": "\/asset\/image\/map\/map-stone.svg"
}]
assets.forEach(o => {
Object.keys(o).forEach(key => {
o[key] = 'https://SiteName.com' + o[key];
})
});
console.log(assets);
I need to convert a large CSV data set to JSON; however, the output should be a JSON dictionary like this:
var products = {
"crystal": {
"description": "This is a crystal",
"price": "2.95"
},
"emerald": {
"description": "This is a emerald",
"price": "5.95"
}
};
This is what the CSV table would look like:
name,description,price
crystal,This is a crystal,2.95
emerald,This is a emerald,5.95
I am using a script referenced here to generate the JSON:
var csv = require('csv')
var fs = require('fs')
var f = fs.createReadStream('Fielding.csv')
var w = fs.createWriteStream('out.txt')
w.write('[');
csv()
.from.stream(f, {columns:true})
.transform(function(row, index) {
return (index === 0 ? '' : ',\n') + JSON.stringify(row);
})
.to.stream(w, {columns: true, end: false})
.on('end', function() {
w.write(']');
w.end();
});
However, the output from that script comes out in this format:
[
{
"name": "crystal",
"description": "This is a crystal",
"price": "2.95"
},
{
"name": "emerald",
"description": "This is a emerald",
"price": "5.95"
}
]
How would I modify the script to get my desired "dictionary" format?
All you need to do is loop over the array and use item.name as the key for your dictionary object:
var products ={};
data.forEach(function(item){
products[item.name] = item;
});
This will leave the name property in each item, but that shouldn't be an issue.
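If you do want to drop it, a small variant using object rest destructuring works (a sketch):
var products = {};
data.forEach(function (item) {
    const { name, ...rest } = item; // pull the name out, keep everything else
    products[name] = rest;
});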
I found the csv parser library most useful:
var csvText=`status,path,name,ext,checksum,size,document_service_id,document_service_path,message
success,./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE1.txt,expE1.txt,txt,38441337865069eabae7754b29bb43e1,414984,8269f7e3-3221-49bb-bb5a-5796cf208fd1,/neuroinftest/20170215/expE1.txt,
success,./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE10.txt,expE10.txt,txt,f27e46979035706eb0aaf58c26e09585,368573,2c94ed19-29c9-4660-83cf-c2148c3d6f61,/neuroinftest/20170215/expE10.txt,
success,./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE2.txt,expE2.txt,txt,e1040d9546423c823944120de0e5c46c,333308,b3898f5d-1058-4cf3-acf9-76759117b810,/neuroinftest/20170215/expE2.txt,
`
var csv = require('csv');
csv.parse(csvText, {columns: true}, function(err, data){
console.log(JSON.stringify(data, null, 2));
});
In the csvText variable I have my comma-separated file, with the first line serving as a header. I use the parse function and pass {columns: true} to indicate that the first line holds the headers. The second parameter of the callback (data) holds the parsed rows, with keys taken from the headers and values from the corresponding CSV cells. I use JSON.stringify to print it nicely, and the result looks like this (it puts the rows into an array):
[
{
"status": "success",
"path": "./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE1.txt",
"name": "expE1.txt",
"ext": "txt",
"checksum": "38441337865069eabae7754b29bb43e1",
"size": "414984",
"document_service_id": "8269f7e3-3221-49bb-bb5a-5796cf208fd1",
"document_service_path": "/neuroinftest/20170215/expE1.txt",
"message": ""
},
{
"status": "success",
"path": "./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE10.txt",
"name": "expE10.txt",
"ext": "txt",
"checksum": "f27e46979035706eb0aaf58c26e09585",
"size": "368573",
"document_service_id": "2c94ed19-29c9-4660-83cf-c2148c3d6f61",
"document_service_path": "/neuroinftest/20170215/expE10.txt",
"message": ""
},
{
"status": "success",
"path": "./15-02-2017_17-11/d77c7886-ffe9-40f2-b2fe-e68410d07891//expE2.txt",
"name": "expE2.txt",
"ext": "txt",
"checksum": "e1040d9546423c823944120de0e5c46c",
"size": "333308",
"document_service_id": "b3898f5d-1058-4cf3-acf9-76759117b810",
"document_service_path": "/neuroinftest/20170215/expE2.txt",
"message": ""
}
]
UPD: This array can easily be turned into the object you need with reduce:
var res_obj = data.reduce(function(acc, cur, i) {
acc[cur.name] = cur;
return acc;
}, {});
In my case I use the name property as a key. Make sure it's unique.
I think something like this would work:
var products_arr = [{"name":"crystal","description":"This is a crystal","price":"2.95"},
{"name":"emerald","description":"This is a emerald","price":"5.95"}]
var products = {};
for (var i = 0, l = products_arr.length ; i < l ; ++i) {
var x = products_arr[i];
var name = x.name
delete x.name; // deletes name property from JSON object
products[name] = x;
}
This will output:
{
"crystal": {
"description": "This is a crystal",
"price": "2.95"
},
"emerald": {
"description": "This is a emerald",
"price": "5.95"
}
}
If you would like to modify your specific code, you could change the line
return (index === 0 ? '' : ',\n') + JSON.stringify(row);
to
var clonedRow = JSON.parse(JSON.stringify(row));
var key = clonedRow['name'];
delete clonedRow['name'];
var newRow = {};
newRow[key] = clonedRow;
return (index === 0 ? '' : ',\n') + JSON.stringify(newRow);
This creates a new object for each row, modifying the structure according to your requirement.
Your best bet is to use PapaParse, a powerful CSV parser and serializer. It supports streams, various string encodings, and header rows, and it's fast.
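A minimal sketch with PapaParse, assuming the same name/description/price columns as above (npm install papaparse):
const Papa = require("papaparse");
const csvText = "name,description,price\ncrystal,This is a crystal,2.95\nemerald,This is a emerald,5.95";
// Parse with the first row as headers, then key the rows by name
const { data } = Papa.parse(csvText, { header: true, skipEmptyLines: true });
const products = {};
data.forEach((row) => {
    const { name, ...rest } = row;
    products[name] = rest;
});
console.log(products); // { crystal: { description, price }, emerald: { description, price } }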