JSON to Excel in JavaScript

I am trying to save JSON data to an Excel .xlsx file. The JSON looks like this (the value names vary; this is just an example):
{"hum_in":[{"ts":1646034284215,"value":"22"},{"ts":1646033983313,"value":"22"}]}
I tried converting and downloading using this code:
const EXCEL_TYPE = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=UTF-8';
const EXCEL_EXTENSION = '.xlsx';
function downloadAsExcel(data) {
const worksheet = XLSX.utils.json_to_sheet(data);
const workbook = {
Sheets: {
'data': worksheet
},
SheetNames: ['data']
};
const excelBuffer = XLSX.write(workbook, { bookType: 'xlsx', type: 'array' });
console.log(excelBuffer);
saveAsExcel(excelBuffer);
}
function saveAsExcel(buffer) {
const data = new Blob([buffer], { type: EXCEL_TYPE });
saveAs(data, "Export_" + new Date().getTime() + EXCEL_EXTENSION);
}
and then calling it like this:
downloadAsExcel(json);
It returned an error:
TypeError: r.forEach is not a function
at rb (xlsx.full.min.js:23:18346)
at Object.tb [as json_to_sheet] (xlsx.full.min.js:23:19000)
at downloadAsExcel (app.js:233:34)
at app.js:112:25
Does anyone have any idea what's gone wrong?

The json_to_sheet function takes in an Array, whereas your data argument is an Object. Instead pass it the hum_in property to target the internal data array:
const worksheet = XLSX.utils.json_to_sheet(data.hum_in);
Here's a more complete example including support for multiple keys in the data object:
const data = {"hum_in":[
{"ts":1646034284215,"value":"22"},
{"ts":1646033983313,"value":"22"}
]};
function generateAsExcel(data) {
try {
const workbook = XLSX.utils.book_new();
for (let key in data) {
const worksheet = XLSX.utils.json_to_sheet(data[key]);
XLSX.utils.book_append_sheet(workbook, worksheet, key);
}
let res = XLSX.write(workbook, { type: "array" });
console.log(`${res.byteLength} bytes generated`);
} catch (err) {
console.log("Error:", err);
}
}
document.getElementById("gen").addEventListener("click",
() => generateAsExcel(data));
<script type="text/javascript" src="//cdn.jsdelivr.net/npm/xlsx/dist/xlsx.full.min.js"></script>
<button id="gen">Generate</button>
To combine all the data keys into a dataset to generate a single worksheet from, you can use something like this:
const data = {
"hum_in":[ {"ts":1646034284215,"value":"22"}, {"ts":1646033983313,"value":"22"} ],
"wind_dir":[ {"ts":1646034284215,"value":"123"}, {"ts":1646033983313,"value":"125"} ]
};
let merged = Object.keys(data).reduce((merged, key) => {
for (const record of data[key]) { merged.push(Object.assign({ key }, record)); }
return merged;
}, []);
console.log(merged);
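From there, the merged array can be handed to json_to_sheet to build the single worksheet; a minimal sketch reusing the same XLSX calls as generateAsExcel above:
const workbook = XLSX.utils.book_new();
XLSX.utils.book_append_sheet(workbook, XLSX.utils.json_to_sheet(merged), "data");
const excelBuffer = XLSX.write(workbook, { bookType: "xlsx", type: "array" });
// excelBuffer can then be passed to saveAsExcel exactly as in the question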

Related

How to convert to JSON from Excel in JS - I have a problem with split lines

I have an Excel file that I uploaded into React; I'm using the XLSX library.
My original Excel file was shown as a screenshot in the post.
I am trying to get something like this:
item = [{
  "CJ Order Number": 14,
  "Your Order Number": 100101,
  "SKU": {
    "CJNSXZHL": 1,
    "CJNSX": 1
  },
},
{
  "CJ Order Number": 15,
  "Your Order Number": 100102,
  "SKU": {
    "CJNSXZN": 1, // 1 is the quantity
    "CJNS": 1
  },
},
{
  "CJ Order Number": 16,
  "Your Order Number": 100103,
  "SKU": {
    "CJNSX": 1,
    "CJNS": 1
  },
},
{
  "CJ Order Number": 17,
  "Your Order Number": 100103,
  "SKU": {
    "CJNSTX": 2
  },
}]
but I got something like this:
[{CJ Order Number: '14',
Your Order Number: '100101',
SKU: 'CJNSXZHL',
CJ Quantity: '1'},
{SKU: 'CJNSXL',
CJ Quantity: '1'}]
My code is:
const [items, setItems] = useState([]);
const readExcel = (file) => {
const promise = new Promise((resolve, reject) => {
const fileReader = new FileReader();
fileReader.readAsArrayBuffer(file);
fileReader.onload = (e) => {
const bufferArray = e.target.result;
const wb = XLSX.read(bufferArray, { type: "buffer" });
const wsname = wb.SheetNames[0];
const ws = wb.Sheets[wsname];
const data = XLSX.utils.sheet_to_json(ws);
resolve(data);
};
fileReader.onerror = (error) => {
reject(error);
};
});
promise.then((d) => {
setItems(d);
});
}
I compare the products a customer wants to buy against what I have in my inventory.
A person may order two products under a single order number, so I want to know how to process the Excel file so that every duplicate line is assigned to the same person, rather than being treated as a separate line, which would be a mistake.
I solved the problem using the following code; I hope it helps others who have had difficulty with this. It continues from the code above:
const [items, setItems] = useState([]);
// ...continuing inside the promise.then from the reader above:
setItems(d);
const arr = []; // grouped orders
for (let i = 0; i < items.length; i++) {
  try {
    if (items[i].Orders) {
      // a row with an order number starts a new grouped record
      arr.push({
        "OrderNumber": items[i].Orders,
        "Address": items[i].Address,
        "SKU": [items[i].SKU],
        "Quantity": [parseInt(items[i].Quantity)],
        "Province": items[i].Province,
        "Product_Name": [items[i].Product_Name],
        "Name": items[i].Name,
        "City": items[i].City,
        "ZipCode": items[i].ZipCode,
        "Phone": items[i].Phone,
      });
    } else {
      // a row without an order number belongs to the previous order
      arr[arr.length - 1].SKU.push(items[i].SKU);
      arr[arr.length - 1].Quantity.push(parseInt(items[i].Quantity));
      arr[arr.length - 1].Product_Name.push(items[i].Product_Name);
    }
  } catch (err) {
    console.log("err", err);
  }
}
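The same grouping can also be expressed with reduce; a sketch where the column names Orders, SKU, Quantity and Product_Name are taken from the snippet above:
const grouped = items.reduce((acc, row) => {
  if (row.Orders) {
    // a row with an order number starts a new grouped order
    acc.push({ OrderNumber: row.Orders, SKU: [row.SKU], Quantity: [parseInt(row.Quantity)], Product_Name: [row.Product_Name] });
  } else if (acc.length) {
    // a continuation row is attached to the most recent order
    const last = acc[acc.length - 1];
    last.SKU.push(row.SKU);
    last.Quantity.push(parseInt(row.Quantity));
    last.Product_Name.push(row.Product_Name);
  }
  return acc;
}, []);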

string to bufferstream not always writing data

I have a cloud function receiving a JSON string on a Pub/Sub topic.
The goal is to extract some data into a new JSON string, parse it as JSONL, and finally stream it to Google Cloud Storage.
I notice that sometimes the files contain data and sometimes they do not.
The Pub/Sub side is working fine and data is coming into this cloud function just fine.
I tried adding async/await where it seemed to fit, but I suspect the problem has to do with the buffer stream; both are topics I have trouble getting my head around.
What could be the issue?
const stream = require('stream');
const { Storage } = require('@google-cloud/storage');
// Initiate the source
const bufferStream = new stream.PassThrough();
// Creates a client
const storage = new Storage();
// save stream to bucket
const toBucket = (message, filename) => {
// Write your buffer
bufferStream.end(Buffer.from(message));
const myBucket = storage.bucket(process.env.BUCKET);
const file = myBucket.file(filename);
// Pipe the 'bufferStream' into a 'file.createWriteStream' method.
bufferStream.pipe(file.createWriteStream({
validation: 'md5',
}))
.on('error', (err) => { console.error(err); })
.on('finish', () => {
// The file upload is complete.
console.log(`${filename} is uploaded`);
});
};
// extract correct fields
const extract = (entry) => ({
id: entry.id,
status: entry.status,
date_created: entry.date_created,
discount_total: entry.discount_total,
discount_tax: entry.discount_tax,
shipping_total: entry.shipping_total,
shipping_tax: entry.shipping_tax,
total: entry.total,
total_tax: entry.total_tax,
customer_id: entry.customer_id,
payment_method: entry.payment_method,
payment_method_title: entry.payment_method_title,
transaction_id: entry.transaction_id,
date_completed: entry.date_completed,
billing_city: entry.billing.city,
billing_state: entry.billing.state,
billing_postcode: entry.billing.postcode,
coupon_lines_id: entry.coupon_lines.id,
coupon_lines_code: entry.coupon_lines.code,
coupon_lines_discount: entry.coupon_lines.discount,
coupon_lines_discount_tax: entry.coupon_lines.discount_tax,
});
// format json to jsonl
const format = async (message) => {
let jsonl;
try {
// extract only the necessary
const jsonMessage = await JSON.parse(message);
const rows = await jsonMessage.map((row) => {
const extractedRow = extract(row);
return `${JSON.stringify(extractedRow)}\n`;
});
// join all lines as one string with no join symbol
jsonl = rows.join('');
console.log(jsonl);
} catch (e) {
console.error('jsonl conversion failed');
}
return jsonl;
};
exports.jsonToBq = async (event, context) => {
const message = Buffer.from(event.data, 'base64').toString();
const { filename } = event.attributes;
console.log(filename);
const jsonl = await format(message, filename);
toBucket(jsonl, filename);
};
It's fixed by moving the bufferStream const into the toBucket function, so each message gets a fresh PassThrough stream instead of reusing one that has already been ended.
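A sketch of that fix (returning a promise here is an extra touch so the caller can await the upload before the function instance exits, not something from the original):
const toBucket = (message, filename) => new Promise((resolve, reject) => {
  // fresh stream per call, instead of a single module-level one ended on the first message
  const bufferStream = new stream.PassThrough();
  bufferStream.end(Buffer.from(message));
  const file = storage.bucket(process.env.BUCKET).file(filename);
  bufferStream.pipe(file.createWriteStream({ validation: 'md5' }))
    .on('error', reject)
    .on('finish', () => {
      console.log(`${filename} is uploaded`);
      resolve();
    });
});
In jsonToBq the call can then become await toBucket(jsonl, filename); so the function does not return before the upload finishes.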

Vue.js excel to json

I tried to read an Excel file with Vue.js, but once I read the file the memory starts to skyrocket to around 5 GB of RAM, even though the Excel file is fairly small. Please help; I need to convert the file to JSON.
In the Vue method that handles the Excel file I tried every type option I saw in the documentation, but each one gives a different error.
I saw a similar question here but still could not solve this.
When I tried:
base64: "TypeError: input.replace is not a function"
binary: "TypeError: x.charCodeAt is not a function"
string: "TypeError: data.match is not a function"
array: is the one that cause the memory to get into 5gb
Also, when I tried to use the new FileReader as presented in the documentation, the reader.onload function never ran.
In the actual template I tried two things.
When I use the buffer it seems to work, but all the functions return an empty array, as if the file were empty, which it is not.
Both ways did the same thing.
<v-file-input
v-on:change="displayFile($event)"
v-model="file">
</v-file-input>
<input type="file" name="xlfile" id="xlf" v-on:change="displayFile($event)" />
displayFile: function (event) {
// console.log(event.target.files[0])
// const file = event.target.files[0]
// const workbook = XLSX.read(file, {
// type: 'string'
// })
// console.log(workbook, workbook.SheetNames)
// const res = XLSX.read(file)
// console.log(res)
// const res = XLSX.read(this.file)
// console.log(res)
console.log(this.file)
this.file.text().then(text => {
const fileType = this.file.type
console.log(fileType)
// this.PropceseMethod(this.file, fileType)
})
const reader = new FileReader()
reader.onload = (data) => {
console.log('HERE')
console.log(data)
const workbook = XLSX.read(data, {
type: 'buffer'
})
console.log(workbook)
workbook.SheetNames.forEach(function (sheetName) {
console.log(sheetName)
console.log(workbook.Sheets[sheetName])
// Here is your object
const XLRowObject = XLSX.utils.sheet_to_row_object_array(workbook.Sheets[sheetName])
console.log(XLSX.utils.sheet_to_json(workbook.Sheets[sheetName]))
console.log(XLRowObject)
const jsonObject = JSON.stringify(XLRowObject)
console.log(jsonObject)
})
}
reader.onerror = function (ex) {
console.log(ex)
}
reader.readAsText(this.file)
}
To manage this I had to change the way I read the file. When I used readAsBinaryString it worked, paired with type: 'binary'. This function reads only the first sheet:
fileToJson (e) {
const file = e.target.files[0]
/* Boilerplate to set up FileReader */
const reader = new FileReader()
reader.onload = (e) => {
/* Parse data */
const bstr = e.target.result
const wb = XLSX.read(bstr, { type: 'binary' })
/* Get first worksheet */
const wsname = wb.SheetNames[0]
const ws = wb.Sheets[wsname]
/* Convert array of arrays */
const data = XLSX.utils.sheet_to_json(ws, { header: 1 })
/* Update state */
this.data = data
const header = data.shift()
}
reader.readAsBinaryString(file)
}
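If every sheet is needed rather than just the first one, a small variation inside the same reader.onload could loop over wb.SheetNames (a sketch, assuming the same setup as above):
/* Convert every worksheet, keyed by its sheet name */
const allSheets = {}
wb.SheetNames.forEach((wsname) => {
  allSheets[wsname] = XLSX.utils.sheet_to_json(wb.Sheets[wsname], { header: 1 })
})
this.data = allSheets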
This code worked for me in a Vue CLI App:
// Important that import statement must get the full.min.js file only.
import XLSX from '../../../node_modules/xlsx/dist/xlsx.full.min.js'
var reader = new FileReader()
reader.onload = function (e) {
var data = e.target.result
var workbook = XLSX.read(data, { type: 'binary' })
let sheetName = workbook.SheetNames[0]
let worksheet = workbook.Sheets[sheetName]
let rowObject = XLSX.utils.sheet_to_row_object_array(worksheet)
const finalJsonData = JSON.stringify(rowObject, undefined, 4)
console.log(finalJsonData)
}
reader.readAsBinaryString(this.excelFile)
With my final JSON Output as:
[
{
"email": "test5#test.com",
"password": "password",
"full_name": "Some Name 5",
"mobile": 9897675463
},
{
"email": "test6#test.com",
"password": "password",
"full_name": "Some Name 6",
"mobile": 9897675463
},
...
...
]
And my Excel file contained the corresponding rows (shown as a screenshot in the original post).

How to output TreeModel Js model as JSON

So I'm trying to update some IDs in a categories tree using TreeModel JS.
After editing, I would like to dump the tree to a file in JSON format, but when I output it, other keys from TreeModel get output as well.
How could I output the edited tree as JSON (model only)?
I managed to replace the other keys' values with null, and so far I have this:
const axios = require('axios')
const TreeModel = require('tree-model')
const fs = require('fs')
const url = 'https://my-api-uri-for-categories'
const dumpPath = `${process.cwd()}/data/test/categories.json`
const getCategories = async () => {
try {
const response = await axios.get(url)
return response.data.categories
} catch (error) {
console.log('Error reading categories', error)
}
}
const dumpJsonTofile = data => {
try {
console.log('Dumping to file')
console.log(data)
fs.writeFileSync(
dumpPath,
JSON.stringify(data, (k, v) => {
if (k === 'parent' || k === 'config' || k === 'children') return null
else return v
}),
'utf8'
) // write it back
} catch (error) {
console.log('Error dumping categories', error)
}
}
const scraping = async category => {
try {
const response = await axios.get(category.url)
const document = response.data
const json = document.match(/{"searchTerm"(.*);/g)[0]
const data = JSON.parse(json.replace(';', ''))
return data
} catch (error) {
console.log(`Error while scraping category: ${category.name}`, error)
}
}
async function run() {
const categories = await getCategories()
const categoriesTree = new TreeModel({
childrenPropertyName: 'items',
})
const root = categoriesTree.parse({ id: 0, origin: {}, items: categories })
root.walk(async node => {
const category = node.model
console.log(`scraping category: ${category.name}...`)
if (!category.url) return console.log(`skipping (root?)...`)
const data = await scraping(category)
category.id = data.categoryId
})
dumpJsonTofile(root)
}
run()
but that still outputs a Node object like this:
{
"config":null,
"model":{},
"children":null
}
I need to output the whole tree, showing only the model key's value for each item.
Try JSON.stringify(root.model). A node's model property is the plain object you parsed (nested items included), so serializing it leaves out the Node wrapper keys such as config, children and parent.
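For example, dumpJsonTofile from the question could simply be given root.model, and the custom replacer is then no longer needed; a minimal sketch:
dumpJsonTofile(root.model)
// or write it directly to the same dumpPath:
fs.writeFileSync(dumpPath, JSON.stringify(root.model, null, 2), 'utf8')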

Write to a CSV in Node.js

I am struggling to find a way to write data to a CSV in Node.js.
There are several CSV plugins available; however, they only 'write' to stdout.
Ideally I want to write on a row-by-row basis using a loop.
You can use fs (https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback):
var dataToWrite;
var fs = require('fs');
fs.writeFile('form-tracking/formList.csv', dataToWrite, 'utf8', function (err) {
if (err) {
console.log('Some error occurred - file either not saved or corrupted file saved.');
} else{
console.log('It\'s saved!');
}
});
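Here dataToWrite is just the CSV text you assemble yourself; a minimal sketch with made-up rows:
// build the CSV string by joining each row's values with commas (hypothetical sample data)
var rows = [['id', 'name'], [1, 'Jane'], [2, 'John']];
var dataToWrite = rows.map(function (row) { return row.join(','); }).join('\n');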
The docs for node-csv-parser (npm install csv) specifically state that it can be used with streams (see fromStream, toStream). So it's not hard-coded to use stdout.
Several other CSV parsers also come up when you npm search csv -- you might want to look at them too.
Here is a simple example using csv-stringify to write a dataset that fits in memory to a csv file using fs.writeFile.
import stringify from 'csv-stringify';
import fs from 'fs';
let data = [];
let columns = {
id: 'id',
name: 'Name'
};
for (var i = 0; i < 10; i++) {
data.push([i, 'Name ' + i]);
}
stringify(data, { header: true, columns: columns }, (err, output) => {
if (err) throw err;
fs.writeFile('my.csv', output, (err) => {
if (err) throw err;
console.log('my.csv saved.');
});
});
If you want to use a loop as you say you can do something like this with Node fs:
let fs = require("fs")
let writeStream = fs.createWriteStream('/path/filename.csv')
someArrayOfObjects.forEach((someObject, index) => {
let newLine = []
newLine.push(someObject.stringPropertyOne)
newLine.push(someObject.stringPropertyTwo)
// ...push any further properties you need
writeStream.write(newLine.join(',')+ '\n', () => {
// a line was written to stream
})
})
writeStream.end()
writeStream.on('finish', () => {
console.log('finish write stream, moving along')
}).on('error', (err) => {
console.log(err)
})
In case you don't wanna use any library besides fs, you can do it manually.
const fs = require("fs")
// jsonObject is assumed to be an array of flat objects, one per row
let fileString = ""
let separator = ","
let fileType = "csv"
let file = `fileExample.${fileType}`
// header row from the keys of the first object
Object.keys(jsonObject[0]).forEach(value => fileString += `${value}${separator}`)
fileString = fileString.slice(0, -1)
fileString += "\n"
// one line per object, values joined by the separator
jsonObject.forEach(transaction => {
  Object.values(transaction).forEach(value => fileString += `${value}${separator}`)
  fileString = fileString.slice(0, -1)
  fileString += "\n"
})
fs.writeFileSync(file, fileString, 'utf8')
For those who prefer fast-csv:
const { writeToPath } = require('@fast-csv/format');
const path = `${__dirname}/people.csv`;
const data = [{ name: 'Stevie', id: 10 }, { name: 'Ray', id: 20 }];
const options = { headers: true, quoteColumns: true };
writeToPath(path, data, options)
.on('error', err => console.error(err))
.on('finish', () => console.log('Done writing.'));
In case you don't wanna use any library besides fs, you can do it manually. Moreover, you can filter the data however you want before writing it to the CSV file:
router.get('/apiname', (req, res) => {
const data = arrayOfObject; // you will get from somewhere
/*
// Modify old data (New Key Names)
let modifiedData = data.map(({ oldKey1: newKey1, oldKey2: newKey2, ...rest }) => ({ newKey1, newKey2, ...rest }));
*/
const path = './test'
writeToFile(path, data, (result) => {
// get the result from callback and process
console.log(result) // success or error
});
});
writeToFile = (path, data, callback) => {
fs.writeFile(path, JSON.stringify(data, null, 2), (err) => { // JSON.stringify(data, null, 2) helps you write the data line by line
if (!err) {
callback('success');
// successful
}
else {
callback('error');
// some error (catch this error)
}
});
}
This is the code that worked for me in NestJS:
import { Parser } from "json2csv";
import { appendFileSync } from "fs";
const csv = require('csvtojson');
// 'file' is the uploaded file object from the request; this runs inside an async method
const csvFilePath = process.cwd() + '/' + file.path;
let data = await csv().fromFile(csvFilePath); // read the CSV file into an array of JSON objects
/// * from here, how to write data back into a CSV *
data.push({
  label: value,
  // .......
});
const fields = [
  'field1', 'field2', // ...
];
const parser = new Parser({ fields, header: false }); // keep header: false if you don't want a header row, otherwise remove it
const output = parser.parse(data);
appendFileSync('./filename.csv', `${output}\n`); // remove \n if you don't want a newline at the end
