I have the following code which works well, but I'd like it to write the output to a CSV file in the folder I'm in. I'm running the JS in Node. Big thanks!
var jsonexport = require('jsonexport');

var json = [{
  uniq_id: ['test'],
  product_url: ['http://www.here.com'],
  manufacturer: ['Disney']
}];

jsonexport(json, function(err, csv) {
  if (err) return console.log(err);
  console.log(csv);
});
Note: jsonexport is a JSON-to-CSV converter.
UPDATE: you can use something like this (note that fs has to be required):
var fs = require('fs');

jsonexport(json, function(err, csv) {
  if (err) return console.log(err);
  fs.writeFile("/tmp/test.csv", csv, function(err) {
    if (err) return console.log(err);
  });
});
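Since the question asks for the CSV to land in the folder you're in, here is a minimal end-to-end sketch that writes to the current working directory instead of /tmp (the filename test.csv is just an example):

var jsonexport = require('jsonexport');
var fs = require('fs');
var path = require('path');

var json = [{ uniq_id: ['test'], product_url: ['http://www.here.com'], manufacturer: ['Disney'] }];

jsonexport(json, function(err, csv) {
  if (err) return console.log(err);
  // process.cwd() is the directory Node was started from
  var outPath = path.join(process.cwd(), 'test.csv');
  fs.writeFile(outPath, csv, function(err) {
    if (err) return console.log(err);
    console.log('CSV saved to ' + outPath);
  });
});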
I am using Hashlips code for generating NFTs (10,000 NFTs). However, I added data for Solana, which I've decided not to go with, and I would like to remove it. The block of code is in every single NFT, so it would take a lot of time to delete manually.
The files are in the json folder, and the block in question is:
"properties": {
"files": [
{
"uri": "image.png",
"type": "image/png"
}
],
"category": "image",
"creators": [
{
"address": "xxxxxxxxxxxxxxxxxxxxxx",
"share": 100
}
]
},
You can use Node.js to read files or directories of files (see the file system documentation).
Then, using JSON.parse(), you can create an object and filter out the data you don't want.
Finally, you can rewrite each file with the new object.
It can look like this:
import { readdir, readFile, writeFile } from 'fs/promises';

try {
  const files = await readdir('./jsons');
  for (const file of files) {
    const fileContent = await readFile(`./jsons/${file}`, { encoding: 'utf-8' });
    const json = JSON.parse(fileContent);
    // Do magic, e.g. drop the Solana-specific block: delete json.properties;
    await writeFile(`./jsons/${file}`, JSON.stringify(json, null, 2), { encoding: 'utf-8' });
  }
} catch (err) {
  console.error(err);
}
Consider saving your files somewhere else before executing your script.
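If you want a quick way to make that backup, here is a minimal sketch (assuming Node 16.7+ for fs.cp, and ./jsons-backup as a placeholder destination):

import { cp } from 'fs/promises';

// Copy the whole folder before the script mutates the files in place
await cp('./jsons', './jsons-backup', { recursive: true });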
I am able to generate a CSV file with the data below. I am using the Node.js library "csv-writer", which generates the file quite well. My problem is that I need a way to get back a buffer instead of the file itself, because I need to upload the file to a remote server via SFTP.
How do I go about modifying this piece of code to enable a buffer response? Thanks.
...
const csvWriter = createCsvWriter({
  path: 'AuthHistoryReport.csv',
  header: [
    { id: 'NAME', title: 'msg_datetime_date' },
    { id: 'AGE', title: 'msg_datetime' }
  ]
});

var rows = [
  { NAME: "Paul", AGE: 21 },
  { NAME: "Charles", AGE: 28 },
  { NAME: "Teresa", AGE: 27 },
];

csvWriter
  .writeRecords(rows)
  .then(() => {
    console.log('The CSV file was written successfully');
  });
...
Read your own file back with fs.readFile. If you don't specify an encoding, the data passed to the callback is a buffer, not a string (note that the callback receives (err, data), not just (data)):
fs.readFile('AuthHistoryReport.csv', 'utf8', (err, data) => ... ); // data is a string
fs.readFile('AuthHistoryReport.csv', (err, data) => ... ); // data is a buffer
See the Node.js file system documentation for fs.readFile.
You can read your created file into a buffer using the native fs module:
const fs = require('fs');
const buffer = fs.readFileSync('AuthHistoryReport.csv');
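Putting the two answers together, a minimal sketch: write the records, then read the file back with no encoding so you get a Buffer (uploadViaSftp is a hypothetical placeholder for whatever SFTP client you use):

const fs = require('fs');

csvWriter
  .writeRecords(rows)
  .then(() => {
    // No encoding argument, so readFileSync returns a Buffer
    const buffer = fs.readFileSync('AuthHistoryReport.csv');
    // uploadViaSftp(buffer); // hypothetical: hand the buffer to your SFTP client
  });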
I'm using BabyParse to convert a local CSV file to JSON. Here's the js file I've written that does it:
var Baby = require('babyparse');
var fs = require('fs');
var file = 'test2.csv';
var content = fs.readFileSync(file, { encoding: 'binary' });
var parsed = Baby.parse(content, { fastMode: false });
var rows = parsed.data;
console.log(rows);
fs.writeFile("blahblahblah.json", rows, function(err) {
if(err) {
return console.log(err);
}
console.log("The file was saved!");
});
The JSON outputted to the console by the console.log(rows) line seems to be correct(ish). However, when I write rows to a file, all the JSON boilerplate disappears. For example, here's what I get when trying to convert the following csv file:
col1,col2,col3
"val1","val2","val3"
"val1","val2","val3"
"val1","val2","val3"
This is what gets printed to console:
[ [ 'col1', 'col2', 'col3' ],
[ 'val1', 'val2', 'val3' ],
[ 'val1', 'val2', 'val3' ],
[ 'val1', 'val2', 'val3' ],
[ '' ] ]
But this is what gets written to the file:
col1,col2,col3,val1,val2,val3,val1,val2,val3,val1,val2,val3,
Does anyone know what's happening here? Why is the JSON-specific syntax being stripped out?
You need to convert your JSON into a string before you save it. When handed something that isn't a string or a buffer, fs.writeFile coerces it to a string, and an array coerced to a string is just its elements joined with commas, which is exactly the flat comma-separated line you're seeing and why the JSON-specific syntax disappears.
rows = JSON.stringify(parsed.data);
This should do the trick!
fs.writeFile("blahblahblah.json", JSON.stringify(Baby.parse(content, {fastMode: false}).data), function(err) {
if(err) {
return console.log(err);
}
console.log("The file was saved!");
});
My application needs to write new users who sign up into a JSON file, so that later I can send this file to users.
What I want is something like:
[
{"username": "A"},
{"username": "B"},
{"username": "C"}
]
When a new user "D" signs up, NodeJS will update the file as:
[
{"username": "A"},
{"username": "B"},
{"username": "C"},
{"username": "D"}
]
However, I'm having problems implementing this: although I can 'append' to the file, I cannot write a username just before the closing ']'.
I tried doing it without square brackets and with JSON.parse(arrayFromFileRead), but it gives me an
'unexpected token {'
error.
Could somebody help with either:
Writing to the file one line before the last line, that is, the line just before the closing square bracket.
Reading the file as a JSON object without the enclosing square brackets.
Thank you.
In order to write proper JSON (and be able to parse it as such with JSON.parse), you need to have commas between objects inside an array.
[
{"username": "A"},
{"username": "B"},
{"username": "C"}
]
Check out this example:
var fs = require('fs');

function addUser(user, callback) {
  var usersFile = './users.json';
  // Note: users.json must already exist, or readFile will hand an error to the callback
  fs.readFile(usersFile, function(err, users) {
    if (err) {
      return (callback) ? callback(err) : console.error(err);
    }
    users = (users) ? JSON.parse(users) : [];
    users.push(user);
    fs.writeFile(usersFile, JSON.stringify(users), function(err) {
      (callback) ? callback(err) : console.error(err);
    });
  });
}
addUser({username: 'D', password: 'blablabla'});
Logic: have one users.json file where we keep all user data serialized by the JSON.stringify() function. To do so you have to read the whole file into a variable, parse it, push the new record, serialize (stringify) it again, and save it back to the file.
Benefits: there is no benefit! As your file gets bigger you'll waste more memory and CPU to read it, push, serialize, and write it back, and the file will be locked during every read/write.
SO IT'S BETTER TO DO THIS:
1) create a users folder
2) make your code like this:
var fs = require('fs');
var path = require('path');
var md5 = require('md5');

var usersDir = './users';

function addUser(user, callback) {
  var userFile = path.join(usersDir, md5(user.username) + '.json');
  fs.writeFile(userFile, JSON.stringify(user), function(err) {
    (callback) ? callback(err) : console.error(err);
  });
}

addUser({username: 'D', password: 'blablabla'});
Logic: you have a users folder where you keep user records in a file-per-user fashion.
Benefits: with a single file (users.json) you have the issue of parallel access to the same file, but when the files are separate the filesystem itself acts as a database, where each JSON file is a row and its content is the document. Reading a user back is then a direct file lookup, as sketched below.
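A minimal read counterpart under the same assumptions (md5-hashed filenames in ./users; getUser is a hypothetical helper, not part of the code above):

function getUser(username, callback) {
  var userFile = path.join(usersDir, md5(username) + '.json');
  fs.readFile(userFile, function(err, data) {
    if (err) return callback(err);
    callback(null, JSON.parse(data));
  });
}

getUser('D', function(err, user) {
  if (err) return console.error(err);
  console.log(user); // { username: 'D', password: 'blablabla' }
});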
I've just started learning the MEAN stack and need to generate dynamic forms on the fly.
The requirement is to import a document (excel/csv/xml/xls etc.) and generate a dynamic form from it, so a user can update their data and export it again into the respective file format.
To accomplish this I'm converting the documents to JSON format and storing the JSON data in the MongoDB database.
Ex: Consider this xlsx data:
ID  Name   dob       Gender
1   user1  7-Dec-87  m
2   user2  8-Dec-87  f
3   user3  9-Dec-87  f
4   user4  4-Dec-87  m
And I'm converting this to JSON format using the xlsx-to-json module and storing it in MongoDB:
var converter = require('xlsx-to-json');

app.post('/myapp', function (req, res) {
  //console.log("===========" + req.file.path);
  converter({
    input: req.file.path,
    output: "output.json"
  }, function (err, result) {
    if (err) {
      console.error(err);
    } else {
      console.log(result);
      db.collection('test').insert(result, function (err, doc) {
        console.log(err);
        res.json(doc);
      });
    }
  });
});
Here I'm fetching the above data from MongoDB with Express:
app.get('/myapp', function (req, res) {
  db.collection('test').find(function (err, docs) {
    console.log(docs);
    res.json(docs);
  });
});

app.get('/birthdaylist/:id', function (req, res) {
  var id = req.params.id;
  console.log(id);
  db.collection('test').findOne({_id: mongojs.ObjectId(id)}, function (err, doc) {
    console.log(JSON.stringify(doc));
    res.json(doc);
  });
});
and here's the JSON output:
[
{ dob: '7-Dec-87', ID: '1', Name: 'user1' },
{ dob: '8-Dec-87', ID: '2', Name: 'user2' },
{ dob: '9-Dec-87', ID: '3', Name: 'user3' },
{ dob: '4-Dec-87', ID: '4', Name: 'user4' }
]
So, I have a few queries:
Is this the correct approach for generating a dynamic form from xlsx/csv etc.? If yes, how can I generate a dynamic form from the above JSON?
While exploring on Google I found that MongoDB can generate forms automatically (https://github.com/GothAck/forms-mongoose). Will that help here, given that the Excel files may hold a huge amount of data?
Any help would be really appreciated.
Do you actually need to analyze an arbitrary spreadsheet and dynamically extract the schema, or do you know the schema ahead of time? If you know the schema, then the Mongoose form-generating example is straightforward. But make sure dynamic extraction is actually a requirement, because it is tough.
You are never going to be 100% reliable, because spreadsheets are created by users and users do weird things. But you can make something that works most of the time.
You need something that takes a JSON object, extracts its schema, and puts that into Mongoose schema format.
So you want an interesting module to pair with Mongoose schemas. I searched node-modules.com and this came up: https://github.com/Nijikokun/generate-schema
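A minimal sketch of that idea, assuming the generate-schema module linked above (its .mongoose() helper derives a Mongoose-style schema from a sample object; verify the exact output shape against the module's docs):

var GenerateSchema = require('generate-schema');

// One row of the converted spreadsheet JSON serves as the sample document
var sample = { dob: '7-Dec-87', ID: '1', Name: 'user1' };

// Usage based on the module's README; all fields in this sample come out as strings
var schema = GenerateSchema.mongoose(sample);
console.log(schema);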
Form generation is not a trivial task. You may want to consider using a library for this. Here are a few that might be useful to you:
http://schemaform.io/
https://github.com/jdorn/json-editor/
Also, if you need help generating JSON schema from JSON:
http://jsonschema.net/#/
and of course: http://json-schema.org/