I'm trying to parse an XML file, change the CDATA value of some nodes using xml2js, and then write the new XML document back to that file. I've tried everything, but I'm still not getting anywhere. I tried using an empty string and the "cdata" option in the Builder class, but it doesn't work. It always writes the text node as undefined, i.e. <![CDATA[undefined]]>.
fs.readFile(xmlFileUrl, function(err, data) {
  parser.parseString(data, function(err, result) {
    // Change a node value to something ... and then build it
    var xml = builder.buildObject(result);
    fs.writeFile(xmlFileUrl, xml, function(err) {
      if (err) {
        console.log(err);
        return;
      }
    });
  });
});
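For reference, here is a minimal sketch of the intended parse-modify-build round trip. The <root><item> structure and xmlFileUrl path are placeholders, not taken from the question; the key points are that parsed child nodes come back as arrays (assigning to a path that doesn't exist is what produces "undefined" in the output) and that the Builder accepts a cdata option:

var fs = require('fs');
var xml2js = require('xml2js');

var xmlFileUrl = './data.xml'; // placeholder path
var parser = new xml2js.Parser();
// cdata: true wraps text nodes that need escaping in <![CDATA[ ... ]]>
var builder = new xml2js.Builder({ cdata: true });

fs.readFile(xmlFileUrl, function(err, data) {
  if (err) return console.log(err);
  parser.parseString(data, function(err, result) {
    if (err) return console.log(err);
    // Parsed children are arrays, so the text of <root><item> lives at result.root.item[0]
    result.root.item[0] = 'new value';
    var xml = builder.buildObject(result);
    fs.writeFile(xmlFileUrl, xml, function(err) {
      if (err) console.log(err);
    });
  });
});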
I am trying to loop through all the images in my folder, convert each one to base64, and send it to MongoDB.
I started with one image and it worked fine.
var filename = '1500.jpg';
var binarydata = fs.readFileSync(filename);
var converted = new Buffer(binarydata).toString("base64");
console.log(converted);
The above code gives me base64 for one file.
I tried changing the code so that it would loop through all the files in my directory and give me base64 for each file.
Here is what I wrote, but it did not work:
var variantfolder = './variantimages';
fs.readdir(variantfolder, function(err, files){
  if (err) {
    console.log(err);
  } else {
    fs.readFileSync(files, function(err, res){
      if (err) {
        console.log('err')
      } else {
        var converted = new Buffer(res).toString("base64");
        var onevariant = {
          "imagename": files,
          "imagebase64": converted
        }
        var newvariant = new Variant(onevariant)
        newvariant.save(err, newvar){
          if (err) {
            console.log('err');
          } else {
            console.log('saved to mongo');
          }
        }
      }
    })
  }
})
I suspect the problem is related to calling the functions in the wrong way.
Check the inputs and outputs of the functions you are using.
The fs.readdir() function callback is passed 2 parameters, an error and an array of file names.
The fs.readFileSync() function takes the parameters path and options. It also returns the file contents, it doesn't take a callback. The callback version is fs.readFile().
So in your code you are passing an array of file names into the file path parameter, which will not work.
You can also pass base64 as the encoding when reading the file and you won't have to convert it after.
I expect you will want something more along these lines (add your own error handling as required):
fs.readdir(variantfolder, (err, fileNames) => {
  fileNames.forEach((fileName) => {
    fs.readFile(`${variantfolder}/${fileName}`, 'base64', (err, base64Data) => {
      // Do your thing with the file data.
    });
  });
});
Note that you can use the async, sync, or promise (fs.promises) versions of the fs functions, depending on what is most suitable for your code.
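For example, a rough sketch of the fs.promises variant applied to the original goal, assuming variantfolder and the Variant model exist as in the question and that Variant#save returns a promise (as Mongoose models do):

const fs = require('fs').promises;

async function saveVariants() {
  const fileNames = await fs.readdir(variantfolder);
  for (const fileName of fileNames) {
    // Reading with the 'base64' encoding skips the manual Buffer conversion
    const base64Data = await fs.readFile(`${variantfolder}/${fileName}`, 'base64');
    const newvariant = new Variant({
      imagename: fileName,
      imagebase64: base64Data
    });
    await newvariant.save();
  }
}

saveVariants().catch(console.error);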
I have the following code that is supposed to call an API using each object key's value, then open a text file and replace the text that matches the key with the API data. To simplify things, I've removed the API call and just loop through the object keys and attempt the replacement.
const fs = require('fs')
const storage = require('./db/storage')
const keys = Object.keys(storage.MOVIES);

for (const key of keys) {
  fs.readFile('./db/movies_prebuilt.txt', 'utf8', function (err, data) {
    if (err) {
      return console.log(err);
    }
    // THIS LINE DOESN'T WORK using key, but does work using an actual string.
    var result = data.replace(key, "NOTWORKING");
    fs.writeFile('./movies.md', result, 'utf8', function (err) {
      if (err) return console.log(err);
    });
  });
}
The db/storage snippet looks like this:
exports.MOVIES = {
"Matrix_Score": "tt0133093",
"2001_Space_Score": "tt0062622"
...
}
And the text to be replaced inside the file looks like this
### Matrix_Score
And I have changed the for loop to data.replace("Matrix_Score","WORKING"); and that works perfectly fine. Is it not possible to use the key as a string? The only reason I want to use the key is so I don't have to write another array or object with all the names when I already have it.
Cheers!
I think the following code may be closer to what I understand the intention to be. fs.readFile returns all of the file contents, and you likely want to replace all of the keys, not just the last one. There is also no need to read the file multiple times:
const fs = require('fs')
const storage = require('./db/storage')
const keys = Object.keys(storage.MOVIES);

fs.readFile('./db/movies_prebuilt.txt', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  for (const key of keys) {
    data = data.toString().replace(key, "NOTWORKING");
  }
  fs.writeFile('./movies.md', data, 'utf8', function (err) {
    if (err) return console.log(err);
  });
});
EDIT, to add back the original point:
fs.readFile typically returns a Buffer, which needs to be converted with .toString() before it can be manipulated as a string.
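One more caveat worth flagging: String.prototype.replace with a plain string only replaces the first occurrence of each key. If a key can appear more than once in the file, a global regular expression (or String.prototype.replaceAll on newer Node versions) would be needed, roughly like this (assuming the keys contain no regex metacharacters; escape them first if they might):

for (const key of keys) {
  // the 'g' flag replaces every occurrence of the key, not just the first
  data = data.toString().replace(new RegExp(key, 'g'), "NOTWORKING");
}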
Well, the title says it all: I'm trying to write a script (running in a Node.js/Express server-side application) that uses the request, unzip and xml2js libraries to fetch a zip file from a given URL, whose content is an XML file that I need to parse into a JavaScript object for some further processing.
So far I've managed to come up with:
var express = require("express");
var app = express();
/* some init code omitted */
var request = require("request");
var unzip = require("unzip");
var xml2js = require("xml2js");

var parser = new xml2js.Parser();

app.get("/import", function(req, res) {
  request("http://path.to/file.zip")
    .pipe(unzip.Parse())
    .on("entry", function(entry) {
      // This is what I'm trying to avoid, which doesn't even work
      entry.pipe(fs.createWriteStream(entry.path));
      fs.readFile(entry.path, function(err, data) {
        if (err) {
          return res.status(500).send(err);
        }
        parser.parseString(data, function(err, obj) {
          console.log(util.inspect(obj));
          /* further processing of obj */
        });
      });
    });
});
Although the contents of the XML file are correctly written to disk, I'm looking for an alternative to this approach for two reasons:
to save disk space, since I don't really need to keep the XML file once it has been converted to a JS object
it doesn't even work: fs.readFile probably starts reading the file before fs.createWriteStream is done writing it, because the line console.log(util.inspect(obj)) logs null (whereas if I run only the innermost fs.readFile block and replace entry.path with the name of the previously written file, it produces the desired output)
I wish I could jot down a jsFiddle for this but I'm clueless as to how, when it comes to expressjs applications. Cheers.
EDITED
Piping to a file is unnecessary; you can parse the data directly from the entry stream:
app.get("/import", function(req, res) {
  request("http://link-top.zip")
    .pipe(unzip.Parse())
    .on("entry", function(entry) {
      var chunks = [];
      // renamed from `res` so it doesn't shadow the Express response object
      var xmlString;
      if (entry.path == 'needed.xml') {
        entry.on('data', function(data) {
          chunks.push(data.toString());
        });
        entry.on('end', function () {
          xmlString = chunks.join("");
          parser.parseString(xmlString, function(err, obj) {
            console.log(util.inspect(obj));
            /* further processing of obj */
          });
        });
      }
    });
});
I have a folder with 260 .png files with different country names: Afghanistan.png, Albania.png, Algeria.png, etc.
I have a .json file with all the ISO codes for each country, like this:
{
  "AF" : "Afghanistan",
  "AL" : "Albania",
  "DZ" : "Algeria",
  ...
}
I would like to rename the .png files to their ISO code in lower case. That means I would like to end up with the following files in the folder with all the .png images:
af.png, al.png, dz.png, etc.
I've been trying to work out how to do this with Node.js on my own, but I am a little lost here and would really appreciate some clues.
You'll need to use fs for that: http://nodejs.org/api/fs.html
And in particular the fs.rename() function:
var fs = require('fs');

fs.rename('/path/to/Afghanistan.png', '/path/to/AF.png', function(err) {
  if (err) console.log('ERROR: ' + err);
});
Put that in a loop over your freshly-read JSON object's keys and values, and you've got a batch renaming script.
fs.readFile('/path/to/countries.json', function(error, data) {
  if (error) {
    console.log(error);
    return;
  }
  var obj = JSON.parse(data);
  for (var p in obj) {
    // p.toLowerCase() gives the lower-case file name asked for in the question
    fs.rename('/path/to/' + obj[p] + '.png', '/path/to/' + p.toLowerCase() + '.png', function(err) {
      if (err) console.log('ERROR: ' + err);
    });
  }
});
(This assumes here that your .json file is trustworthy and that it's safe to use its keys and values directly in filenames. If that's not the case, be sure to escape those properly!)
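For example, a quick sanitizing helper (hypothetical, not part of the original answer) could strip anything outside a safe character set before the names are used in paths:

// Strip path separators and any other characters that are unsafe in file names
function safeName(name) {
  return String(name).replace(/[^a-zA-Z0-9._-]/g, '_');
}
// then build the rename paths with safeName(obj[p]) and safeName(p) in the loop above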
For synchronous renaming use fs.renameSync
fs.renameSync('/path/to/Afghanistan.png', '/path/to/AF.png');
fs.readdir(path, callback)
fs.rename(oldPath, newPath, callback)
Go through http://nodejs.org/api/fs.html
One important thing: you can also use the sync functions (they block, like a plain C program would).
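A minimal synchronous sketch along those lines, with placeholder paths, assuming the JSON maps ISO codes to the existing country file names as shown in the question:

const fs = require('fs');

const countries = JSON.parse(fs.readFileSync('/path/to/countries.json', 'utf8'));

for (const code of Object.keys(countries)) {
  // e.g. renames Afghanistan.png to af.png
  fs.renameSync('/path/to/' + countries[code] + '.png', '/path/to/' + code.toLowerCase() + '.png');
}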
On Linux/Unix, you can use shell syntax:
const shell = require('child_process').execSync ;
const currentPath= `/path/to/name.png`;
const newPath= `/path/to/another_name.png`;
shell(`mv ${currentPath} ${newPath}`);
That's it!
Here's an updated version of the script that renames a file in any directory, e.g. "C:\Users\user\Downloads":
const fs = require('fs');

// current file name
const fileName = 'C:\\Users\\user\\Downloads\\oldFileName.jpg';
// new file name
const newFileName = 'C:\\Users\\user\\Downloads\\newFileName.jpg';

fs.rename(fileName, newFileName, function(err) {
  if (err) throw err;
  console.log('File Renamed!');
});
This script renames a file with a specific path and file name, in this case "C:\Users\user\Downloads\oldFileName.jpg" to "C:\Users\user\Downloads\newFileName.jpg", using the "fs" module in Node.js. The rename function takes the current file name, the new file name, and a callback that is called after the file has been renamed. If there is an error, it throws; otherwise it prints "File Renamed!" to the console.
I am trying to upload and insert large Excel files (hundreds of thousands of rows; 10-100 MB+) into MongoDB.
var cv_json = require('convert-json');

cv_json({
  // now supporting csv, xls, xlsx, xml format
  input: '/home/local/ASSYST-COC/sobharani/SMMC/DART - Sample Data File.xls',
  output: null
}, function (err, result) {
  if (err) {
    console.error(err);
  } else {
    console.log(result);
  }
});
This works only for small amounts of data. How do I use async.queue?
I tried the approach below, but it takes too long to load 400K (4 lakh) rows (around 1 hour), and I need to load even more than that. I want this data stored in seconds, or at most 5 to 10 minutes.
var cv_json = require('convert-json');
var async = require('async');
var MongoClient = require('mongodb').MongoClient; // require mongodb

MongoClient.connect('mongodb://127.0.0.1/ee', function (err, db) {
  if (err) throw err;
  var collection = db.collection('csvdata');
  var queue = async.queue(collection.insert.bind(collection), 200);
  console.time("queryTime"); // time start
  cv_json({
    // now supporting csv, xls, xlsx, xml format
    input: '/home/local/ASSYST-COC/sobharani/Desktop/JSONto.csv',
    output: null
  }, function (err, result) {
    if (err) {
      console.error(err);
    } else {
      queue.push(result, function (err, res) {
        if (err) throw err;
      });
    }
  });
  queue.drain = function () {
    console.log('all items have been processed');
    console.timeEnd("queryTime"); // end time
    db.close();
  }
  console.timeEnd("queryTime"); // end time
});
Every insert generates its own call to the database, and Mongo takes a write lock for each insert, which slows the process down even more.
I'd recommend batch inserting (many documents inserted at once) by providing an array of documents to the collection.insert method.
Cf. http://docs.mongodb.org/manual/reference/method/db.collection.insert/#db.collection.insert
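A rough sketch of that idea, splitting the converted rows into batches and reusing the async library the question already pulls in (the batch size and concurrency are arbitrary; on older drivers without insertMany, collection.insert also accepts an array):

var async = require('async');

// Insert rows in batches of 1000 instead of one document per call,
// with at most 5 insertMany calls in flight at a time.
function insertInBatches(collection, rows, callback) {
  var batches = [];
  for (var i = 0; i < rows.length; i += 1000) {
    batches.push(rows.slice(i, i + 1000));
  }
  async.eachLimit(batches, 5, function (batch, next) {
    collection.insertMany(batch, next);
  }, callback);
}

Calling insertInBatches(collection, result, callback) in place of queue.push in the code above should cut the number of round trips dramatically.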
You should use Mongo batch inserts. A batch insert supports no more than 1000 write operations, so you will need to use a bulk operation. This is an example (in CoffeeScript):
# Mongo cannot do more than 1000 writes per batch insert; bulk will divide it
bulk = yourModel.collection.initializeUnorderedBulkOp()
for data in veryBigArray
  bulk.insert data
bulk.execute (err) ->
  if err?
    console.log err
  else
    console.log "INSERTING %s DATA IN MONGO", veryBigArray.length
The first step is to determine whether the time is being consumed by the conversion to json or the load into MongoDB. So try commenting out the step that inserts the result into MongoDB and see whether it still takes a long time just to do the conversion.
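For instance, wrapping just the conversion step in console.time makes it easy to see which side dominates (a sketch using the same input path as the question):

console.time('convert');
cv_json({
  input: '/home/local/ASSYST-COC/sobharani/Desktop/JSONto.csv',
  output: null
}, function (err, result) {
  // if most of the hour is already spent here, MongoDB is not the bottleneck
  console.timeEnd('convert');
  if (err) return console.error(err);
  console.log(result.length + ' rows converted');
});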