I am new to JavaScript and need the ability to create, edit and export an XML document on the server side. I have seen different options on the Internet, but they do not suit me.
I did find one workable option: converting my XML file to JSON, processing it, converting it back, and then exporting it through another plugin. But is there a simpler way?
Thanks!
I recently came across a similar problem. The solution turned out to be very simple: use the xml-writer package.
In your project folder, first install it via the console
npm install xml-writer
Next, import it in your script and try this minimal example:
var XMLWriter = require('xml-writer');
var xw = new XMLWriter();
xw.startDocument();
xw.startElement('root');
xw.writeAttribute('foo', 'value');
xw.text('Some content');
xw.endDocument();
console.log(xw.toString());
You can find more information in the package's documentation, which shows example code for each method near the bottom of the page. This way you can create, edit, and export XML files. Good luck, and if something is unclear, just write!
Additional
You will also need the fs module, along with xml2json and xml-formatter:
const fs = require("fs")
const xmlParser = require("xml2json")
const formatXml = require("xml-formatter")
Completed code:
const fs = require("fs");
const xmlParser = require("xml2json");
const formatXml = require("xml-formatter");
const XMLWriter = require("xml-writer");

const xw = new XMLWriter();
xw.startDocument();
xw.startElement('root');
xw.startElement('man');
xw.writeElement('name', 'Sergio');
xw.writeElement('adult', 'no');
xw.endElement();
xw.startElement('item');
xw.writeElement('name', 'phone');
xw.writeElement('price', '305.77');
xw.endElement();
xw.endDocument();

// Parse the generated XML into a plain object so it can be edited,
// then serialize it back to an XML string.
const xmlObj = xmlParser.toJson(xw.toString(), { object: true });
const stringifiedXmlObj = JSON.stringify(xmlObj);
const finalXml = xmlParser.toXml(stringifiedXmlObj);

fs.writeFile("./datax.xml", formatXml(finalXml, { collapseContent: true }), function (err) {
    if (err) {
        console.log("Error");
    } else {
        console.log("Xml file successfully updated.");
    }
});
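If you don't actually need to edit the document after generating it, the JSON round trip can be skipped entirely. A minimal sketch (my own simplification, using only xml-writer and fs from the code above) that writes the writer's output straight to disk:

const fs = require("fs");
const XMLWriter = require("xml-writer");

// Passing true enables indented, human-readable output.
const xw = new XMLWriter(true);
xw.startDocument();
xw.startElement('root');
xw.writeElement('name', 'Sergio');
xw.endDocument();

// Write the generated XML directly; no JSON round trip needed.
fs.writeFile("./datax.xml", xw.toString(), function (err) {
    if (err) {
        console.log("Error");
    } else {
        console.log("Xml file successfully written.");
    }
});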
I'm learning programming and found myself in a tough spot; the code from the tutorial is not working and I can't understand why.
It's a shell script that's supposed to retrieve a Wikipedia page, strip it of the references, and return just the paragraph text.
It uses the urllib library. In the code below, the only difference from the tutorial's is the use of fs to make a text file with the page content. The rest is copied and pasted.
#!/usr/local/bin/node
// Returns the paragraphs from a Wikipedia link, stripped of reference numbers.
let urllib = require("urllib");
let url = process.argv[2];
let fs = require("fs");
console.log(url);
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
urllib.request(url, { followRedirect: true }, function(error, data, response) {
    let body = data.toString();
    // Simulate a Document Object Model.
    let { document } = (new JSDOM(body)).window;
    // Grab all the paragraphs and references.
    let paragraphs = document.querySelectorAll("p");
    let references = document.querySelectorAll(".reference");
    // Remove any references.
    references.forEach(function(reference) {
        reference.remove();
    });
    // Print out all of the paragraphs.
    paragraphs.forEach(function(paragraph) {
        console.log(paragraph.textContent);
        fs.appendFileSync("article.txt", `${paragraph}\n`);
    });
});
My first guess was that urllib was not working for some reason, because even though I installed it as per the official documentation, when I type which urllib at the command line, it doesn't return a path.
But then, node doesn't throw an error about require("urllib") not resolving when I run the file.
The actual output is the following:
$ ./wikp https://es.wikipedia.org/wiki/JavaScript
https://es.wikipedia.org/wiki/JavaScript
$
Can anybody help please?
I think the tutorial you followed might have been a little out of date.
This works for me:
let urllib = require("urllib");
let url = process.argv[2];
let fs = require("fs");
console.log(url);
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
urllib.request(url, { followRedirect: true }).then(({ data, res }) => {
    let body = data.toString();
    // Simulate a Document Object Model.
    let { document } = (new JSDOM(body)).window;
    // Grab all the paragraphs and references.
    let paragraphs = document.querySelectorAll("p");
    let references = document.querySelectorAll(".reference");
    // Remove any references.
    references.forEach(function(reference) {
        reference.remove();
    });
    // Print out all of the paragraphs.
    paragraphs.forEach(function(paragraph) {
        console.log(paragraph.textContent);
        fs.appendFileSync("article.txt", `${paragraph.textContent}\n`);
    });
});
The package you are using (urllib) now returns promises; that might have been different in the past, when the tutorial was released.
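One addition worth considering (my own sketch, not part of the original answer): with the promise-based API there is no error callback anymore, so a failed request would otherwise go unnoticed, which matches the silent output you saw. Chaining a catch surfaces those errors:

urllib.request(url, { followRedirect: true })
    .then(({ data, res }) => {
        // ... same DOM processing as above ...
    })
    .catch((err) => {
        // Surface network or HTTP errors instead of failing silently.
        console.error(`Request for ${url} failed:`, err);
    });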
I'm in another pickle. I've realized over the past week that my images are not loading because the links have expired, so I want to find out how to use a file directory in the code instead.
Here's what I've tried:
});
client.on('message', message => {
    if (message.content.startsWith('L!hug')) {
        var fs = require('fs');
        var files = fs.readdirSync('C:\Users\nevbw\Desktop\games\FBIBot\images\hugs')
        /* now files is an Array of the names of the files in the folder, and you can pick a random name from that array */
        let chosenFile = files[Math.floor(Math.random() * files.length)]
    }
});
and
});
client.on('message', message => {
    if (message.content.startsWith('L!hug')) {
        const path = 'C:\Users\nevbw\Desktop\games\FBIBot\images\hugs';
        const fs = require('fs');
        fs.readdirSync(path).forEach(file => {
            ranfile = Math.floor(Math.random() * file.length);
            message.channel.sendFile(ranfile);
        })
    }
});
I found the answer through searching and searching, then modified it to this; I hope people can use it for future reference!
const num = (Math.floor(Math.random() * 5) + 1).toString();
message.channel.send({ files: [`./slap/slap${num}.gif`] });
Using fs.readdirSync('./images/') instead of fs.readFileSync('./images/') works more easily, but then you will have to create the folder inside of VS Code and put the images in it. You can also drag and drop the images into the solution and use:
var files = fs.readdirSync(`./images/`).filter(file => file.endsWith('.png'))
so that when it looks for an image, it doesn't select anything else. Hope it helps some people.
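Putting the pieces above together, a sketch (assuming discord.js, and that the images live in a local ./images/hugs folder next to the bot) that lists the folder, picks a random image, and sends it:

const fs = require('fs');
const path = require('path');

client.on('message', message => {
    if (message.content.startsWith('L!hug')) {
        // List only image files, using a path relative to the bot's folder.
        const dir = './images/hugs';
        const files = fs.readdirSync(dir).filter(f => f.endsWith('.png') || f.endsWith('.gif'));
        // Pick one at random and send it as an attachment.
        const chosen = files[Math.floor(Math.random() * files.length)];
        message.channel.send({ files: [path.join(dir, chosen)] });
    }
});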
Happy to help.
You're using fs the wrong way. This is what it should look like :D Also, here is some documentation on it: https://nodejs.org/dist/latest-v13.x/docs/api/fs.html.
-- Code --
Also, just as a tip: I see you are using full directory paths, which is quite inefficient (e.g. it breaks if you change your username, drive letter, etc.), so with fs, provided the image is in the same folder, you can just do ./(ImageName); or if it is in a subfolder, say /FBIBot/Images, you can do ./Images/(ImageName). ^^
--
What the error was (I unfortunately cannot test it, but I am like 99% sure):
You were using fs.readdirSync(path).forEach(file => { when you were meant to be using fs.readFileSync(path).forEach(file => {.
-- First Code --
});
client.on('message', message => {
    if (message.content.startsWith('L!hug')) {
        var fs = require('fs');
        var files = fs.readFileSync('C:\Users\nevbw\Desktop\games\FBIBot\images\hugs')
        /* now files is an Array of the names of the files in the folder, and you can pick a random name from that array */
        let chosenFile = files[Math.floor(Math.random() * files.length)]
    }
});
-- Second Code --
});
client.on('message', message => {
    if (message.content.startsWith('L!hug')) {
        var fs = require('fs');
        var files = fs.readFileSync('C:\Users\nevbw\Desktop\games\FBIBot\images\hugs')
        /* now files is an Array of the names of the files in the folder, and you can pick a random name from that array */
        let chosenFile = files[Math.floor(Math.random() * files.length)]
    }
});
^^
So I wanted to save a file to the client's storage using Store.js.
I can change the data using store.set, and I can log it to the console to see the change, but the file that's supposed to be saved in the app data folder never gets created.
I tried to get the path where it's being saved, and it's:
C:\Users\USER\AppData\Roaming\stoma2/Categories.json
I noticed that there is a "/" in it, so I tried:
C:\Users\USER\AppData\Roaming\stoma2\Categories.json
and:
C:/Users/USER/AppData/Roaming/stoma2/Categories.json
But none of the three worked.
This is my Store.js:
const fs = require('browserify-fs');
var fs2 = require('filereader'),Fs2 = new fs2();
const electron = window.require('electron');
const path = require('path');
class Store {
    constructor(opts) {
        // Renderer process has to get `app` module via `remote`, whereas the main process can get it directly.
        // app.getPath('userData') will return a string of the user's app data directory path.
        //const userDataPath = (electron.app || electron.remote.app).getPath('userData');
        var userDataPath = (electron.app || electron.remote.app).getPath('userData');
        for (var i = 0; i < userDataPath.length; i++) {
            if (userDataPath.charAt(i) == "\\") {
                userDataPath = userDataPath.replace("\\", "/");
            }
        }
        // We'll use the `configName` property to set the file name and path.join to bring it all together as a string.
        this.path = path.join(userDataPath, opts.configName + '.json');
        this.data = parseDataFile(this.path, opts.defaults);
        console.log(this.path);
    }

    // This will just return the property on the `data` object.
    get(key) {
        return this.data[key];
    }

    // ...and this will set it.
    set(key, val) {
        this.data[key] = val;
        // Wait, I thought using the node.js synchronous APIs was bad form?
        // We're not writing a server, so there's not nearly the same IO demand on the process.
        // Also, if we used an async API and our app was quit before the asynchronous write had a chance to complete,
        // we might lose that data. Note that in a real app, we would try/catch this.
        fs.writeFile(this.path, JSON.stringify(this.data));
    }
}

function parseDataFile(filePath, data) {
    // We'll try/catch it in case the file doesn't exist yet, which will be the case on the first application run.
    // `fs.readFileSync` will return a JSON string, which we then parse into a JavaScript object.
    try {
        return JSON.parse(Fs2.readAsDataURL(new File(filePath)));
    } catch (error) {
        // If there was some kind of error, return the passed-in defaults instead.
        return data;
    }
}

// Expose the class.
export default Store;
There might be a problem with fs.writeFile() (well, that's the source of the problem).
And this is my call:
// creation
const storeDefCat = new Store({
    configName: "Categories",
    defaults: require("../data/DefaultCategorie.json")
})

// call for the save
storeDefCat.set('Pizza', { id: 0, path: storeDefCat.get('Pizza').path });
For now, if possible, I might need to find another way to save the file.
I also tried plain fs, but it doesn't work for me for some reason (I get strange errors that don't want to be fixed...).
If anyone has an idea, I would be grateful.
So I managed to fix the problem. Why was fs giving me errors about undefined functions? Why wasn't the file getting created? It had NOTHING to do with the code itself, but with the imports...
To clarify, I was using:
const fs = require('fs');
And the solution is to make it like :
const fs = window.require('fs');
Just adding window. fixed all the problems. Since it's my first time using Electron, I wasn't used to importing from the window, but it seems it's necessary. And moreover, there were no posts saying this is the fix.
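For context, a sketch of why this works (an assumption based on standard Electron behavior, not stated in the original answer): window.require is only exposed to the renderer process when Node integration is enabled, so the window loading this code needs to be created along these lines in the main process:

// Hypothetical main-process setup: without nodeIntegration,
// window.require is undefined in the renderer.
const { BrowserWindow } = require('electron');

const win = new BrowserWindow({
    webPreferences: {
        nodeIntegration: true
    }
});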
I created a gulp plugin to write JSON data to a JSON file.
But I don't understand why I should send my JSON object through the pipe rather than writing the file directly in my plugin.
I want to use my plugin with this syntax:
gulp.task('js-hash', function()
{
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('/build/js/'))
        .pipe(gulp.dest('./build/js/hash.json'));
});
And not that:
gulp.task('js-hash', function()
{
    // Get all js in redis
    gulp.src('./build/js/**/*.js')
        .pipe(getHashFile('./build/js/hash.json', '/build/js/'));
});
This is my plugin:
var through = require('through2');
var gutil = require('gulp-util');
var crypto = require('crypto');
var fs = require('fs');

var PluginError = gutil.PluginError;

// Consts
const PLUGIN_NAME = 'get-hash-file';

var json = {};

function getHashFile(filename, basename)
{
    if (!filename) {
        throw new PluginError(PLUGIN_NAME, "Missing filename!");
    }
    // Creating a stream through which each file will pass
    var stream = through.obj(function (file, enc, callback) {
        if (file.isNull()) {
            this.push(file); // Do nothing if no contents
            return callback();
        }
        if (file.isBuffer()) {
            var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
            json[file.path.replace(file.cwd + basename, '')] = hash;
            return callback();
        }
        if (file.isStream()) {
            this.emit('error', new PluginError(PLUGIN_NAME, 'Stream not supported!'));
            return callback();
        }
    }).on('finish', function () {
        fs.writeFile(filename, JSON.stringify(json), function (err) {
            if (err) {
                throw err;
            }
        });
    });
    // returning the file stream
    return stream;
}

// Exporting the plugin main function
module.exports = getHashFile;
Any ideas?
Nothing prevents you from doing this... besides not respecting the plugin guidelines!
Users actually assume a plugin will stream files and that they can pipe them to other plugins.
If I read your code right, you're trying to generate a file that contains all the SHA hashes of the inbound files. Why not let users take this file and pipe it to other plugins? You'd be surprised what people could do.
While this question looks a bit opinion-based, you could definitely put the focus on how to deal with files that may not belong to the main stream of files. Issues like this can be found in many plugins; for example, the gulp-uglify authors are wondering how they can add source maps without mixing JS and source maps downstream.
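To get the desired .pipe(getHashFile('/build/js/')).pipe(gulp.dest(...)) syntax, one option (a sketch, assuming the vinyl package is installed; note also that gulp.dest expects a directory such as './build/js/', not a file path) is to collect the hashes in the transform function and push a single hash.json file downstream in through2's flush callback:

var through = require('through2');
var File = require('vinyl');
var crypto = require('crypto');
var path = require('path');

function getHashFile(basename)
{
    var json = {};
    return through.obj(function (file, enc, callback) {
        if (file.isBuffer()) {
            var hash = crypto.createHash('sha256').update(String(file.contents)).digest('hex');
            json[file.path.replace(file.cwd + basename, '')] = hash;
        }
        callback(); // consume inbound files; only hash.json goes downstream
    }, function (callback) {
        // Flush: once all files are hashed, emit the collected hashes
        // as a new file in the stream so users can pipe it to gulp.dest().
        this.push(new File({
            cwd: process.cwd(),
            base: process.cwd(),
            path: path.join(process.cwd(), 'hash.json'),
            contents: Buffer.from(JSON.stringify(json))
        }));
        callback();
    });
}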
For Node.js, what is the best way to prepend to a file in a way SIMILAR to
fs.appendFile(path.join(__dirname, 'app.log'), 'appendme', 'utf8')
Personally, the best way would revolve around an asynchronous solution to create a log where I can basically push onto the file from the top.
This solution isn't mine and I don't know where it's from, but it works.
const fs = require('fs')

// Read the existing contents, then reopen the file for writing.
const data = fs.readFileSync('message.txt')
const fd = fs.openSync('message.txt', 'w+')
const insert = Buffer.from("text to prepend \n")
// Write the new text at position 0, then the original contents right after it.
fs.writeSync(fd, insert, 0, insert.length, 0)
fs.writeSync(fd, data, 0, data.length, insert.length)
fs.close(fd, (err) => {
    if (err) throw err;
});
It is impossible to add to the beginning of a file. See this question for the similar problem in C, or this question for the similar problem in C#.
I suggest you do your logging in the conventional way (that is, log to the end of the file).
Otherwise, there is no way around reading the file, adding the text to the start, and writing it back to the file, which can get really costly really fast.
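For completeness, a minimal async sketch of that read-prepend-rewrite cycle (my own illustration using fs.promises, not from the original answer):

const fs = require('fs').promises;

async function prependToLog(file, text) {
    // Read everything, then rewrite the file with the new text first.
    // This costs O(file size) per call, which is why appending is preferred for logs.
    const original = await fs.readFile(file, 'utf8').catch(() => '');
    await fs.writeFile(file, text + original);
}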
It seems it is indeed possible with https://www.npmjs.com/package/prepend-file
Here is an example of how to prepend text to a file using gulp and a custom built function.
var through = require('through2');

gulp.src('somefile.js')
    .pipe(insert('text to prepend with'))
    .pipe(gulp.dest('Destination/Path/'))

function insert(text) {
    function prefixStream(prefixText) {
        var stream = through();
        stream.write(prefixText);
        return stream;
    }

    let prefixText = Buffer.from(text + "\n\n"); // allocate ahead of time

    // creating a stream through which each file will pass
    var stream = through.obj(function (file, enc, cb) {
        if (file.isBuffer()) {
            file.contents = Buffer.concat([prefixText, file.contents]);
        }
        if (file.isStream()) {
            throw new Error('stream files are not supported for insertion, they must be buffered');
        }
        // make sure the file goes through the next gulp plugin
        this.push(file);
        // tell the stream engine that we are done with this file
        cb();
    });

    // returning the file stream
    return stream;
}
Source: cole_gentry_github_dealingWithStreams
It's possible by using the prepend-file node module. Do the following:
npm i prepend-file -S
Import the prepend-file module in your code.
Example:
const prependFile = require('prepend-file');

let file = 'message.txt';
let textToPrepend = 'text to prepend\n';

// prependFile(file, data, callback) prepends `data` to the file's contents.
prependFile(file, textToPrepend, () => {
    console.log('file prepended successfully');
});