Cannot read and parse XML file in Meteor - javascript

I'm using xml2js and Meteor. I've tried both with fs and without, like the examples in the docs.
My JS file, with fs:
var xml2js = require('xml2js');
var fs = require('fs');
var parser = new xml2js.Parser();

export const extractEntities = function (file) {
  fs.readFile('./xmlFile.xml', function (err, data) {
    parser.parseString(data, function (err, result) {
      console.dir(result);
      console.log('Done');
    });
  });
};
Error
Uncaught TypeError: fs.readFile is not a function
at extractEntities (packages.js:8)
at PackageUpload.componentDidMount (PackageUpload.js:36)
at commitLifeCycles (modules.js?hash=603e8fa158a1fc409b1d0d581586111b31091ac9:24142)
My JS file, without fs:
const parseString = require('xml2js').parseString;

export const extractEntities = () => {
  parseString('./xmlFile.xml', function (err, result) {
    console.dir(result);
    return result;
  });
};
In this case, it returns undefined
However, when I pass an XML string to parseString, it works. But in my app, I'm uploading it as a file from disk and it's passed to extractEntities. It has no arguments in this example because I'm testing parsing the file first; then I'll test an uploaded XML file.
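For context: the stack trace shows extractEntities being called from componentDidMount, i.e. in the browser, where Node's fs module is not available, and parseString expects XML text rather than a path. A minimal sketch of the upload case, assuming the file arrives as a browser File object from an <input type="file">, reads it with FileReader and hands the text to parseString:

import { parseString } from 'xml2js';

export const extractEntities = function (file, done) {
  const reader = new FileReader();
  reader.onload = () => {
    // reader.result holds the XML text of the uploaded file
    parseString(reader.result, function (err, result) {
      if (err) return done(err);
      done(null, result);
    });
  };
  reader.readAsText(file);
};

Here done is a hypothetical Node-style callback; the original snippets just log the result instead.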
This is my directory structure:
Directory/
  file.js
  xmlFile.xml

Related

How do you get a list of files in a directory with javascript in html on server side?

I am trying to get an array of file names from a server-side folder in JavaScript. How do you do this, or is it not possible at all? I'm looking for something like this:
var file_names = get_files_by_dir(path)
Using Node's asynchronous readdir() method (docs):
function getFiles(folder) {
  const fs = require('fs');
  const testFolder = folder;
  fs.readdir(testFolder, (err, files) => {
    files.forEach(file => {
      console.log(file);
    });
  });
}

Node Winston log file forced string conversion

In a Node project, I want to show the contents of a Winston log file in a React interface. Reading the file:
let content;
fs.readFile("path", "utf-8", function read(err, data) {
  if (err) throw err;
  content = data;
});
I send them to the interface:
router.get("/", function (req, res) {
res.status(200).send(JSON.stringify(content));
});
And I get the content in a .jsx file:
getLogs().then(res => {
  let datafromfile = JSON.parse(res);
  // Use the data
  return;
}).catch(err => {
  return err.response;
});
The issue I am having is that fs converts all the data into a string (since I am passing the utf-8 encoding and do not want a Buffer back), so I cannot manipulate the objects in the log file to show them structurally in the interface. Can anyone guide me on how to approach this problem?
I have not debugged this, but a lot depends on whether the Winston file you're loading actually contains JSON.
If it does, then JSONStream is your friend, and learning through or through2 (Node streams) will help you here.
Following is code/pseudo-code:
router.get("/", function (req, res) {
const logPath = ‘somePath’; // maybe it comes from the req.query.path
const parsePath = null; // or the token of where you want to attemp to start parsing
fs.createReadStream(logPath)
.pipe(JSONStream.parse(parsePath))
.pipe(res);
});
JSONStream
fs.createReadStream and node docs
through2
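If the log is small enough to buffer and Winston wrote it as newline-delimited JSON (one object per line, which is its usual file format), a non-streaming sketch is to parse each line yourself and send an array, so the .jsx side receives real objects instead of one long string. "path" below is a placeholder for the actual log file path:

const fs = require("fs");

router.get("/", function (req, res) {
  fs.readFile("path", "utf-8", function (err, data) {
    if (err) return res.status(500).send(err.message);
    const entries = data
      .split("\n")                         // one log entry per line (assumption)
      .filter(line => line.trim() !== "")
      .map(line => JSON.parse(line));
    res.status(200).json(entries);         // sends valid JSON; no manual JSON.stringify needed
  });
});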

Attempting to convert an XSD to JSON

I am attempting to convert an XSD to JSON using the XSD2JSON2 package. I have tried the code below, but the users.json file is not created.
var xsd2json = require('xsd2json2').xsd2json;
var fs = require('fs');

xsd2json('users.xsd', (err, jsonSchema) => {
  fs.writeFile('users.json', jsonSchema, (err) => {
    if (err) throw err;
    console.log('JSON schema created');
  });
});
I tried the above after getting a 'callback not defined' issue with XSD2JSON2's usage code:
var xsd2json = require('xsd2json2').xsd2json;
var fs = require('fs');

xsd2json('./users.xsd', function (err, jsonSchema) {
  fs.writeFile('./users.json', jsonSchema, callback);
});
I am currently learning JS (as you can probably tell), so any assistance would be appreciated.
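The 'callback not defined' error simply means the usage snippet references a callback variable that was never declared. A minimal sketch that supplies one, and also surfaces any error from xsd2json itself (which the first snippet never checks), might look like the following; whether jsonSchema arrives as a string or an object is an assumption handled defensively:

var xsd2json = require('xsd2json2').xsd2json;
var fs = require('fs');

xsd2json('./users.xsd', function (err, jsonSchema) {
  if (err) throw err; // surface conversion errors instead of failing silently
  // jsonSchema may be an object rather than a string; stringify defensively (assumption)
  var output = typeof jsonSchema === 'string' ? jsonSchema : JSON.stringify(jsonSchema, null, 2);
  fs.writeFile('./users.json', output, function (err) {
    if (err) throw err;
    console.log('users.json created');
  });
});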

How to check file content using nodejs?

I want to see the content of a file that is posted from the client. I am using the fs module, but with the code below contents comes back undefined. Any idea what is missing?
I have the file printed on the server side to make sure I am getting the data.
server.js
var multiparty = require('multiparty');
var fs = require('fs');

var data = new multiparty.Form();

export function create(req, res) {
  data.parse(req, function (err, files) {
    var file = files.file;
    console.log(file);
    fs.readFile(file, 'utf8', function (err, contents) {
      console.log('content', contents);
    });
  });
};
I guess the problem might be the signature of the callback you are supplying to data.parse (you are missing the fields argument).
Check it yourself by looking at the examples in the multiparty docs:
var multiparty = require('multiparty');
var fs = require('fs');

var data = new multiparty.Form();

export function create(req, res) {
  data.parse(req, function (err, fields, files) {
    var file = files.file;
    console.log(file);
    fs.readFile(file, 'utf8', function (err, contents) {
      console.log('content', contents);
    });
  });
};
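If contents still comes back undefined after that fix, note that multiparty reports each upload as an array of file objects rather than a plain path; a further tweak, assuming the default temp-file behaviour, reads from the first entry's path property:

data.parse(req, function (err, fields, files) {
  // files.file is an array; each entry is an object with a temporary path on disk
  var uploaded = files.file[0];
  fs.readFile(uploaded.path, 'utf8', function (err, contents) {
    console.log('content', contents);
  });
});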

nodejs load file

I want to load test.txt with nodejs.
var fs = require('fs');
fs.readFile('./test.txt', function (err, data) {
  if (err) {
    throw err;
  }
  console.log(data);
});
The path of the server is C:\server\test\server.js. The test.txt is located in the same directory, but I get this error: Error: ENOENT, no such file or directory 'C:\Users\User\test.txt'
Paths in Node are resolved relative to the current working directory. Prefix your path with __dirname to resolve the path to the location of your Node script.
var fs = require('fs');
fs.readFile(__dirname + '/test.txt', function (err, data) {
  if (err) {
    throw err;
  }
  console.log(data.toString());
});
With Node 0.12, it's possible to do this synchronously now:
var fs = require('fs');
var path = require('path');

// Buffer mydata
var BUFFER = bufferFile('../test.txt');

function bufferFile(relPath) {
  return fs.readFileSync(path.join(__dirname, relPath)); // zzzz....
}
fs is the file system. readFileSync() returns a Buffer, or string if you ask.
fs correctly assumes relative paths are a security issue. path is a work-around.
To load as a string, specify the encoding:
return fs.readFileSync(path,{ encoding: 'utf8' });
You should use __dirname to get the directory the file is located in, instead of the current working directory:
fs.readFile(__dirname + "/test.txt", ...);
Use path and fs:
const fs = require("fs");
const pth = require("path");
Sync:
let data = fs.readFileSync(pth.join(__dirname,"file.txt"));
console.log(data + "");
Async:
fs.readFile(pth.join(__dirname, "file.txt"), (err, data) => {
  console.log(data + "");
});
Also: if you need to read the file repeatedly and send it to the client, and the file is not large, you may be able to keep a copy of it in memory:
const exp = require("express");
const app = exp();
const fs = require("fs");
const pth = require("path");
let file = "";
app.get("/file", (q, r) => {
if (file === "")
file = fs.readFileSync(pth.join(__dirname,"file.txt")) + "";
r.writeHead(200, { "Content-Type": "text/plain" });
r.write(file);
r.end();
});
So if it is in the same directory, just do this:
fs.readFile(__dirname + '/foo.txt', function (e, d) { console.log(d); });
If it's in the same directory, it should work. I have tested the same code with a file name.txt and it works fine:
var fs = require('fs');
fs.readFile('./test.txt', function (err, data) {
  if (err) {
    throw err;
  }
  console.log(data.toString());
});
