I want to see the content of a file that is posted from the client. I am using the fs module, but with the code below contents comes back undefined. Any idea what is missing in the code below?
I have printed the file object on the server side to make sure I am getting the data.
server.js
var multiparty = require('multiparty');
var fs = require('fs');
var data = new multiparty.Form();

export function create(req, res) {
    data.parse(req, function (err, files) {
        var file = files.file;
        console.log(file);
        fs.readFile(file, 'utf8', function (err, contents) {
            console.log('content', contents);
        });
    });
};
I guess the problem might be the signature of the callback you are supplying to data.parse (you are missing the fields argument).
Check it yourself by looking at the examples in the multiparty docs:
var multiparty = require('multiparty');
var fs = require('fs');
var data = new multiparty.Form();

export function create(req, res) {
    data.parse(req, function (err, fields, files) {
        var file = files.file;
        console.log(file);
        fs.readFile(file, 'utf8', function (err, contents) {
            console.log('content', contents);
        });
    });
};
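One thing to keep in mind, as a hedge: with multiparty, files.file is typically an array of file descriptors rather than a path string, so the uploaded file is usually read from its temporary path. A minimal sketch of that extra step, assuming a single upload under the field name file:
data.parse(req, function (err, fields, files) {
    if (err) {
        console.error(err);
        return res.end();
    }
    var uploaded = files.file[0]; // multiparty stores an array of files per field name
    fs.readFile(uploaded.path, 'utf8', function (err, contents) {
        console.log('content', contents); // the text of the uploaded file
        res.end();
    });
});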
I'm using xml2js and Meteor. I've tried it both with fs and without, like the examples in the docs.
My js file with fs:
var xml2js = require('xml2js');
var fs = require('fs');
var parser = new xml2js.Parser();

export const extractEntities = function (file) {
    fs.readFile('./xmlFile.xml', function (err, data) {
        parser.parseString(data, function (err, result) {
            console.dir(result);
            console.log('Done');
        });
    });
};
Error
Uncaught TypeError: fs.readFile is not a function
at extractEntities (packages.js:8)
at PackageUpload.componentDidMount (PackageUpload.js:36)
at commitLifeCycles (modules.js?hash=603e8fa158a1fc409b1d0d581586111b31091ac9:24142)
My js file without fs:
const parseString = require('xml2js').parseString;

export const extractEntities = () => {
    parseString('./xmlFile.xml', function (err, result) {
        console.dir(result);
        return result;
    });
};
In this case, it returns undefined.
However, when I pass an XML string to parseString it works. But in my app I'm uploading it as a file from disk and it's passed to extractEntities. It has no arguments in this example because I'm testing parsing the file first; then I will test an uploaded XML file.
This is my directory structure:
Directory
    file.js
    xmlFile.xml
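For what it's worth, the stack trace points at componentDidMount, so this code appears to run in the browser, where Node's fs module is not available; that would explain fs.readFile is not a function. Also, parseString expects XML content rather than a file path, which is why passing './xmlFile.xml' yields undefined. A minimal sketch of the parsing step once you have the XML text in hand (the sample XML string here is made up):
const parseString = require('xml2js').parseString;

const xmlText = '<package><entity id="1"/><entity id="2"/></package>'; // placeholder XML content
parseString(xmlText, function (err, result) {
    if (err) {
        console.error(err);
        return;
    }
    console.dir(result); // a plain JS object, e.g. { package: { entity: [...] } }
});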
I am attempting to convert an XSD to JSON using the XSD2JSON2 package. I have tried the code below, but the users.json file is not created.
var xsd2json = require('xsd2json2').xsd2json;
var fs = require('fs');

xsd2json('users.xsd', (err, jsonSchema) => {
    fs.writeFile('users.json', jsonSchema, (err) => {
        if (err) throw err;
        console.log('JSON schema created');
    });
});
I tried the above after getting a 'callback is not defined' error with XSD2JSON2's usage code:
var xsd2json = require('xsd2json2').xsd2json;
var fs = require('fs');

xsd2json('./users.xsd', function (err, jsonSchema) {
    fs.writeFile('./users.json', jsonSchema, callback);
});
I am currently learning JS (as you can probably tell) so any assistance would be appreciated.
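One thing worth checking, offered as a hedge rather than a definitive fix: the snippet never inspects the err argument from xsd2json, and if jsonSchema comes back as an object rather than a string, fs.writeFile will not accept it. A sketch that logs errors and stringifies defensively:
var xsd2json = require('xsd2json2').xsd2json;
var fs = require('fs');

xsd2json('users.xsd', function (err, jsonSchema) {
    if (err) {
        console.error('xsd2json failed:', err); // surfaces why users.json was never written
        return;
    }
    // Stringify in case the schema arrives as an object rather than a string.
    var output = typeof jsonSchema === 'string' ? jsonSchema : JSON.stringify(jsonSchema, null, 2);
    fs.writeFile('users.json', output, function (err) {
        if (err) throw err;
        console.log('JSON schema created');
    });
});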
This is my demo.js file, and I want to use it in server.js so that I can use different js files in one server file.
Demo.js:
app.get('/addUser', function (req, res) {
    var MongoClient = mongodb.MongoClient;
    var url = 'mongodb://localhost:27017/project';
    MongoClient.connect(url, function (err, db) {
        var collection = db.collection('users');
        collection.find({name: 'shruti'}).toArray(function (err, result) {
            console.log(result);
            db.close();
        });
    });
});
Server.js:
var a = require('./demo.js');
var http = require("http");

var server = http.createServer(function (request, response) {
    response.writeHead(200, {"Content-Type": "text/html"});
    response.write(a);
    response.end();
});

server.listen(7860);
A possible sample would look like:
demo.js
var myModule = {
    defineRoutes: function (router) {
        //do something...
    }
};

module.exports = myModule;
server.js
var myModule = require('./demo.js');
myModule.defineRoutes(router);
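A slightly more concrete sketch of the same idea, assuming Express is used for routing (the route and port here just mirror the question):
// demo.js
module.exports = {
    defineRoutes: function (app) {
        app.get('/addUser', function (req, res) {
            // ...query MongoDB here and send the result back via res...
            res.end('ok');
        });
    }
};

// server.js
var express = require('express');
var app = express();
var demo = require('./demo.js');

demo.defineRoutes(app); // demo.js attaches its routes to the shared app
app.listen(7860);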
As stated, you need to export.
When you do:
var item = require("./mymodule");
require returns an object, which is a reference to the value of module.exports for that given file - in your case demo.js.
You can write your modules a few ways, as some people have shown you. Because the module is encapsulated, you are basically identifying what is public or can be called. There are a few ways to write it; you could also do:
module.exports = {
    yourCall: function () {
        console.log("stuff here");
    }
};
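The caller side would then look like this (assuming the module above is saved as mymodule.js next to the calling file):
var item = require("./mymodule");
item.yourCall(); // prints "stuff here"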
As stated by @ishann, who is dead on here, you are writing out a value that you assume is already populated. Going to the database and returning results is an asynchronous call, so it takes time for that work to happen and for the results to come back.
Based on your structure, ideally what you want to do is register the route ("/addUser"), which will pass the response object in to you:
app.get('/addUser', function (req, res) {
    var MongoClient = mongodb.MongoClient;
    var url = 'mongodb://localhost:27017/project';
    MongoClient.connect(url, function (err, db) {
        var collection = db.collection('users');
        collection.find({name: 'shruti'}).toArray(function (err, result) {
            console.log(result);
            db.close();
            // set the type
            res.writeHead(200, {"Content-Type": "application/json"});
            res.end(JSON.stringify(result)); // result is an array, so serialize it before sending
        });
    });
});
It just looks like your code needs a bit of reorganization, but separating concerns is good. You might also want to check out Express as a framework for Node.
I want to save an image with node.js and the request library. So far I have this simple code:
var request = require('request');
var fs = require('fs');
request('http://upload.wikimedia.org/wikipedia/commons/8/8c/JPEG_example_JPG_RIP_025.jpg', function (error, response, body) {
    // further logic that decides
    // whether or not the image will be saved
    fs.writeFile('downloaded.jpg', body, function () {});
});
But it doesn't work. The image always arrives corrupt. I assume it's an encoding error but I cannot figure out how to fix this.
var request = require('request'),
fs = require('fs'),
url = 'http://upload.wikimedia.org/wikipedia/commons/8/8c/JPEG_example_JPG_RIP_025.jpg';
request(url, {encoding: 'binary'}, function (error, response, body) {
    fs.writeFile('downloaded.jpg', body, 'binary', function (err) {});
});
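Another option, offered as a sketch rather than the only way: ask request for a raw Buffer by passing encoding: null and write that Buffer directly, which avoids the binary-string round trip:
var request = require('request'),
    fs = require('fs'),
    url = 'http://upload.wikimedia.org/wikipedia/commons/8/8c/JPEG_example_JPG_RIP_025.jpg';

request({url: url, encoding: null}, function (error, response, body) {
    if (error) throw error;
    // body is a Buffer here, so it can be written without an encoding argument
    fs.writeFile('downloaded.jpg', body, function (err) {
        if (err) throw err;
    });
});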
var fs = require('fs'),
request = require('request'),
url='http://upload.wikimedia.org/wikipedia/commons/8/8c/JPEG_example_JPG_RIP_025.jpg';
request(url).pipe(fs.createWriteStream('downloaded.jpg'));
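If you go the pipe route, you may also want to handle download errors and know when writing has finished; a minimal sketch:
var fs = require('fs'),
    request = require('request'),
    url = 'http://upload.wikimedia.org/wikipedia/commons/8/8c/JPEG_example_JPG_RIP_025.jpg';

request(url)
    .on('error', function (err) { console.error('download failed:', err); })
    .pipe(fs.createWriteStream('downloaded.jpg'))
    .on('finish', function () { console.log('downloaded.jpg written'); });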
Here's how I did it using a stream and pipe (I was using Express, but you may not need that):
var express = require('express');
var app = express();
var filesystem = require('fs');
var https = require('https');

var download = function (url, dest, cb) {
    var file = filesystem.createWriteStream(dest);
    var request = https.get(url, function (httpResponse) {
        httpResponse.pipe(file);
        file.on('finish', function () {
            console.log("piping to file finished");
            file.close(cb); // close() is async, call cb after close completes.
        });
    }).on('error', function (err) { // Handle errors
        filesystem.unlink(dest, function () {}); // Delete the file async. (But we don't check the result)
        if (cb) cb(err.message);
    });
};

app.get('/image', (req, res) => {
    download('https://lastfm-img2.akamaized.net/i/u/64s/15cc734fb0e045e3baac02674d2092d6.png',
        'porcupine.png',
        () => { console.log("downloaded to porcupine.png"); });
});

app.listen(3000); // the description below assumes the server is listening on port 3000
When I run using node server.js and hit the url localhost:3000/image, it will download and save the file to porcupine.png in the base directory.
I want to load test.txt with nodejs.
var fs = require('fs');

fs.readFile('./test.txt', function (err, data) {
    if (err) {
        throw err;
    }
    console.log(data);
});
The path of the server is C:\server\test\server.js. The test.txt is located in the same directory, but I get this error: Error: ENOENT, no such file or directory 'C:\Users\User\test.txt'
Paths in Node are resolved relative to the current working directory. Prefix your path with __dirname to resolve the path relative to the location of your Node script.
var fs = require('fs');

fs.readFile(__dirname + '/test.txt', function (err, data) {
    if (err) {
        throw err;
    }
    console.log(data.toString());
});
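To see why the original relative path ended up at C:\Users\User\test.txt, you can compare the process's working directory (used to resolve relative paths) with the script's own directory:
console.log(process.cwd()); // where node was launched from
console.log(__dirname);     // the directory containing this script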
It's also possible to do this synchronously:
var fs = require('fs');
var path = require('path');

// Buffer with the file contents
var BUFFER = bufferFile('../test.txt');

function bufferFile(relPath) {
    return fs.readFileSync(path.join(__dirname, relPath)); // zzzz....
}
fs is the file system module. readFileSync() returns a Buffer, or a string if you ask for one.
fs resolves relative paths against the current working directory, which may not be your script's directory; joining the path with __dirname via the path module works around that.
To load as a string, specify the encoding:
return fs.readFileSync(path.join(__dirname, relPath), { encoding: 'utf8' });
You should use __dirname to get the directory the script is located in, instead of the current working directory:
fs.readFile(__dirname + "/test.txt", ...);
Use path and fs:
const fs = require("fs");
const pth = require("path");
Sync:
let data = fs.readFileSync(pth.join(__dirname,"file.txt"));
console.log(data + "");
Async:
fs.readFile(pth.join(__dirname,"file.txt"), (err, data) => {
console.log(data + "");
});
Also, if you need to serve the file repeatedly to clients and it is not large, you can keep a cached copy of it:
const exp = require("express");
const app = exp();
const fs = require("fs");
const pth = require("path");

let file = "";

app.get("/file", (q, r) => {
    if (file === "")
        file = fs.readFileSync(pth.join(__dirname, "file.txt")) + "";
    r.writeHead(200, { "Content-Type": "text/plain" });
    r.write(file);
    r.end();
});
So if it is in the same directory, just do this:
fs.readFile(__dirname + '/foo.txt', function (e, d) { console.log(d); });
If it's in the same directory, it should work. I have tested the same code with a test file in the same directory and it works fine:
var fs = require('fs');

fs.readFile('./test.txt', function (err, data) {
    if (err) {
        throw err;
    }
    console.log(data.toString());
});