I am writing code in Node.js where I want to read from a file and then expose its contents through a web API. The problem is that I get an error when I run the code.
The error appears to be in the function "render_html" of my views.js file:
"use strict";
const fs = require('fs');
const model = require('./model');
exports.car = (request, response, params) => {
if (Object.keys(params).length === 0) {
render_JSON(response, model.cars())
}else{
render_JSON(response, model.cars(parseInt(params.number)))
}
};
// Reads `file` from disk and streams its contents as the HTTP response body.
// Without an encoding argument fs.readFile yields a Buffer, which
// response.write accepts directly.
function render_html(response, file) {
    fs.readFile(file, (err, data) => {
        if (err) {
            console.error(err);
            // BUG FIX: the original left the response open on error, so the
            // client request hung forever; terminate it explicitly.
            response.end();
        } else {
            response.write(data);
            response.end();
        }
    });
}
// Serialises `object` as JSON and sends it as the complete response body.
function render_JSON(response, object) {
    response.write(JSON.stringify(object));
    response.end();
}
I also have problem in "function setHeaders" in router.js file:
"use strict";
const views = require('./views');
const url = require('url');
// Route table: each entry maps a list of URL paths to a view function,
// the HTTP methods it accepts, and the headers sent with the response.
const routes = [
{
url: ['/api/cars'],
view: views.car,
methods: ['GET'],
headers: {'Content-Type': 'application/json; charset=UTF-8'} // application/json as per RFC4627
}];
// Writes the response status line and headers.
// BUG FIX: `writeHeader` was an undocumented legacy alias that no longer
// exists on http.ServerResponse; the documented API is `writeHead`.
function setHeaders(response, headers = {'Content-Type': 'text/plain'}, code = 200) {
    response.writeHead(code, headers);
}
// Normalises a parsed URL in place so '/api/cars' and '/api/cars/' match
// the same route, by stripping a single trailing slash.
function filterURL(requestURL) {
    const path = requestURL.pathname;
    if (path.endsWith('/')) {
        requestURL.pathname = path.slice(0, -1);
    }
}
exports.route = (request, response) => {
for (let r of routes) {
const requestURL = url.parse(request.url, true);
// url matched and correct method
//if requestURL.pathname
filterURL(requestURL);
if (r.url.includes(requestURL.pathname) && r.methods.includes(request.method)) {
if (r.headers) {
setHeaders(response, r.headers);
} else {
setHeaders(response)
}
r.view(request, response, requestURL.query);
return;
}// if unsupported HTTP method
else if (r.url.includes(requestURL.pathname) && !r.methods.includes(request.method)) {
setHeaders(response, undefined, 405);
response.end();
return;
}
}
// if route not found respond with 404
setHeaders(response, undefined, 404);
response.end('404 Not Found!')
};
Does anybody know what the problem could be?
Thanks.
Regarding your problem in the "render_html" function: I think the issue is that you are missing the file encoding. As the fs documentation says, if you don't set an encoding the result will be a Buffer. You can easily fix it using:
fs.readFile(file, 'utf8', callback)
(Assuming that you are using utf8 as encoding)
And I think your problem in the "router.js" file is that you should use "writeHead" instead of "writeHeader" — you can check it in the http documentation.
I hope this solves your issue. Greetings.
Related
I have an API that periodically makes a request to a REST API and stores the data in my database. Now I want to add a new feature for certain data: download some data on request to my API. My API then makes another request to the REST API, but I don't want this data to be stored in my database — I want it downloaded as JSON or CSV.
I managed to create the request: I coded the call to the REST API and got the JSON into a variable with all the data. I'm stuck there — how do I get this data downloaded into a directory?
Im using javascript nodeJS with bent ,getJSON ,mssql ,https.
the code of the function:
// Fetches monthly/annual climatology data from the AEMET OpenData API for
// station `Indicativo`, issuing one request per year from anioIni to anioFin.
// NOTE(review): the requests are fired without awaiting their responses —
// the while loop issues all of them and the 'end' handlers run later.
async function descargarDatosOpendata(anioIni, anioFin, Indicativo){
try{
while(anioIni == anioFin || anioIni < anioFin){ // equivalent to anioIni <= anioFin
console.log("first");
var http = require("https"); // NOTE(review): require() inside a loop; hoist to module scope
var options = {
"method": "GET",
"hostname": "opendata.aemet.es",
// This first call returns an envelope whose `datos` field holds the URL of the real data.
"path": "/opendata/api/valores/climatologicos/mensualesanuales/datos/anioini/"+ anioIni +"/aniofin/"+ anioIni +"/estacion/"+ Indicativo +"",
"headers": {
"cache-control": "no-cache",
"api_key": "MYKEY"
}
};
console.log("second");
var req = http.request(options, function (res) {
console.log("tercera");
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", async function () {
console.log("endChunk");
var body = Buffer.concat(chunks);
console.log(body);
var bodyString = body.toString();
bodyJSON = JSON.parse(bodyString); // NOTE(review): implicit global — throws in strict mode; declare with const/let
console.log(bodyJSON);
console.log(bodyJSON.datos);
// estado == 200 and descripcion == "exito" signal success in the AEMET envelope.
if (bodyJSON.estado == 200 && bodyJSON.descripcion == "exito") {
let obj = await getJSON(bodyJSON.datos); // second fetch: the actual dataset
console.log(obj)
}
});
});
anioIni++;
req.end();
}
}
catch (err) {
console.log(err);
}
}
obj log is the data in json format: [{data}]
If this code is running in Node, you should use the Node fs module. The appendFile method will create a new file or append to it if it already exists. Learn more about fs
Example code
var fs = require('fs');
// appendFile creates mynewfile1.txt if it does not exist, otherwise
// appends the content to the existing file.
fs.appendFile('mynewfile1.txt', 'Hello content!', function (err) {
    if (err) {
        throw err;
    }
    console.log('Saved!');
});
I have never had this happen before and am not sure why it's happening.
I have a component written to display PDF files in an iframe as part of a larger application. I am retrieving a BLOB stream from the server and attempting to create a URL for it to display in the iframe but it keeps giving me a cross-origin error, which I thought would not be possible since it is creating the URL out of data.
Here is my entire component:
import React, { useState, useEffect } from 'react'
import IFrameComponent from '../Elements/IFrameComponent';
const PDFPages = (props) => {
let [file, setFile] = useState(null)
let [notFound, show404]=useState(false)
useEffect(() => {
let id=props.site?.componentFile;
fetch(`${process.env.REACT_APP_HOST}/documents/GetPDF`,
{
method: 'POST'
, headers: {
'Content-Type': 'application/json'
}
, credentials: 'include'
, body: JSON.stringify({file:id})
})
.then(async response => {
let blob;
try{
blob=await response.blob(); // <--- this functions correctly
}
catch(ex){
let b64=await response.json()
blob=Buffer.from(b64.fileData,'base64')
}
//Create a Blob from the PDF Stream
//Build a URL from the file
const str=`data:application/pdf;base64,${b64.fileData}`
const url=URL.createObjectURL(blob) //<--- ERROR IS THROWN HERE
setFile(url);
})
.catch(error => {
show404(true)
});
}, []);
if(!notFound){
return <IFrameComponent src={file} title=''>
Please enable iFrames in your browser for this page to function correctly
</IFrameComponent>
}
else {
return (
<>
<h3> File {file} could not be found on server</h3>
</>
)
}
}
export default PDFPages;
For completeness here is the GetPDF function from the server which is sending the file.
// POST /GetPDF — loads a PDF from ./data/documentation and sends it either
// as raw binary or, when props.base64 is set, as base64 inside JSON.
router.post('/GetPDF', async (req, res, next) => {
    const props = req.body;
    let fileName = props.file;
    try {
        fileName = fileName.replace(/%20/g, " ");
        let options = {};
        if (props.base64) options.encoding = 'base64';
        let data = await dataQuery.loadFile(`./data/documentation/${fileName}`, options);
        if (!props.base64) {
            // BUG FIX: res.attachment and res.contentType are Express
            // *methods*; the original assigned strings to them, which
            // replaced the functions and never set any headers.
            res.attachment(fileName);
            res.contentType('application/pdf');
            res.send(data);
        }
        else {
            res.send({ fileData: data, fileName: fileName });
        }
    }
    catch (ex) {
        res.send({ error: true });
    }
});
I have done very little work in node sending files but am positive my client code is good. Where am I going wrong here?
The problem was that I was trying to be too fancy sending a BLOB or Base64 data. After investigation I rewrote
// NOTE(review): verbatim repeat of the POST /GetPDF handler shown earlier,
// quoted here as the "before" version of the rewrite that follows.
router.post('/GetPDF', async (req, res, next) => {
const props = req.body;
let fileName = props.file;
try {
fileName = fileName.replace(/%20/g, " ");
let options = {};
if (props.base64) options.encoding = 'base64'
let data = await dataQuery.loadFile(`./data/documentation/${fileName}`, options);
if (!props.base64) {
// NOTE(review): res.attachment / res.contentType are Express methods;
// assigning to them overwrites the functions and sets no headers.
res.attachment = "filename=" + fileName
res.contentType = 'application/pdf'
res.send(data);
}
else{
res.send({fileData:data, fileName: fileName});
}
}
catch (ex) {
res.send({ error: true })
}
});
on the server to
// GET /GetPDF/:fileName — loads the named PDF from ./data/documentation
// and sends it inline with an application/pdf content type.
router.get('/GetPDF/:fileName', async (req, res, next) => {
    let fileName = req.params.fileName;
    fileName = `./data/documentation/${fileName.replace(/%20/g, " ")}`;
    try {
        const data = await dataQuery.loadFile(fileName);
        res.contentType("application/pdf");
        res.send(data);
    }
    catch (ex) {
        // Keep the error response shape the client already expects.
        res.send({ error: true });
    }
});
Then calling it from the client using
const url = `${process.env.REACT_APP_HOST}/documents/GetPDF/${props.site.componentFile}`
as the iFrame src sends the PDF properly as expected.
This same method also solved another problem with HTML pages sent from the server not functioning correctly.
I'm trying to download a json file from an external url using nodejs.
The problem is that this file (dumpFile.json) is created empty.
// NOTE(review): this snippet is incomplete as quoted — `fs`, `http` and
// `logger` are not defined here, line 2 has a stray closing parenthesis,
// and the trailing `}).catch` has no matching opening expression.
var file = fs.createWriteStream("download/dumpFile.json");
let URL = 'http://user:pass#domain.com/file.json');
var request = http.get(URL, function (resp) {
// 'finish' is a writable-stream event; an http response emits 'end' instead.
resp.on("finish", function () {
// Reads the literal path "file", not the file that was just written.
logger.error(fs.readFileSync("file", { encoding: "utf8" }))
}).pipe(file);
});
}).catch(error => {
logger.error(error)
})
I tried a lot of things, but I can't figure out what is happening.
const fs = require('fs')
const http = require('http')

const url = 'http://user:pass#domain.com/file.json'
const fileName = 'download/dumpFile.json'

// Pipe the HTTP response body straight into the destination file.
http.get(url, (res) => {
  res.pipe(fs.createWriteStream(fileName))
})
I think you are calling an https URL with the http module; try this working code:
// Downloads a JSON file over HTTPS into dumpFile.json, then prints it.
var http = require('https');
var fs = require("fs");
var file = fs.createWriteStream("dumpFile.json");
let URL = 'https://raw.githubusercontent.com/ljharb/json-file-plus/master/package.json';
try {
    var request = http.get(URL, function (resp) {
        resp.pipe(file);
        // BUG FIX: 'finish' is emitted by the writable file stream when
        // piping completes; the original listened on the response (which
        // never emits 'finish'), then read the literal path "file" with an
        // undefined `logger`, so nothing was ever printed.
        file.on("finish", function () {
            console.log(fs.readFileSync("dumpFile.json", {
                encoding: "utf8"
            }));
        });
    });
} catch (e) {
    console.log('error ', e);
}
Sorry, your code seems to be incomplete; I updated it to verify that it works.
I have this code that creates a project and uploads the image to Amazon S3.
I am using Bodyparser middleware to handle the data coming from the client but after some research I found out that it doesn't handle formdata. I need to use multer or some library like that.
Here is my attempt to use multer:
In my routes folder that handles the Post request to the Create function, I added it like this:
import multer from 'multer';
// BUG FIX: multer's .any() accepts no arguments (it takes every file field,
// ignoring the string passed here). For a single named file input use
// .single('projectImage'), which exposes the file as req.file and the
// remaining text fields as req.body.
const upload = multer();
routes.post(
    '/',
    upload.single('projectImage'),
    ProjectController.create,
);
Here is the Create function:
// Creates a project: uploads the request's image to S3 (via S3Upload,
// which parses the multipart body with busboy), then persists the project
// with the resulting image URL.
// NOTE(review): this route is mounted behind a multer middleware that has
// already consumed the multipart body — piping `req` into busboy again
// inside S3Upload will see an empty stream; use one parser, not both.
export async function create(req, res, next) {
const body = req.body;
S3Upload(req, res, async (s3err, s3res) => {
if (s3err) {
res.send('Error occured uploading image')
} else if (s3res && s3res.Location) {
// s3res.Location is the public URL of the uploaded object.
body.projectImage = s3res.Location
try {
return res
.status(HTTPStatus.CREATED)
.json(await Project.createProject(body, req.user._id));
} catch (err) {
err.status = HTTPStatus.BAD_REQUEST;
return next(err);
}
} else {
res.send('Error creating project.')
}
});
}
Now when I send a post request to the /project routes. I get this error:
(node:77236) UnhandledPromiseRejectionWarning: TypeError: (0 , _s2.default) is not a function
(node:77236) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 1)
PS. The Create function works perfectly without adding the S3Upload. & the S3Upload also works perfectly if I am only sending a file. The only time when it doesnt work is when I send mix data. i.e. a form with some text fields and a file.
I didn't post the S3Upload function but if anyone is curious, let me know I will share the code for it.
UPDATE
// Parses a multipart request with busboy, buffers the uploaded file in
// memory, and uploads it to S3 under a fresh UUID key. Calls callback(err)
// on failure, or callback(null, s3res) with the S3 response on success.
export async function S3Upload(req, res, callback) {
    const chunks = [];
    let fileType;
    let fileEncodingType;
    const busboy = new Busboy({
        headers: req.headers,
    });
    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
        // BUG FIX: String.replace returns a new string — the original
        // discarded the sanitised name. Keep it for the completion log.
        const safeName = filename.replace(/ /g, "_");
        fileType = mimetype;
        fileEncodingType = encoding;
        file.on('data', data => {
            chunks.push(data);
        });
        file.on('end', () => {
            // BUG FIX: the original template literal was garbled and logged
            // a literal placeholder instead of the file name.
            console.log(`File [${safeName}] Finished`);
        });
    });
    busboy.on('finish', () => {
        const userId = UUID();
        const params = {
            Bucket: BUCKET_NAME,
            Key: userId,
            Body: Buffer.concat(chunks),
            ACL: ACL,
            ContentEncoding: fileEncodingType,
            ContentType: fileType
        };
        S3.upload(params, (err, s3res) => {
            if (err) {
                callback(err);
            } else {
                callback(null, s3res);
            }
        });
    });
    req.pipe(busboy);
}
Is there a standard way to require a Node module located at some URL (not on the local filesystem)?
Something like:
require('http://example.com/nodejsmodules/myModule.js');
Currently, I am simply fetching the file into a temporary file, and requiring that.
You can fetch module using http.get method and execute it in the sandbox using vm module methods runInThisContext and runInNewContext.
Example
const http = require('http');
const vm = require('vm');
// concat-stream buffers the whole http payload and delivers it to a single
// callback — see https://www.npmjs.com/package/concat-stream
const concat = require('concat-stream');

// Fetch /hello.js from example.com and evaluate it in the current context.
const requestOptions = {
    host: 'example.com',
    port: 80,
    path: '/hello.js'
};

http.get(requestOptions, (res) => {
    res.setEncoding('utf8');
    res.pipe(concat({ encoding: 'string' }, (remoteSrc) => {
        // The second argument is the filename reported in stack traces.
        vm.runInThisContext(remoteSrc, 'remote_modules/hello.js');
    }));
});
IMO, executing remote code inside the server application runtime may be reasonable only when there is no alternative — and only if you trust the remote service and the network in between.
Install the module first :
npm install require-from-url
And then put in your file :
// Synchronously fetches and evaluates a remote module via require-from-url.
var requireFromUrl = require('require-from-url/sync');
requireFromUrl("http://example.com/nodejsmodules/myModule.js");
0 dependency version (node 6+ required, you can simply change it back to ES5)
const http = require('http'), vm = require('vm');

// Fetch each script and evaluate it in the current context once fully read.
['http://example.com/nodejsmodules/myModule.js'].forEach(moduleUrl => {
    http.get(moduleUrl, res => {
        if (res.statusCode === 200 && /\/javascript/.test(res.headers['content-type'])) {
            let source = '';
            res.setEncoding('utf8');
            res.on('data', chunk => { source += chunk; });
            // The url doubles as the filename shown in stack traces.
            res.on('end', () => { vm.runInThisContext(source, moduleUrl); });
        }
    });
});
This is still the asynchronous version; if synchronous loading is required, a synchronous HTTP request module would be needed instead.
If you want something more like require, you can do this:
const http = require('http');
const vm = require('vm');
const concat = require('concat-stream');
const async = require('async');

// require-like helper: fetches `url`, evaluates the payload in the current
// context, and passes the evaluation result to `callback`.
function http_require(url, callback) {
    http.get(url, (res) => {
        // console.log('fetching: ' + url)
        res.setEncoding('utf8');
        res.pipe(concat({ encoding: 'string' }, (src) => {
            callback(null, vm.runInThisContext(src));
        }));
    });
}

urls = [
    'http://example.com/nodejsmodules/myModule1.js',
    'http://example.com/nodejsmodules/myModule2.js',
    'http://example.com/nodejsmodules/myModule3.js',
]

// Fetch all modules in parallel; `results` collects each evaluation result.
async.map(urls, http_require, function (err, results) {
    // the rest of your program logic
});
You could overwrite the default require handler for .js files:
// Overrides Node's loader hook for .js files; the handler receives the
// module object and resolved filename and must populate module.exports.
require.extensions['.js'] = function (module, filename) {
// ...
}
You might want to checkout better-require as it does pretty much this for many file formats. (I wrote it)
const localeSrc = 'https://www.trip.com/m/i18n/100012631/zh-HK.js';
// BUG FIX: the URL is https, but the original required 'http', whose get()
// throws ERR_INVALID_PROTOCOL for https:// URLs.
const https = require('https');
const vm = require('vm');
const concat = require('concat-stream');

// Fetch the locale bundle and evaluate it in a fresh context, then inspect
// whatever globals the script defined.
const request = https.get(localeSrc, res => {
    res.setEncoding('utf8');
    res.pipe(
        concat({ encoding: 'string' }, remoteSrc => {
            let context = {};
            const script = new vm.Script(remoteSrc);
            script.runInNewContext(context);
            console.log(context);
        }),
    );
});
// BUG FIX: get() takes no error callback — the original's third argument
// was silently ignored; errors surface via the request's 'error' event.
request.on('error', err => {
    console.log('err', err);
});