Forwarding an uploaded file stream to another server with Node.js - javascript

I'm building an Angular / Node.js application connected to a DMS (Alfresco). The server-side Node.js / Express layer acts as a proxy to hide the complexity of Alfresco from the client:
Angular client <--> Node backend <--> Alfresco
This question is only about the Node.js backend.
When uploading a file I would like to forward the incoming file directly to Alfresco without temporarily storing it on the disk. With temporary disk storage this works as expected:
const fileUpload = require('express-fileupload');
const FormData = require('form-data');
const fs = require('fs');
const axios = require('axios');

// app is the express object
app.use(fileUpload({ createParentPath: true }));

app.post('/file', async (req, res) => {
    // filedata is the FormData field of the client containing the actual file
    let file = req.files.filedata;
    let tmpPath = __dirname + '/tmp/' + file.name;
    // save the file temporarily on the server
    file.mv(tmpPath, async function (err) {
        // create a new FormData object for the upload to the DMS
        const formData = new FormData();
        formData.append('name', name); // name comes from the client request (declaration omitted)
        // createReadStream returns an fs.ReadStream, which inherits from stream.Readable
        formData.append('filedata', fs.createReadStream(tmpPath));
        // upload the file to the DMS
        let response = await axios.post('/files/...', formData, { headers: formData.getHeaders() });
        // remove the temporary file
        fs.rm(tmpPath, () => {
            // return the answer of the DMS to the client
            res.send(response.data);
        });
    });
});
Now I would like to avoid the disk access and forward the file directly to the DMS. Taking into consideration the question "Converting a Buffer into a ReadableStream in Node.js", I tried the following three alternatives.
const { Readable } = require('stream');

app.post('/file', async (req, res) => {
    let file = req.files.fileData;
    // create a new FormData object for the upload to the DMS
    const formData = new FormData();
    formData.append('name', name);

    /* alternatives starting here */
    // Source: https://stackoverflow.com/questions/13230487/

    // #1
    const readable = new Readable();
    readable._read = () => {};
    readable.push(file.data);
    readable.push(null);

    // #2
    const readable = new Readable();
    readable._read = () => {};
    const buffer = Buffer.from(file.data, 'utf-8');
    readable.push(buffer);
    readable.push(null);

    // #3
    const readable = Readable.from(file.data);

    /* alternatives ending here */

    // put the Readable into the FormData object
    formData.append('filedata', readable);
    // upload the file to the DMS
    let response = await axios.post('/files/...', formData, { headers: formData.getHeaders() });
    // return the answer of the DMS to the client
    res.send(response.data);
});
Whatever alternative I try, Alfresco always complains that required fields are missing. Nevertheless, all required fields are provided, since the version that stores the file temporarily works fine. I think Alfresco cannot handle the stream I provide, and I don't completely understand how streams work in this situation. What should I do differently?
Please note that all error handling as well as the Alfresco request configuration / API URL are omitted for the sake of readability.

Try providing file-related information such as filename, contentType and knownLength when appending the buffer:
let file = req.files.fileData;

const formData = new FormData();
// append the raw buffer together with file-related info
// (use the field name the DMS expects, filedata in this case)
formData.append('filedata', file.data, {
    filename: file.name,
    contentType: file.mimetype,
    knownLength: file.size
});

// upload the file to the DMS
let response = await axios.post('/files/...', formData, { headers: formData.getHeaders() });
// return the answer of the DMS to the client
res.send(response.data);
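For reference, a minimal sketch of the full proxy route without any temporary file, assuming the same express-fileupload setup, that the client sends the target name in req.body.name, and that the DMS endpoint expects the multipart fields name and filedata (the URL is a placeholder as in the question):

const fileUpload = require('express-fileupload');
const FormData = require('form-data');
const axios = require('axios');

app.use(fileUpload({ createParentPath: true }));

app.post('/file', async (req, res) => {
    const file = req.files.filedata;           // the uploaded file, held in memory as a Buffer
    const formData = new FormData();
    formData.append('name', req.body.name);    // assumed: the name field sent alongside the file
    // append the raw Buffer; filename, contentType and knownLength let form-data
    // build a proper multipart part without a backing file on disk
    formData.append('filedata', file.data, {
        filename: file.name,
        contentType: file.mimetype,
        knownLength: file.size
    });
    const response = await axios.post('/files/...', formData, { headers: formData.getHeaders() });
    res.send(response.data);
});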

Related

Uploading file to S3 using a custom API and AWS lambda

I am trying to send a file through Postman using the form-data type. The request is sent to AWS Lambda through an API. The request content that arrives in the Lambda is corrupted and contains a lot of question marks.
I would like to convert the request content back into the original file and store that file in S3.
Existing code:
const res = multipart.parse(event, false);
var file = res['File'];
var encodedFile = Buffer.from(file["content"], 'binary');
var encodedFilebs64 = Buffer.from(file["content"], 'binary').toString('base64');

const s3 = new AWS.S3();
const params = {
    Bucket: config.s3Bucket,
    Key: "asset_" + known_asset_id + '.bin',
    Body: encodedFile
};

await s3.upload(params).promise().then(function (data) {
    console.log(`File uploaded successfully. ${data.Location}`);
}, function (err) {
    console.error("Upload failed", err);
});
Response content from the CloudWatch logs:
https://i.stack.imgur.com/SvBfF.png
When converting this back to binary and comparing, the file is not the same as the original file.
It would be helpful if someone could help me reconstruct the file from the request content and store it in S3.
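Since the corruption usually happens before the code above even runs, here is a hedged sketch of one known fix, assuming the Lambda sits behind API Gateway with multipart/form-data added to the binary media types (so the body arrives base64-encoded) and using the lambda-multipart-parser package; the bucket name and key below are illustrative:

const AWS = require('aws-sdk');
const parser = require('lambda-multipart-parser'); // assumed multipart parser

exports.handler = async (event) => {
    // with binary media types configured, API Gateway delivers the raw body
    // base64-encoded and the parser decodes it into Buffers
    const result = await parser.parse(event);
    const file = result.files[0]; // file.content is a Buffer with the original bytes

    const s3 = new AWS.S3();
    await s3.upload({
        Bucket: process.env.S3_BUCKET,        // illustrative bucket name
        Key: 'asset_' + Date.now() + '.bin',  // illustrative key
        Body: file.content,
        ContentType: file.contentType
    }).promise();

    return { statusCode: 200, body: 'File uploaded' };
};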

How to send a zip file from the back end to the front end in Node.js using an Express server

I am building an application where it is required to save data into separate files and compress them into a zip file. These files are created at the back end, and I cannot send this back-end-created zip file to the front end, which is React in this case.
Express code:
app.post("/download",function(request,response,next){
let sheets=request.body["sheets"];
let charts=request.body["charts"];
let compressedSheets=LZString.compress(JSON.stringify(sheets));
fs.writeFile(__dirname+'/public/dataModel.txt', compressedSheets, function (err) {
if (err) throw err;
console.log('Replaced!');
},()=>{
fs.writeFile(__dirname+'/public/Report.json',charts,function(err){
if(err){
console.log(err);
}
},()=>{
var archiver = require('archiver');
var output = fs.createWriteStream(__dirname+'/public/example.zip');
var archive = archiver('zip', {
gzip: true,
zlib: { level: 9 } // Sets the compression level.
});
archive.on('error', function(err) {
throw err;
});
archive.pipe(output);
archive.file(__dirname+'/public/dataModel.txt', {name: 'dataModel.txt'});
archive.file(__dirname+'/public/Report.json', {name: 'Report.json'});
archive.finalize().then(()=>{
response.setHeader('Content-disposition', 'attachment; filename=example.zip');
response.download(__dirname+'/public/example.zip');
});
})
});
React code:
handleSaveAs = function () {
    let data = new FormData();
    data.append('sheets', JSON.stringify(this.state.sheets));
    data.append('charts', JSON.stringify(this.state.charts));
    axios
        .post("http://localhost:4001/download", data)
        .then(res => {
            console.log(res);
            const element = document.createElement("a");
            const file = new Blob([res.data], { type: 'application/zip' });
            element.href = URL.createObjectURL(file);
            element.download = "untitled.zip";
            document.body.appendChild(element);
            element.click();
        })
}
Provided that all the imports are handled properly, the zip file is created correctly at the back end. The problem is only with sending it to the front end.
Any help would be appreciated.
Thank you.
You can use the built-in fs module of Node.js to stream the data to the front end.
// Filestream middleware that takes in the file path as the parameter
const streamW = (pathToZip) => {
    return (req, res, next) => {
        // Set the download headers before streaming
        res.set({
            'Content-Type': 'application/zip',
            'Content-disposition': 'attachment; filename=example.zip'
        });
        // Create a readable stream (no encoding: a zip is binary data)
        const readableStream = fs.createReadStream(pathToZip);
        // Pipe it into the HTTP response; piping ends the response when the file is done
        readableStream.pipe(res);
    };
};

// The route that you want to hit from the front end to get the file
// Call the middleware and pass in the path to the zip
router.get('/downloadRoute', streamW('path_to_zip'));
In the Node.js download route you can instead return the zip as base64:
archive.finalize().then(() => {
    // readFileSync without an encoding returns a Buffer, which can be base64-encoded directly
    let filetext = fs.readFileSync(__dirname + '/public/example.zip');
    return res.end(filetext.toString('base64'));
});
On the front end do something like this:
handleSaveAs = function () {
    let data = new FormData();
    data.append('sheets', JSON.stringify(this.state.sheets));
    data.append('charts', JSON.stringify(this.state.charts));
    axios
        .post("http://localhost:4001/download", data)
        .then(res => {
            console.log(res);
            let elem = window.document.createElement('a');
            // the base64 payload is in res.data, not in the response object itself
            elem.href = "data:application/zip;base64," + res.data;
            elem.download = "my.zip";
            document.body.appendChild(elem);
            elem.click();
            document.body.removeChild(elem);
        })
}
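Alternatively, the base64 round-trip can be avoided by asking axios for binary data directly. A minimal sketch, assuming the Express route sends the raw zip bytes (for example via res.download as in the original code):

handleSaveAs = function () {
    let data = new FormData();
    data.append('sheets', JSON.stringify(this.state.sheets));
    data.append('charts', JSON.stringify(this.state.charts));
    axios
        .post("http://localhost:4001/download", data, { responseType: 'arraybuffer' })
        .then(res => {
            // res.data is an ArrayBuffer, so the Blob holds the real zip bytes
            const file = new Blob([res.data], { type: 'application/zip' });
            const element = document.createElement("a");
            element.href = URL.createObjectURL(file);
            element.download = "untitled.zip";
            document.body.appendChild(element);
            element.click();
            document.body.removeChild(element);
        });
}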

ExpressJS and PDFKit - generate a PDF in memory and send to client for download

In my API router, there is a function called generatePDF which aims to use the PDFKit module to generate a PDF file in memory and send it to the client for download instead of only displaying it.
In api.js:
var express = require('express');
var router = express.Router();
const PDFDocument = require('pdfkit');

router.get('/generatePDF', async function (req, res, next) {
    var myDoc = new PDFDocument({ bufferPages: true });
    myDoc.pipe(res);
    myDoc.font('Times-Roman')
        .fontSize(12)
        .text(`this is a test text`);
    myDoc.end();
    res.writeHead(200, {
        'Content-Type': 'application/pdf',
        'Content-disposition': 'attachment;filename=test.pdf',
        'Content-Length': 1111
    });
    res.send(myDoc.toString('base64'));
});

module.exports = router;
This does not work. The error message is (node:11444) UnhandledPromiseRejectionWarning: Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client.
How can I go about fixing this issue and getting it to work?
Also, a related question: how can I separate the business logic of PDF generation from the router and chain them up?
Complete solution.
var express = require('express');
var router = express.Router();
const PDFDocument = require('pdfkit');

router.get('/generatePDF', async function (req, res, next) {
    var myDoc = new PDFDocument({ bufferPages: true });

    let buffers = [];
    myDoc.on('data', buffers.push.bind(buffers));
    myDoc.on('end', () => {
        let pdfData = Buffer.concat(buffers);
        res.writeHead(200, {
            'Content-Length': Buffer.byteLength(pdfData),
            'Content-Type': 'application/pdf',
            'Content-disposition': 'attachment;filename=test.pdf',
        })
        .end(pdfData);
    });

    myDoc.font('Times-Roman')
        .fontSize(12)
        .text(`this is a test text`);
    myDoc.end();
});

module.exports = router;
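As a side note, a minimal sketch of how the router above might be mounted, assuming it is saved as routes/api.js (file names and port are illustrative):

// app.js (illustrative)
const express = require('express');
const app = express();
const apiRouter = require('./routes/api'); // the router shown above

app.use('/api', apiRouter); // the PDF is then served at GET /api/generatePDF
app.listen(3000);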
First, I recommend creating a service for PDFKit, and then a controller for the route that you want.
I used get-stream to make this easier.
It also answers your follow-up question to the accepted answer:
how I can separate the business logic of PDF generation from the
router and chain them up?
This is my professional solution:
import PDFDocument from 'pdfkit';
import getStream from 'get-stream';
import fs from 'fs';

export default class PdfKitService {
    /**
     * Generate a PDF of the letter
     *
     * @returns {Buffer}
     */
    async generatePdf() {
        try {
            const doc = new PDFDocument();

            doc.fontSize(25).text('Some text with an embedded font!', 100, 100);

            if (process.env.NODE_ENV === 'development') {
                doc.pipe(fs.createWriteStream(`${__dirname}/../file.pdf`));
            }

            doc.end();

            const pdfStream = await getStream.buffer(doc);

            return pdfStream;
        } catch (error) {
            return null;
        }
    }
}
And then the method of the Controller:
(...)
async show(req, res) {
    const pdfKitService = new PdfKitService();
    const pdfStream = await pdfKitService.generatePdf();

    res
        .writeHead(200, {
            'Content-Length': Buffer.byteLength(pdfStream),
            'Content-Type': 'application/pdf',
            'Content-disposition': 'attachment;filename=test.pdf',
        })
        .end(pdfStream);
}
And finally the route:
routes.get('/pdf', FileController.show);
For those who don't want to spend RAM buffering whole PDFs and prefer to send chunks to the client right away:
const filename = `Receipt_${invoice.number}.pdf`;
const doc = new PDFDocument({ bufferPages: true });

const stream = res.writeHead(200, {
    'Content-Type': 'application/pdf',
    'Content-disposition': `attachment;filename=${filename}`,
});

doc.on('data', (chunk) => stream.write(chunk));
doc.on('end', () => stream.end());

doc.font('Times-Roman')
    .fontSize(12)
    .text(`this is a test text`);
doc.end();
You can use blob-stream like this.
Reference: https://pdfkit.org/index.html
const PDFDocument = require('pdfkit');
const blobStream = require('blob-stream');

// create a document the same way as above
const doc = new PDFDocument;

// pipe the document to a blob
const stream = doc.pipe(blobStream());

// add your content to the document here, as usual
doc.font('fonts/PalatinoBold.ttf')
    .fontSize(25)
    .text('Some text with an embedded font!', 100, 100);

// get a blob when you're done
doc.end();
stream.on('finish', function () {
    // get a blob you can do whatever you like with
    const blob = stream.toBlob('application/pdf');

    // or get a blob URL for display in the browser
    const url = stream.toBlobURL('application/pdf');
    iframe.src = url;
});
Pipe all your PDF data to the blob and then write it to a file or URL. Note that blob-stream is intended for use in the browser rather than on the server.
Or you can store the PDF directly in cloud storage such as Firebase Storage and send a download link to the client.
If you want to generate PDFs dynamically, you can also try the html-pdf library for Node, which allows you to create a PDF from an HTML template and add dynamic data to it. It is also more reliable than PDFKit.
https://www.npmjs.com/package/html-pdf
Also refer to this link:
Generate pdf file using pdfkit and send it to browser in nodejs-expressjs

Correct way to receive json data and write to file using Node.js

I am new to Node and am trying to request some JSON data from a server and save it to a file. I can receive the data without a problem, but I can't work out how to write it to a file once all the data has been received. Do I need a callback, or do I need to use http.createServer()? Any help with this would be appreciated.
This is what I have so far:
"use strict";
const request = require('request');
const fs = require('fs');
var options = {
url: 'url-to-request-data',
method: 'GET',
accept: 'application/json',
json: true,}
};
// Start the request
request(options, function(error, response, body) {
if (error) {
return console.log(error);
} else {
var path = "~/jsonfile.json";
fs.writeFile(path, body);
}
});
You have several issues.
fs.writeFile takes a third argument, a callback function, through which it notifies you of any error:
fs.writeFile(path, body, err => console.error(err));
On *nix systems with that file path you will get Error: ENOENT: no such file or directory.
~ is a bash expansion; Node does not know what to do with it.
Use:
const os = require('os');
const path = require('path');

const filePath = path.join(os.homedir(), 'jsonfile.json');
fs.writeFile(filePath, body, err => console.error(err));
Then you will get [object Object] written to ~/jsonfile.json if that URL returns JSON, as you're clearly requesting one (json: true makes request parse the body into an object).
You have two solutions:
Remove json: true
Or use fs.writeFile(filePath, JSON.stringify(body), err => /* ... */)
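Putting those fixes together, a minimal sketch of the corrected script, assuming the response should be stored as formatted JSON in the user's home directory:

"use strict";

const request = require('request');
const fs = require('fs');
const os = require('os');
const path = require('path');

const options = {
    url: 'url-to-request-data',
    method: 'GET',
    json: true, // request parses the body into an object
};

const filePath = path.join(os.homedir(), 'jsonfile.json');

// Start the request
request(options, function (error, response, body) {
    if (error) {
        return console.log(error);
    }
    // stringify the parsed body and pass a callback to be notified of errors
    fs.writeFile(filePath, JSON.stringify(body, null, 2), function (err) {
        if (err) {
            return console.error(err);
        }
        console.log('Saved to ' + filePath);
    });
});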
If you are only writing to a file, the best way to go is using streams
const filePath = path.join(os.homedir(), 'jsonfile.json');
request(options).pipe(fs.createWriteStream(filePath));
// See link for error handling
You can read more about request and streams, and how to handle errors in here
The json data is quite large so is fs.writeFileSync the best way to go? – ozzyzig
No. You should create a Writable Stream for large files, that way you're only buffering chunks of data at a time in memory, rather than the entire file at once. request() returns an object that implements the Stream interface, so you can simply .pipe() it to fs.createWriteStream():
'use strict';

const request = require('request');
const fs = require('fs');
const os = require('os');
const path = require('path');

var options = {
    url: 'url-to-request-data',
    method: 'GET',
    accept: 'application/json',
    // json: true,
};

// '~' is not expanded by Node, so build the path explicitly
var filePath = path.join(os.homedir(), 'jsonfile.json');
var ws = fs.createWriteStream(filePath);

// Start the request
request(options).on('error', function (error) {
    console.log(error);
}).on('close', function () {
    console.log('Done');
}).pipe(ws);

How do I upload a base64 encoded image (string) directly to a Google Cloud Storage bucket using Node.js?

Currently, I am using the @google-cloud/storage NPM package to upload a file directly to a Google Cloud Storage bucket. This requires some trickery as I only have the image's base64 encoded string. I have to:
Decode the string
Save it as a file
Send the file path to the below script to upload to Google Cloud Storage
Delete the local file
I'd like to avoid storing the file in the filesystem altogether since I am using Google App Engine and I don't want to overload the filesystem / leave junk files there if the delete operation doesn't work for whatever reason. This is what my upload script looks like right now:
// Convert the base64 string back to an image to upload into the Google Cloud Storage bucket
var base64Img = require('base64-img');
var filePath = base64Img.imgSync(req.body.base64Image, 'user-uploads', 'image-name');

// Instantiate the GCP Storage instance
var gcs = require('@google-cloud/storage')(),
    bucket = gcs.bucket('google-cloud-storage-bucket-name');

// Upload the image to the bucket
bucket.upload(__dirname.slice(0, -15) + filePath, {
    destination: 'profile-images/576dba00c1346abe12fb502a-original.jpg',
    public: true,
    validation: 'md5'
}, function (error, file) {
    if (error) {
        sails.log.error(error);
    }
    return res.ok('Image uploaded');
});
Is there any way to upload the base64 encoded string of the image directly instead of having to convert it to a file and then upload it using the path?
The solution, I believe, is to use the file.createWriteStream functionality that the bucket.upload function wraps in the Google Cloud Node SDK.
I've got very little experience with streams, so bear with me if this doesn't work right away.
First of all, we need to take the base64 data and drop it into a stream. For that, we're going to include the stream library, create a buffer from the base64 data, and add the buffer to the end of the stream.
var stream = require('stream');
var bufferStream = new stream.PassThrough();
bufferStream.end(Buffer.from(req.body.base64Image, 'base64'));
More on decoding base64 and creating the stream.
We're then going to pipe the stream into a write stream created by the file.createWriteStream function.
var gcs = require('@google-cloud/storage')({
    projectId: 'grape-spaceship-123',
    keyFilename: '/path/to/keyfile.json'
});

// Define bucket.
var myBucket = gcs.bucket('my-bucket');

// Define file & file name.
var file = myBucket.file('my-file.jpg');

// Pipe the 'bufferStream' into a 'file.createWriteStream' method.
bufferStream.pipe(file.createWriteStream({
    metadata: {
        contentType: 'image/jpeg',
        metadata: {
            custom: 'metadata'
        }
    },
    public: true,
    validation: "md5"
}))
.on('error', function (err) {})
.on('finish', function () {
    // The file upload is complete.
});
Info on file.createWriteStream, the File docs, bucket.upload, and the bucket.upload method code in the Node SDK.
So the way the above code works is to define the bucket you want to put the file in, then define the file and the file name. We don't set upload options here. We then pipe the bufferStream variable we just created into the file.createWriteStream method we discussed before. In those options we define the metadata and any other options you want to implement. It was very helpful to look directly at the Node code on GitHub to figure out how they break down the bucket.upload function, and I recommend you do so as well. Finally, we attach a couple of events for when the upload finishes and when it errors out.
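For context, a minimal sketch of how these pieces might be combined inside an Express route, assuming the client posts the raw base64 string as req.body.base64Image and using the current @google-cloud/storage entry point (bucket and file names are illustrative):

const stream = require('stream');
const { Storage } = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('my-bucket');

app.post('/profile-image', (req, res) => {
    // turn the base64 string into a stream
    const bufferStream = new stream.PassThrough();
    bufferStream.end(Buffer.from(req.body.base64Image, 'base64'));

    const file = bucket.file('profile-images/my-file.jpg');
    bufferStream
        .pipe(file.createWriteStream({
            metadata: { contentType: 'image/jpeg' },
            public: true,
            validation: 'md5'
        }))
        .on('error', (err) => res.status(500).send(err.message))
        .on('finish', () => res.send('Image uploaded'));
});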
Posting my version of the answer in response to @krlozadan 's request above:
// Convert the base64 string back to an image to upload into the Google Cloud Storage bucket
var mimeTypes = require('mimetypes');

var image = req.body.profile.image,
    mimeType = image.match(/data:([a-zA-Z0-9]+\/[a-zA-Z0-9-.+]+).*,.*/)[1],
    fileName = req.profile.id + '-original.' + mimeTypes.detectExtension(mimeType),
    base64EncodedImageString = image.replace(/^data:image\/\w+;base64,/, ''),
    imageBuffer = Buffer.from(base64EncodedImageString, 'base64');

// Instantiate the GCP Storage instance
var gcs = require('@google-cloud/storage')(),
    bucket = gcs.bucket('my-bucket');

// Upload the image to the bucket
var file = bucket.file('profile-images/' + fileName);
file.save(imageBuffer, {
    metadata: { contentType: mimeType },
    public: true,
    validation: 'md5'
}, function (error) {
    if (error) {
        return res.serverError('Unable to upload the image.');
    }
    return res.ok('Uploaded');
});
This worked just fine for me. Ignore some of the additional logic in the first few lines as they are only relevant to the application I am building.
If you want to save a string as a file in Google Cloud Storage, you can do it easily using the file.save method:
const { Storage } = require('@google-cloud/storage');

const storage = new Storage();
const myBucket = storage.bucket('my-bucket');
const file = myBucket.file('my-file.txt');
const contents = 'This is the contents of the file.';

file.save(contents).then(() => console.log('done'));
What an issue! I tried it and ran into the problem that the image was uploaded to Firebase Storage but could not be downloaded; the loader just kept spinning. After spending some time on it, I managed to upload the image to Firebase Storage with downloading working. The issue was the access token.
Check the screenshot: in the file location section at the bottom right there is a "create access token" option, and no access token is shown there. If you create the access token manually and refresh the page, the image shows up. So the question is how to create it by code.
Just use the code below to create the access token:
const uuidv4 = require('uuid/v4');
const uuid = uuidv4();
metadata: { firebaseStorageDownloadTokens: uuid }
The full code for uploading an image to Firebase Storage is given below:
const functions = require('firebase-functions');
var firebase = require('firebase');
var express = require('express');
var bodyParser = require("body-parser");
const uuidv4 = require('uuid/v4');
const uuid = uuidv4();
const os = require('os');
const path = require('path');
const cors = require('cors')({ origin: true });
const Busboy = require('busboy');
const fs = require('fs');
var admin = require("firebase-admin");

var serviceAccount = {
    "type": "service_account",
    "project_id": "xxxxxx",
    "private_key_id": "xxxxxx",
    "private_key": "-----BEGIN PRIVATE KEY-----\jr5x+4AvctKLonBafg\nElTg3Cj7pAEbUfIO9I44zZ8=\n-----END PRIVATE KEY-----\n",
    "client_email": "xxxx@xxxx.iam.gserviceaccount.com",
    "client_id": "xxxxxxxx",
    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
    "token_uri": "https://oauth2.googleapis.com/token",
    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
    "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-5rmdm%40xxxxx.iam.gserviceaccount.com"
};

admin.initializeApp({
    credential: admin.credential.cert(serviceAccount),
    storageBucket: "xxxxx-xxxx" // use your storage bucket name
});

const app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());

app.post('/uploadFile', (req, response) => {
    response.set('Access-Control-Allow-Origin', '*');
    const busboy = new Busboy({ headers: req.headers });
    let uploadData = null;

    busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
        // write the incoming file to a temporary location
        const filepath = path.join(os.tmpdir(), filename);
        uploadData = { file: filepath, type: mimetype };
        console.log("-------------->>", filepath);
        file.pipe(fs.createWriteStream(filepath));
    });

    busboy.on('finish', () => {
        const bucket = admin.storage().bucket();
        bucket.upload(uploadData.file, {
            uploadType: 'media',
            metadata: {
                metadata: {
                    firebaseStorageDownloadTokens: uuid,
                    contentType: uploadData.type,
                },
            },
        })
        .then(() => {
            // answer the client once the upload has finished
            response.status(200).json({
                message: 'File uploaded',
            });
        })
        .catch(err => {
            response.status(500).json({
                error: err,
            });
        });
    });

    busboy.end(req.rawBody);
});

exports.widgets = functions.https.onRequest(app);
You have to convert the base64 string to an image buffer and then upload it as below. You need to provide the image_data_from_html variable as the data you extract from the HTML event.
const base64Text = image_data_from_html.split(';base64,').pop();
const imageBuffer = Buffer.from(base64Text, 'base64');
const contentType = image_data_from_html.split(';base64,')[0].split(':')[1];
const fileName = 'myimage.png';
const imageUrl = 'https://storage.googleapis.com/bucket-url/some_path/' + fileName;

await admin.storage().bucket().file('some_path/' + fileName).save(imageBuffer, {
    public: true,
    gzip: true,
    metadata: {
        contentType,
        cacheControl: 'public, max-age=31536000',
    }
});

console.log(imageUrl);
I was able to get the base64 string over to my Cloud Storage bucket with just one line of code.
var decodedImage = Buffer.from(poster64, 'base64');

// Store poster to storage
let posterFile = await client.file(decodedImage, `poster_${path}.jpeg`, { path: 'submissions/dev/', isBuffer: true, raw: true });
let posterUpload = await client.upload(posterFile, { metadata: { cacheControl: 'max-age=604800' }, public: true, overwrite: true });
let permalink = posterUpload.permalink;
Something to be aware of is that if you are inside a Node.js environment you won't be able to use atob().
The top answer of this post showed me the error of my ways:
NodeJS base64 image encoding/decoding not quite working
