Write file to directory then zip directory - javascript

I am trying to write a file to a templates directory and then stream a zip with the content that was written. However, when the zip file is returned it says Failed - Network Error, which is due to the fs.writeFile in the controller. If I remove the writeFile call, the zipping works fine. My question is: how do I first write the file and then run the zip? The archiving appears to start before the asynchronous write of typeArrayString has finished.
Controller:
exports.download_one_feed = function(req, res, next) {
  Feed.findOne({ 'name': req.params.id })
    .exec(function(err, dbfeeds) {
      if (err) {
        res.send('get error has occured in routes/feeds.js');
      } else {
        const feedArray = dbfeeds.feed.data;
        // get from db collection & write file to download
        const typeArrayString = JSON.stringify(feedArray);
        let type = 'b'; // test only
        fs.writeFile(path.join(appDir, 'templates/' + type + '/template.json'), typeArrayString, (err) => {
          if (err) throw err;
          console.log('Saved!');
        });
        archiverService.FileArchiver(feedArray, res);
      }
    });
};
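One straightforward way to get the sequencing the question asks about, assuming the file really does need to be on disk before zipping, is to start the archive from inside the fs.writeFile callback so the write is guaranteed to have completed. A minimal sketch of that re-ordering (not the approach ultimately used below):
fs.writeFile(path.join(appDir, 'templates/' + type + '/template.json'), typeArrayString, (err) => {
  if (err) throw err;
  console.log('Saved!');
  // the template is on disk now, so the archiver can safely pick it up
  archiverService.FileArchiver(feedArray, res);
});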
Archive Service:
const archiver = require('archiver');
const zip = archiver('zip');
const path = require('path');
const fs = require('fs');
const appDir = path.dirname(require.main.filename);

exports.FileArchiver = function (feedArray, res) {
  const uploadsDir = path.join(appDir, '/uploads/');
  const templatesDir = path.join(appDir, '/templates/');
  const extensions = [".jpg", ".png", ".svg"];
  let imageArray = [];
  // feedArray arrives already parsed, so no JSON.parse is needed here
  feedArray.forEach(function(x) { iterate(x); }); // grab image names from object
  imageArray = uniq_fast(imageArray); // remove duplicates
  // zip images
  for (let i = 0; i < imageArray.length; i++) {
    console.log(imageArray[i]);
    const filePath = path.join(uploadsDir, imageArray[i]);
    zip.append(fs.createReadStream(filePath), { name: 'images/' + imageArray[i] });
  }
  res.attachment('download.zip');
  zip.pipe(res);
  // zip template directory
  console.log(templatesDir);
  zip.directory(templatesDir, false);
  zip.on('error', (err) => { throw err; });
  zip.finalize();
  return this;
};

Instead of writing the file to disk and then zipping the directory, I used zip.append to add the new content directly to the archive, overriding the old file in the directory.
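A minimal sketch of that approach, assuming the controller passes typeArrayString along to the archiver service (archiver's append accepts a plain string as the source, so no fs.writeFile is needed):
// append the JSON straight into the zip; the name mirrors where
// fs.writeFile would have put it inside the templates directory
zip.append(typeArrayString, { name: type + '/template.json' });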

Related

how to send a zip file back end to front end in nodejs using express server

I am building an application where it is required to save data into separate files and compress these files into a zip file. These files are created at the back end, and I could not send this back-end-created zip file to the front end, which is React in this case.
Express.js code:
app.post("/download",function(request,response,next){
let sheets=request.body["sheets"];
let charts=request.body["charts"];
let compressedSheets=LZString.compress(JSON.stringify(sheets));
fs.writeFile(__dirname+'/public/dataModel.txt', compressedSheets, function (err) {
if (err) throw err;
console.log('Replaced!');
},()=>{
fs.writeFile(__dirname+'/public/Report.json',charts,function(err){
if(err){
console.log(err);
}
},()=>{
var archiver = require('archiver');
var output = fs.createWriteStream(__dirname+'/public/example.zip');
var archive = archiver('zip', {
gzip: true,
zlib: { level: 9 } // Sets the compression level.
});
archive.on('error', function(err) {
throw err;
});
archive.pipe(output);
archive.file(__dirname+'/public/dataModel.txt', {name: 'dataModel.txt'});
archive.file(__dirname+'/public/Report.json', {name: 'Report.json'});
archive.finalize().then(()=>{
response.setHeader('Content-disposition', 'attachment; filename=example.zip');
response.download(__dirname+'/public/example.zip');
});
})
});
React code:
handleSaveAs = function() {
  let data = new FormData();
  data.append('sheets', JSON.stringify(this.state.sheets));
  data.append('charts', JSON.stringify(this.state.charts));
  axios
    .post("http://localhost:4001/download", data)
    .then(res => {
      console.log(res);
      const element = document.createElement("a");
      const file = new Blob([res.data], { type: 'application/zip' });
      element.href = URL.createObjectURL(file);
      element.download = "untitled.zip";
      document.body.appendChild(element);
      element.click();
    });
};
Provided all the imports are handled properly, the zip file is created properly at the back end. The problem is only with sending it to the front end.
Any help would be appreciated.
Thank you.
You can use the built-in fs module of Node.js to stream the data to the front end.
// Filestream middleware that takes in the file path as the parameter
const streamW = (pathToZip) => {
  return (req, res, next) => {
    // Create a readable stream; omit the encoding, since zip data is binary
    const readableStream = fs.createReadStream(pathToZip);
    // Pipe it into the HTTP response; the response ends when the stream does,
    // so there is no need to call next() or send anything else
    readableStream.pipe(res);
  };
};

// The route that you want to hit from the front end to get the file;
// call the middleware and pass in the path to the zip
router.get('/downloadRoute', streamW('path_to_zip'));
Do this in the Node.js download route:
archive.finalize().then(() => {
  let filetext = fs.readFileSync(__dirname + '/public/example.zip');
  // Buffer.from replaces the deprecated new Buffer()
  return res.end(Buffer.from(filetext).toString('base64'));
});
On the front end, do something like this:
handleSaveAs = function() {
  let data = new FormData();
  data.append('sheets', JSON.stringify(this.state.sheets));
  data.append('charts', JSON.stringify(this.state.charts));
  axios
    .post("http://localhost:4001/download", data)
    .then(res => {
      console.log(res);
      let elem = window.document.createElement('a');
      // the base64 payload is in res.data, not in the response object itself
      elem.href = "data:application/zip;base64," + res.data;
      elem.download = "my.zip";
      document.body.appendChild(elem);
      elem.click();
      document.body.removeChild(elem);
    });
};
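An alternative worth noting, assuming axios on the front end and a server that streams the raw zip (for example via res.download above) rather than base64: asking axios for binary data with responseType avoids the base64 round-trip entirely. A minimal sketch:
axios
  .post("http://localhost:4001/download", data, { responseType: 'blob' })
  .then(res => {
    // res.data is already a Blob when responseType is 'blob'
    const url = URL.createObjectURL(res.data);
    const elem = document.createElement('a');
    elem.href = url;
    elem.download = "my.zip";
    document.body.appendChild(elem);
    elem.click();
    document.body.removeChild(elem);
    URL.revokeObjectURL(url);
  });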

Nodejs : Get zip file from aws s3 url and manipulate files inside it after extracting

I am trying to fetch a zip file uploaded to AWS S3. After the file is fetched, I have to extract it and display the names of the files inside. How can I achieve this? I am new to file streaming, and this is what I have done till now.
import * as aws from "aws-sdk";
import express from "express";
import fs from "fs";
import request from "request";
import * as unzipper from "unzipper";

const config = {
  // credentials
};
const s3Client = new aws.S3(config);
const app = express();
app.use(express.json({
  limit: "1mb"
}));
app.use(express.urlencoded({
  extended: true
}));

app.post("/seturl", async (req, res) => {
  try {
    const url = req.body.url;
    request(url).pipe(fs.createWriteStream('ez.zip'));
    console.log("here");
    const zip = fs.createReadStream('ez.zip').pipe(unzipper.Parse({
      forceStream: true
    }));
    for await (const entry of zip) {
      const fileName = entry.path;
      console.log("///////////", fileName);
      const type = entry.type; // 'Directory' or 'File'
      const size = entry.vars.uncompressedSize; // There is also compressedSize
      if (fileName === "this IS the file I'm looking for") {
        entry.pipe(fs.createWriteStream('output/path'));
      } else {
        entry.autodrain();
      }
    }
  } catch (error) {
    return Promise.reject(`Error in reading ${error}`);
  }
});

app.listen(5600, (err) => {
  if (err) {
    console.error(err);
  } else {
    console.log("running");
  }
});
I am using the unzipper library here. If there is something better, I am open to using it. As of now, I am getting a FILE ENDED error.
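A likely cause of the FILE ENDED error, for what it's worth: the code starts reading ez.zip while request is still writing it, so unzipper sees a truncated archive. A minimal sketch of waiting for the download to finish before parsing, assuming the same request/unzipper setup as above:
const writeStream = fs.createWriteStream('ez.zip');
request(url).pipe(writeStream);
// only start parsing once the download has been fully flushed to disk
writeStream.on('finish', async () => {
  const zip = fs.createReadStream('ez.zip').pipe(unzipper.Parse({ forceStream: true }));
  for await (const entry of zip) {
    console.log(entry.path); // display the names of the files inside
    entry.autodrain();
  }
});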

file-type npm module returns null despite being called with buffer, what am I doing wrong?

I'm developing a website where users can upload video files to be stored in MongoDB. Before the files get uploaded and stored, I would like to check and validate the MIME type of the file, with the help of an npm module. I have tried file-type without success.
Link to the file-type npm module: https://www.npmjs.com/package/file-type
I call the module with a buffer of the uploaded file (tested with mp4 files), but it returns null. Here is my code for the upload route:
'use strict';

const router = require('express').Router();
const VideoInfo = require('../../models/VideoInfo');
const VideoAmount = require('../../models/VideoAmount');
const path = require('path');
const Lib = require('../../lib/Lib');
const multer = require('multer');
const GridFsStorage = require('multer-gridfs-storage');
const fileType = require('file-type');

// Defines storage of files with validation
const storage = new GridFsStorage({
  url: process.env.dbURL,
  file: (req, file) => {
    const data = [];
    req.on('data', chunk => {
      data.push(chunk);
    });
    req.on('end', () => {
      const buffer = Buffer.concat(data);
      const fType = fileType(buffer);
      return new Promise((resolve, reject) => {
        if (fType === null) {
          return reject(new Error('Unsupported file format'));
        }
        // note: this must be && rather than ||, otherwise the condition is always true
        if (fType.mime !== 'video/mp4' &&
            fType.mime !== 'video/webm' &&
            fType.mime !== 'video/ogg') {
          return reject(new Error('Unsupported file format'));
        }
        if (!req.session.username) {
          return reject(new Error('Unauthorized file upload attempt'));
        }
        // changes the file name before storing
        const fileName =
          Lib.make.randomString() + path.extname(file.originalname);
        const fileInfo = {
          filename: fileName,
          bucketName: 'uploads'
        };
        resolve(fileInfo);
      });
    });
  }
});

const upload = multer({ storage });

router.route('/upload')
  .get((req, res) => {
    // renders upload form, not really relevant
  })
  .post(upload.single('video'), async (req, res) => {
    // file gets saved to DB with the upload.single function
  });

module.exports = router;
What am I doing wrong?
The problem was that I wasn't getting the video file buffer.
What solved it was including busboy-body-parser in my app:
const busboyBodyParser = require('busboy-body-parser')
app.use(busboyBodyParser({
  limit: '120mb'
}))
Then I could get the buffer from the request:
const fileContent = req.files.video
const buffer = fileContent.data
Then I could get the file type of the file by calling file-type with the buffer.
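For illustration, a minimal sketch of that last step, assuming the synchronous file-type API used above (newer versions of the package expose an async fileTypeFromBuffer instead):
const fType = fileType(buffer)
if (fType) {
  console.log(fType.mime) // e.g. 'video/mp4'
  console.log(fType.ext) // e.g. 'mp4'
}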

NodeJS - Send converted file to client side to download

GOAL: Allow the user to download a PDF
Background: The code below generates a car.pdf file and stores it in the main project's directory when localhost:3000/ is loaded. This is great because I want to find a Car by id in the database, generate a Handlebars template, pass the data from Car into it, and generate a PDF from the compiled HTML.
Issue: Instead of saving the PDF to the main project's directory, I want it to download to the user's computer.
How can I do this?
Here is my code. I am using the npm package html-pdf.
helpers/export-helper.js
const fs = require('fs');
const pdf = require('html-pdf');
const Handlebars = require('handlebars');
const { Car } = require('./../models/car');

var writePDF = (res) => {
  Car.findById({ _id: '58857e256b639400110897af' })
    .then((car) => {
      var source = fs.readFileSync(__dirname + '/templates/car.handlebars', 'utf8');
      var template = Handlebars.compile(source);
      var file = template(car);
      pdf.create(file, { format: 'Letter' })
        .toFile('./car.pdf', (err, res) => {
          if (err) return console.log(err);
          console.log(res); // { filename: '/app/businesscard.pdf' }
        });
    })
    .catch((errors) => {
      console.log(errors);
    });
};

module.exports = { writePDF };
routes/home.js
const express = require('express');
const router = express.Router();
const { writePDF } = require('./../helpers/export-helpers');

router.get('/', (req, res) => {
  writePDF();
});

module.exports = router;
You should use res.download for this. Like so:
router.get('/', (req, res) => {
  res.download('car.pdf');
});
https://expressjs.com/en/api.html#res.download
You have to pipe the created PDF into the response to send it to the client side.
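For illustration, a minimal sketch of that piping approach, assuming html-pdf's toStream method (a sibling of the toFile call in the question) and that file is the compiled HTML from writePDF:
router.get('/', (req, res) => {
  pdf.create(file, { format: 'Letter' }).toStream((err, stream) => {
    if (err) return res.status(500).send('PDF generation failed');
    res.attachment('car.pdf'); // sets Content-Disposition so the browser downloads it
    stream.pipe(res); // stream the PDF straight to the client, no file on disk
  });
});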

nodejs load file

I want to load test.txt with nodejs.
var fs = require('fs');
fs.readFile('./test.txt', function (err, data) {
  if (err) {
    throw err;
  }
  console.log(data);
});
The path of the server is C:\server\test\server.js. The test.txt is located in the same directory, but I get this error: Error: ENOENT, no such file or directory 'C:\Users\User\test.txt'
Paths in Node are resolved relative to the current working directory. Prefix your path with __dirname to resolve it to the location of your Node script.
var fs = require('fs');
fs.readFile(__dirname + '/test.txt', function (err, data) {
  if (err) {
    throw err;
  }
  console.log(data.toString());
});
It's also possible to do this synchronously:
var fs = require('fs');
var path = require('path');

// Buffer mydata
var BUFFER = bufferFile('../test.txt');

function bufferFile(relPath) {
  return fs.readFileSync(path.join(__dirname, relPath)); // zzzz....
}
fs is the file system module. readFileSync() returns a Buffer, or a string if you ask for one. fs resolves relative paths against the process's current working directory, not the script's location; joining with __dirname via path works around that.
To load as a string, specify the encoding:
return fs.readFileSync(path, { encoding: 'utf8' });
You should use __dirname to get the directory name the file is located instead of the current working directory:
fs.readFile(__dirname + "/test.txt", ...);
Use path and fs:
const fs = require("fs");
const pth = require("path");
Sync:
let data = fs.readFileSync(pth.join(__dirname, "file.txt"));
console.log(data + "");
Async:
fs.readFile(pth.join(__dirname, "file.txt"), (err, data) => {
  console.log(data + "");
});
Also: if you need to read the file repeatedly and send it to clients, and the file size is not large, you can keep a copy of it in memory:
const exp = require("express");
const app = exp();
const fs = require("fs");
const pth = require("path");

let file = "";
app.get("/file", (q, r) => {
  // read the file once, then serve the cached copy on later requests
  if (file === "")
    file = fs.readFileSync(pth.join(__dirname, "file.txt")) + "";
  r.writeHead(200, { "Content-Type": "text/plain" });
  r.write(file);
  r.end();
});
So if it is in the same directory, just do this:
fs.readFile(__dirname + '/foo.txt', function(e, d) { console.log(d) })
If it's in the same directory, it should work. I have tested the same code with a test.txt file and it works fine:
var fs = require('fs');
fs.readFile('./test.txt', function (err, data) {
  if (err) {
    throw err;
  }
  console.log(data.toString());
});
