Local file download is corrupted - javascript

I am trying to download a file to my Windows PC using Node.js.
I tried the following code. The problem is that the file I download via Node.js is 185 KB, while the actual original file is 113 KB (found by downloading it directly in the browser).
// NOTE(review): assignment without var/const creates an implicit global.
request = require('request');
// Downloads `url` to the local path `dest`, invoking `cb` once the write
// stream closes. `fs` is not required in this snippet — assumed to be
// imported elsewhere; no errors are handled anywhere in this function.
function download(url, dest, cb){
// NOTE(review): the HEAD response (err/res/body) is never inspected, so this
// extra round-trip serves no purpose here.
request.head(url, function(err, res, body){
request(url).pipe(fs.createWriteStream(dest)).on('close', function(){
cb();
});
});
};
I also tried downloading the file using a different code
// Downloads `url` to `dest` via the built-in https module, calling `cb`
// when the file stream finishes. `https` and `fs` are assumed to be
// required elsewhere in the file.
function download(url, dest, cb) {
var file = fs.createWriteStream(dest);
var request = https.get(url, function (response) {
// Pipe the HTTP response body straight into the file stream.
response.pipe(file);
file.on('finish', function () {
file.close(cb);
// NOTE(review): end() after close(cb) is redundant — close() already ends
// the stream; this call operates on an already-closing stream.
file.end();
});
});
}
But the same bug happened
The problem is that when I try to open the downloaded file in Photoshop, it fails, saying the file is corrupted. Please help.

This code (using the built-in https module) should work correctly. The stream will close automatically; there is no need to close it manually, since the autoClose option defaults to true when creating a file write stream.
See docs at: fs.createWriteStream.
If the file is still too large it is likely that you are not using the direct image link, try selecting "View image" / "Open image in new tab" etc. in your browser and using that link instead.
const https = require("https");
const fs = require("fs");

// Direct link to the image file (use "Open image in new tab" to obtain this,
// not the page URL).
const url = "https://upload.wikimedia.org/wikipedia/commons/thumb/e/e1/FullMoon2010.jpg/1024px-FullMoon2010.jpg";
const fileStream = fs.createWriteStream("test.jpg");

https.get(url, (response) => {
  // Pipe the response body straight to disk; the write stream closes
  // itself when the response ends (autoClose defaults to true).
  response.pipe(fileStream);
}).on("error", (err) => {
  // Without this handler, a network failure emits an unhandled 'error'
  // event and crashes the process.
  console.error("Download failed:", err.message);
});
You can also use the request library:
const request = require("request");
const fs = require("fs");

// Same image URL as above, saved under a different output name.
const url = "https://upload.wikimedia.org/wikipedia/commons/thumb/e/e1/FullMoon2010.jpg/1024px-FullMoon2010.jpg";
const output = fs.createWriteStream("request-test.jpg");

// Start the request and stream the response body to disk once headers arrive.
request(url).on("response", (incoming) => {
  incoming.pipe(output);
});

Related

Problem with read file pdf from URL nodeJS

I'm using pdf.js-extract to read data pdf file from URL. This is my code and it runs well:
const fs = require('fs');
const PDFExtract = require('pdf.js-extract').PDFExtract;
const pdfExtract = new PDFExtract();
const https = require('https');
const url = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf'
// Downloads the PDF at `url` to ./dummy.pdf, then extracts its text content.
const readData = async (url) => {
https.get(url, async function (response) {
const file = fs.createWriteStream('./dummy.pdf');
response.pipe(file);
file.on("finish", () => {
// Extraction is asynchronous; the log fires whenever it completes.
pdfExtract.extract('./dummy.pdf', {})
.then(async function(data) {
console.log(data)
});
file.close();
// NOTE(review): unlinkSync runs immediately, while extract() above is still
// reading the file asynchronously — deleting the file here is a race and can
// make the extraction fail. The unlink belongs inside the .then() callback.
fs.unlinkSync('./dummy.pdf')
});
});
}
readData(url)
But I have 2 problem about this code.
The first is: how can I handle the case where the URL changes from https
to http? I tried node-fetch, but it did not seem to work as I expected.
The second one is that the code take too long to handle if the pdf
file is large, about 5 seconds for 2 Mbs. I wonder if there is a
faster way like reading the file from the buffer or something like
that without having to save it as a temporary file
Thanks for your attention

How to make this code run in modern nodejs?

I am trying to implement a simple web interface where I can see do tail -f of my log file.
Found the below link
https://thoughtbot.com/blog/real-time-online-activity-monitor-example-with-node-js-and-websocket
It seems to be very old and not compatible with modern-day Node.js.
I tried to go through the Node.js documentation but was unable to fix this.
Child process creation is causing some issues.
// NOTE(review): this snippet targets pre-1.0 Node.js — sys.puts,
// process.createChildProcess, res.sendHeader and res.sendBody were all
// removed long ago and do not exist in modern Node.
var filename = process.argv[2];
if (!filename)
return sys.puts("Usage: node watcher.js filename");
// process.createChildProcess -> require('child_process').spawn in modern Node.
var tail = process.createChildProcess("tail", ["-f", filename]);
console.log("start tailing");
tail.addListener("output", function (data) {
// NOTE(review): `console` is not callable; presumably console.log was intended.
console(data);
});
var http = require("http");
http.createServer(function(req,res){
// res.sendHeader -> res.writeHead in modern Node.
res.sendHeader(200,{"Content-Type": "text/plain"});
tail.addListener("output", function (data) {
// res.sendBody -> res.write in modern Node.
res.sendBody(data);
});
}).listen(8000);
I want to send this tailf'd log to another server, which will be running nodejs app to read this.
Can someone help me out?
I've searched through the Node.js documentation, but I couldn't find any version that documents process.createChildProcess().
However, you can replace it with the built-in child_process module:
const filename = process.argv[2];
if (!filename) {
  console.log("Usage: node watcher.js filename");
  process.exit(0);
}

// The built-in module is named "child_process" (underscore, not hyphen):
// require('child-process') throws MODULE_NOT_FOUND.
const { spawn } = require("child_process");

// spawn() takes the command plus an argument array; exec() expects a single
// shell-command string, so it is the wrong API for ["-f", filename].
const tail = spawn("tail", ["-f", filename]);
console.log("start tailing");

tail.stdout.on("data", (data) => {
  console.log(data.toString());
});

const http = require("http");
http.createServer((req, res) => {
  res.writeHead(200, { "Content-Type": "text/plain" });
  // Each connection attaches its own listener; detach it when the client
  // disconnects so handlers don't accumulate on tail.stdout forever.
  const forward = (data) => res.write(data);
  tail.stdout.on("data", forward);
  req.on("close", () => tail.stdout.removeListener("data", forward));
}).listen(8000);

Ipfs-mini cat APi's output buffer seems like corrupted for the hash pointing the image file

I am a newbie to both Javascript and ipfs and I am trying an experiment to fetch an image buffer from the ipfs hash "QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n" using ipfs-mini node module.
Below is my code
// Dependencies: ipfs-mini for IPFS access; FileReader/multer are required but
// not visibly used in this snippet. `app`, `upload`, `fs` and `ghash` are
// assumed to be defined elsewhere in the file.
const IPFS = require('ipfs-mini');
const FileReader = require('filereader');
var multer = require('multer');
const ipfs = initialize();
// POST /upload: after the upload middleware runs, fetch a fixed IPFS hash
// and write its contents to tryipfsimage.gif.
app.post('/upload',function(req,res){
upload(req,res, function(err){
console.log(req.file.originalname);
ipfs.cat('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n', function(err, data){
if(err) console.log("could not get the image from the ipfs for hash " + ghash);
else {
// NOTE(review): `data` is already a binary Buffer; converting it to a base64
// *string* here means base64 text — not image bytes — gets written to the
// .gif below, which is the likely cause of the "corrupted" file (see the
// answer that follows this question).
var wrt = data.toString('base64');
console.log('size ; ' + wrt.length);
fs.writeFile('tryipfsimage.gif',wrt, (err) =>{
if(err)console.log('can not write file');
else {
//console.log(data);
ipfs.stat('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n', (err, data)=>{
// console.log(hexdump(wrt));
});
console.log("files written successfully");
}
});
}
});
});
});
// Builds an IPFS client pointed at the public Infura gateway over HTTPS.
function initialize() {
console.log('Initializing the ipfs object');
return new IPFS({
host: 'ipfs.infura.io',
protocol: 'https'
});
}
I can view the image properly in the browser using the link "https://ipfs.io/ipfs/QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n", but if I open the file 'tryipfsimage.gif', into which the above program dumps the buffer returned by the cat API, the image content appears corrupted. I am not sure what mistake I am making in the code; it would be great if someone could point it out.
From ipfs docs https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#javascript---ipfsfilescatipfspath-callback
The data in the callback is actually a Buffer, so by calling toString('base64') on it you are writing base64 text into the .gif file — there is no need to do this. You can pass the Buffer directly to the fs.writeFile API:
fs.writeFile('tryipsimage.gif', file, ...
For larger files I would recommend using the ipfs catReadableStream, where you can do something more like:
// Stream the file from IPFS straight to disk instead of buffering it all
// in memory. (Remember to attach 'error' handlers to both streams.)
const ipfsStream = ipfs.catReadableStream('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n')
const outFile = fs.createWriteStream('tryipsimage.gif')
ipfsStream.pipe(outFile);

Download a PDF automatically once its generated (Node, Express)

I am using following node module html-pdf
to convert html to pdf. I have successfully converted html to pdf but I am having trouble downloading the file once it has been created.
Code to generate PDF:
var fs = require('fs');
var pdf = require('html-pdf');
// Load the HTML template synchronously at startup.
var html = fs.readFileSync('./test/businesscard.html', 'utf8');
var options = { format: 'Letter' };
// Render the HTML to ./businesscard.pdf; the callback receives either an
// error or a result object containing the generated file's path.
pdf.create(html, options).toFile('./businesscard.pdf', function(err, res) {
if (err) return console.log(err);
console.log(res); // { filename: '/app/businesscard.pdf' }
});
How can I either open the PDF within the browser for the user to see or automatically download the pdf within the browser without the user having to go through another step.
I just made some changes to your code. In this code I create a route; whenever you make a request to this route, it converts the HTML file to PDF and creates a PDF file in the directory you are executing from. At the same time, it displays the file in the browser with a download option as well. Hope this helps. Here is my code:
var express = require('express');
var fs = require('fs');
var pdf = require('html-pdf');

// Load the HTML template once at startup.
var html = fs.readFileSync('C:/Users/nodejs/tasks/file.html', 'utf8');
var options = { format: 'Letter' };
var app = express();

// GET /file — regenerate the PDF, then stream it back so the browser can
// display it inline (with its own download option).
app.get('/file', function (request, response) {
  pdf.create(html, options).toFile('./businesscaw.pdf', function (err, res) {
    if (err) {
      // Previously the request was left hanging on a generation error.
      console.log(err);
      return response.status(500).send('PDF generation failed');
    }
    console.log(res);
    // Use the absolute path html-pdf reports instead of re-hardcoding it.
    fs.readFile(res.filename, function (err, data) {
      if (err) {
        // Previously this error was silently ignored and `data`
        // (undefined) was sent anyway.
        console.log(err);
        return response.status(500).send('Could not read generated PDF');
      }
      response.contentType("application/pdf");
      response.send(data);
    });
  });
});

app.listen(3000, function () {
  // Fixed typo: "loalhost" -> "localhost".
  console.log("Server listening on port http://localhost:3000");
});
Output:
See the output in browser like : localhost:3000/file

How to upload image to S3 using Node

I am writing an Express app that takes in a base64 encoded string that represents an image. Right now, i'm not really sure how I can take that string and upload the image to AWS S3, so i'm reading in the encoded image string data, decoding it, writing a file using fs, and then trying to upload. I have this working for an endpoint that just takes in a raw file, and all of its content is correctly uploaded to AWS s3.
Now when I try to do what I described above, I'm able to upload to S3, but the file is 0 KB and empty, and I'm not sure why. I tested just taking the string data and writing it to a test file, and that works. However, when I try uploading to S3, the file shows up but is empty. Here is my code:
// POST handler: accepts a base64-encoded image string in the request body,
// decodes it to a Buffer, writes a temp file, and uploads it to S3 via s3fs.
// `router`, `guid`, `decodeBase64Image` and `s3fsImpl` are defined elsewhere.
router.post('/images/tags/nutritionalInformation/image/base64encoded', function (req, res) {
console.log(req.body.imageString);
var base64Stream = req.body.imageString;
var imgDecodedBuffer = decodeBase64Image(base64Stream);
console.log(imgDecodedBuffer);
// write to image file
var prefix = guid().toString() + ".jpg";
var filePath = './uploads/' + prefix;
console.log(filePath);
// NOTE(review): fs.writeFile is asynchronous — the callback below only logs.
fs.writeFile(filePath, imgDecodedBuffer.data, function(err) {
console.log(err);
});
// NOTE(review): this read stream is created immediately, before the async
// writeFile above has finished, so it likely reads an empty/partial file —
// which would explain the 0 KB object in S3. Passing the decoded buffer
// (imgDecodedBuffer.data) to s3fsImpl.writeFile directly avoids the race.
var stream = fs.createReadStream(filePath);
console.log(stream);
return s3fsImpl.writeFile(prefix, stream).then(function () {
fs.unlink(filePath, function (err) {
if (err) {
console.error(err);
}
});
});
})
Here are the relevant import statements:
// Module dependencies for the route above.
var fs = require('fs');
var s3fs = require('s3fs');
// NOTE(review): the multiparty middleware is created but not visibly used
// in this snippet.
var multiparty = require('connect-multiparty'),
multipartyMidleware = multiparty();
// NOTE(review): placeholder credentials shown here; real keys should come
// from environment/config, never hard-coded in source.
var s3fsImpl = new s3fs('blahblah', {
accessKeyId: 'ACCESS_KEY_ID',
secretAccessKey: 'SECRET'
});
Any help would be greatly appreciated!
If you simply pass in the buffer directly — which I presume is your imgDecodedBuffer.data value — it should work.

Categories