Keep getting "undefined" when converting readStream to String in GridFS

I'm working on a website that stores images in MongoDB GridFS and then displays a chosen image to the user.
The upload part is already working, but I'm having trouble with the download part: whenever I try to convert the readStream from the database to a string, I get undefined in my console.
I think the problem lies in the readStream part, specifically the conversion from readStream to String. When I replaced the readStream with a writeStream, I got a perfect copy of my image downloaded.
However, that isn't what I need. What I need is for the image to be sent to the front end.
I've tried a lot of different ideas, like moving createServer and MongoClient.connect around, and I've also tried different packages that supposedly convert a stream straight into a String.
MongoClient.connect(dbURL, function(err, client) {
    if (err) throw err;
    // setting db name
    var db = client.db("formImgs");
    // bucket to interact with GridFS
    bucket = new mongodb.GridFSBucket(db);
    // loop to upload each image
    for (var i = 0; i < fileNames.length; i++) {
        // uploads to the database using file locations
        fs.createReadStream("photos/" + fileNames[i])
            .pipe(bucket.openUploadStream(fileNames[i]))
            .on("error", function(e) {
                // error handling
                console.log(e);
            })
            .on("finish", function() {
                // success
                console.log("Done uploading!");
            });
    }
    const server = http.createServer((req, res) => {
        var readStream = fs.createReadStream("photos/LaptopRegistry.png");
        bucket.openDownloadStreamByName("LaptopRegistry.png").pipe(readStream);
        var bufs = [];
        // THIS IS WHERE I THINK MY CODE MESSES UP
        readStream.on("data", function(chunk) {
            bufs.push(chunk);
        }).on("end", function() {
            var fbuf = Buffer.concat(bufs);
            base64 = fbuf.toString("base64");
        });
        res.statusCode = 200;
        res.setHeader("Content-Type", "text/plain");
        fs.readFile("index.html", function(err, data) {
            res.writeHead(200, {"Content-Type": "text/html"});
            res.write(data);
            // insert use of base64 string here
        });
    });
});
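For what it's worth, a GridFS download stream is itself readable, so a minimal sketch of the buffering step (assuming bucket is initialized as above) would collect chunks from it directly; piping it into fs.createReadStream cannot work, because a read stream is not a valid pipe destination:
var bufs = [];
bucket.openDownloadStreamByName("LaptopRegistry.png")
    .on("data", function(chunk) {
        bufs.push(chunk);
    })
    .on("error", function(e) {
        console.log(e);
    })
    .on("end", function() {
        // the string only exists once "end" fires, so any response
        // that uses it has to be built inside this callback
        var base64 = Buffer.concat(bufs).toString("base64");
    });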

Related

Ipfs-mini cat API's output buffer seems corrupted for the hash pointing to the image file

I am a newbie to both JavaScript and IPFS, and I am trying an experiment to fetch an image buffer from the IPFS hash "QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n" using the ipfs-mini node module.
Below is my code:
const IPFS = require('ipfs-mini');
const FileReader = require('filereader');
var multer = require('multer');
const ipfs = initialize();

app.post('/upload', function(req, res) {
    upload(req, res, function(err) {
        console.log(req.file.originalname);
        ipfs.cat('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n', function(err, data) {
            if (err) console.log("could not get the image from the ipfs for hash " + ghash);
            else {
                var wrt = data.toString('base64');
                console.log('size ; ' + wrt.length);
                fs.writeFile('tryipfsimage.gif', wrt, (err) => {
                    if (err) console.log('can not write file');
                    else {
                        //console.log(data);
                        ipfs.stat('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n', (err, data) => {
                            // console.log(hexdump(wrt));
                        });
                        console.log("files written successfully");
                    }
                });
            }
        });
    });
});

function initialize() {
    console.log('Initializing the ipfs object');
    return new IPFS({
        host: 'ipfs.infura.io',
        protocol: 'https'
    });
}
I can view the image properly in the browser using the link "https://ipfs.io/ipfs/QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n", but if I open the file 'tryipfsimage.gif', into which I dump the returned buffer of the cat API in the program above, the image content seems corrupted. I am not sure what mistake I am making in the code. It would be great if someone pointed out the mistake.
From the ipfs docs: https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#javascript---ipfsfilescatipfspath-callback
The data in the callback is actually a Buffer, so by toString('base64')'ing it you are writing actual base64 text into the .gif file; there is no need to do this. You can pass the Buffer directly to the fs.writeFile API:
fs.writeFile('tryipfsimage.gif', data, ...
For larger files I would recommend using the ipfs catReadableStream, where you can do something more like:
const stream = ipfs.catReadableStream('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n')
// don't forget to add error handlers to the streams
const fileStream = fs.createWriteStream('tryipfsimage.gif')
stream.pipe(fileStream);
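As a follow-up to that comment, a minimal sketch of the same pipe with the error handlers attached (same hash and filename as above; the handler bodies are just placeholders):
const stream = ipfs.catReadableStream('QmdD8FL7N3kFnWDcPSVeD9zcq6zCJSUD9rRSdFp9tyxg1n');
const fileStream = fs.createWriteStream('tryipfsimage.gif');
// surface failures on both the read and the write side of the pipe
stream.on('error', (err) => console.error('ipfs read failed', err));
fileStream.on('error', (err) => console.error('file write failed', err));
fileStream.on('finish', () => console.log('file written'));
stream.pipe(fileStream);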

Request ends before readstream events are handled

I'm attempting to write a Node.js handler that reads back data from a file, using the following code:
app.post('/DownloadData', function(req, res) {
    req.on('data', function(data) {
        if (fs.existsSync('demoDataFile.dat')) {
            var rstream = fs.createReadStream('demoDataFile.dat');
            var bufs = [];
            rstream.on('data', function(chunk) {
                bufs.push(chunk);
                console.log("data");
            });
            rstream.on('end', function() {
                downbuf = Buffer.concat(bufs);
                console.log(downbuf.length);
            });
        }
    });
    req.on('end', function() {
        console.log("end length: " + downbuf.length);
        res.end(downbuf);
    });
    req.on('error', function(err) {
        console.error(err.stack);
    });
});
The problem is, the buffer comes back empty because req.on('end', ...) is called before any of the rstream events fire ("data" and the length aren't printed to the console until after "end length: " has been printed). Am I handling the events wrong, or is there some other issue? Any guidance would be appreciated.
Not sure why you're reading from req, because you're not using the body data at all. Also, because the data event can fire multiple times, the code that reads the file may also run multiple times, which probably isn't what you want.
Here's what I think you want:
app.post("/DownloadData", function(req, res) {
let stream = fs.createReadStream("demoDataFile.dat");
// Handle error regarding to creating/opening the file stream.
stream.on('error', function(err) {
console.error(err.stack);
res.sendStatus(500);
});
// Read the file data into memory.
let bufs = [];
stream.on("data", function(chunk) {
bufs.push(chunk);
console.log("data");
}).on("end", function() {
let downbuf = Buffer.concat(bufs);
console.log(downbuf.length);
...process the buffer...
res.end(downbuf);
});
});
You have to be aware that this will read the file into memory entirely. If it's a big file, it may require a lot of memory.
Since you don't specify which operations you have to perform on the file data, I can't recommend an alternative, but there are various modules available that can help you process file data in a streaming fashion (i.e. without having to read the file into memory entirely).
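For instance, if the data only needs to be forwarded to the client untouched, a sketch of the streaming approach (same route and file as above) would pipe the file straight into the response so it is never held in memory in full:
app.post("/DownloadData", function(req, res) {
    let stream = fs.createReadStream("demoDataFile.dat");
    stream.on("error", function(err) {
        console.error(err.stack);
        res.sendStatus(500);
    });
    // res is a writable stream, so the file flows to the client chunk by chunk
    stream.pipe(res);
});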

How to upload image to S3 using Node

I am writing an Express app that takes in a base64-encoded string representing an image. Right now I'm not really sure how I can take that string and upload the image to AWS S3, so I'm reading in the encoded image string, decoding it, writing a file using fs, and then trying to upload that file. I have this working for an endpoint that takes in a raw file, and all of its content is correctly uploaded to AWS S3.
Now when I try to do what I described above, I'm able to upload to S3, but the file is 0 KB and empty, and I'm not sure why. I tested just taking the string data and writing it to a test file, and that works. However, when I try uploading to S3, the file shows up but it's empty. Here is my code:
router.post('/images/tags/nutritionalInformation/image/base64encoded', function (req, res) {
    console.log(req.body.imageString);
    var base64Stream = req.body.imageString;
    var imgDecodedBuffer = decodeBase64Image(base64Stream);
    console.log(imgDecodedBuffer);
    // write to image file
    var prefix = guid().toString() + ".jpg";
    var filePath = './uploads/' + prefix;
    console.log(filePath);
    fs.writeFile(filePath, imgDecodedBuffer.data, function(err) {
        console.log(err);
    });
    var stream = fs.createReadStream(filePath);
    console.log(stream);
    return s3fsImpl.writeFile(prefix, stream).then(function () {
        fs.unlink(filePath, function (err) {
            if (err) {
                console.error(err);
            }
        });
    });
})
Here are the relevant import statements:
var fs = require('fs');
var s3fs = require('s3fs');
var multiparty = require('connect-multiparty'),
    multipartyMidleware = multiparty();
var s3fsImpl = new s3fs('blahblah', {
    accessKeyId: 'ACCESS_KEY_ID',
    secretAccessKey: 'SECRET'
});
Any help would be greatly appreciated!
If you just pass in the buffer, which I presume is in your imgDecodedBuffer.data value, it should work.
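In other words, the temporary file (and the race it introduces: fs.writeFile has not finished when fs.createReadStream opens the path, which would explain the 0 KB upload) can be skipped. A sketch of that, assuming s3fs's writeFile accepts a Buffer the way fs.writeFile does:
router.post('/images/tags/nutritionalInformation/image/base64encoded', function (req, res) {
    var imgDecodedBuffer = decodeBase64Image(req.body.imageString);
    var prefix = guid().toString() + ".jpg";
    // upload the decoded bytes directly, with no temp file on disk
    s3fsImpl.writeFile(prefix, imgDecodedBuffer.data).then(function () {
        res.sendStatus(200);
    }, function (err) {
        console.error(err);
        res.sendStatus(500);
    });
});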

How can I get a buffer for a file (image) from CollectionFS

I'm trying to insert an image into a PDF I'm creating server-side with PDFKit. I'm using cfs:dropbox to store my files. Before, when I was using cfs:filesystem, it was easy to add the images to my PDFs because they were right there. Now that they're stored remotely, I'm not sure how to add them, since PDFKit does not support adding images by URL alone. It will, however, accept a buffer. How can I get a buffer from my CollectionFS files?
So far I have something like this:
var portrait = Portraits.findOne('vS2yFy4gxXdjTtz5d');
readStream = portrait.createReadStream('portraits');
I tried getting the buffer two ways so far:
First using dataMan, but the last command never comes back:
var dataMan = new DataMan.ReadStream(readStream, portrait.type());
var buffer = Meteor.wrapAsync(Function.prototype.bind(dataMan.getBuffer, dataMan))();
Second buffering the stream manually:
var buffer = new Buffer(0);
readStream.on('readable', function() {
    buffer = Buffer.concat([buffer, readStream.read()]);
});
readStream.on('end', function() {
    console.log(buffer.toString('base64'));
});
That never seems to come back either. I double-checked my doc to make sure it was there: it has a valid URL, and the image appears when I put the URL in my browser. Am I missing something?
I had to do something similar and since there's no answer to this question, here is how I do it:
// take a cfs file and return a base64 string
var getBase64Data = function(file, callback) {
    // callback has the form function (err, res) {}
    var readStream = file.createReadStream();
    var buffer = [];
    readStream.on('data', function(chunk) {
        buffer.push(chunk);
    });
    readStream.on('error', function(err) {
        callback(err, null);
    });
    readStream.on('end', function() {
        // concatenate all chunks, then encode
        callback(null, Buffer.concat(buffer).toString('base64'));
    });
};

// wrap it to make it sync
var getBase64DataSync = Meteor.wrapAsync(getBase64Data);

// get a cfs file
var file = Files.findOne();
// get the base64 string
var base64str = getBase64DataSync(file);
// get the buffer from the string
var buffer = new Buffer(base64str, 'base64');
Hope it'll help!
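To connect this back to the original PDFKit goal: doc.image accepts a buffer, so the result can be used roughly like this (a sketch, assuming an existing PDFDocument instance named doc; position and width are arbitrary):
// decode the base64 string back into a buffer and place it in the pdf
var imgBuffer = new Buffer(getBase64DataSync(file), 'base64');
doc.image(imgBuffer, 72, 72, { width: 200 });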

Problems in writing binary data with node.js

I am trying to write the binary body of a request to a file and failing. The file is created on the server, but I am unable to open it: I get 'Fatal error: Not a png' on Ubuntu. Here is how I am making the request:
curl --request POST --data-binary "@abc.png" 192.168.1.38:8080
And here is how I am trying to save the file. The first snippet is a middleware that appends all the data together, and the second one is the request handler:
Middleware:
app.use(function(req, res, next) {
    req.rawBody = '';
    req.setEncoding('utf-8');
    req.on('data', function(chunk) {
        req.rawBody += chunk;
    });
    req.on('end', function() {
        next();
    });
});
Handler:
exports.save_image = function (req, res) {
    fs.writeFile("./1.png", req.rawBody, function(err) {
        if (err) {
            console.log(err);
        } else {
            console.log("The file was saved!");
        }
    });
    res.writeHead(200);
    res.end('OK\n');
};
Here's some info which might help: in the middleware, if I log the length of rawBody, it looks correct. I am really stuck on how to save the file correctly. All I need is a nudge in the right direction.
Here is a complete express app that works. I hit it with curl --data-binary @photo.jpg localhost:9200 and it works fine.
var app = require("express")();
var fs = require("fs");

app.post("/", function (req, res) {
    var outStream = fs.createWriteStream("/tmp/upload.jpg");
    req.pipe(outStream);
    res.send();
});

app.listen(9200);
I would just pipe the request straight to the filesystem. As for your actual problem, my first guess is the req.setEncoding('utf-8'); line, as utf-8 is for text data, not binary data.
To fix your code: I agree with @Peter Lyons that the error is probably the req.setEncoding('utf-8'); line.
I know the following doesn't answer your question directly, but it proposes an alternative using the req.files functionality provided by Express.js, which you are using.
if (req.files.photo && req.files.photo.name) {
    // Get the temporary location of the file.
    var tmp_path = req.files.photo.path;
    // Set where the file should actually live - in this case, the "images" directory.
    var target_path = './public/profile/' + req.files.photo.name;
    // Move the file from the temporary location to the intended location.
    fs.rename(tmp_path, target_path, function (error) {
        if (!error) {
            /*
             * Remove the old photo from the fs.
             * You can remove the following if you want to.
             */
            fs.unlink('./public/profile/' + old_photo, function (unlinkError) {
                if (unlinkError) {
                    callback_parallel(unlinkError);
                }
                else {
                    callback_parallel(null);
                }
            });
        }
        else {
            callback_parallel(error);
        }
    });
}
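Note that req.files is only populated when a multipart parser is mounted. A minimal wiring sketch, assuming connect-multiparty and a form field named photo (the route path here is hypothetical):
var multiparty = require('connect-multiparty');
var multipartyMiddleware = multiparty();

// parse multipart/form-data bodies so that req.files.photo is available
app.post('/profile', multipartyMiddleware, function (req, res) {
    // req.files.photo.path and req.files.photo.name can be used as above
    res.send();
});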
