How to write a base64 video to file in nodejs

My Express app is receiving a base64-encoded MP4 from the browser and writing it to a file. But the file isn't a valid video file, and the "file" utility simply identifies it as "data".
My Code:
const path = `${config.uploadPath}`;
const filename = `${uniqid()}.mp4`;
let base64Data = req.body.base64.replace(/^data:([A-Za-z-+/]+);base64,/, '');
fs.writeFileSync(`${path}${filename}`, base64Data, 'base64');

Are you sure there is a field named base64 in the request body? If so, please try this code:
req.body.base64 = req.body.base64.replace(/^data:(.*?);base64,/, ""); // <--- strip the data URI prefix for any MIME type
req.body.base64 = req.body.base64.replace(/ /g, '+'); // <--- this is important: restore '+' characters that arrived as spaces
fs.writeFile(`${path}${filename}`, req.body.base64, 'base64', function (err) {
  if (err) console.log(err);
});
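For context, here is a minimal end-to-end sketch of such a handler. The route name, size limit, and upload path are placeholders rather than values from the question; only the regex cleanup mirrors the answer above.
const fs = require('fs');
const express = require('express');
const uniqid = require('uniqid'); // the question already uses uniqid()

const config = { uploadPath: './uploads/' }; // placeholder; the question defines its own config.uploadPath

const app = express();
app.use(express.json({ limit: '100mb' })); // base64 payloads are roughly 4/3 the size of the raw video

app.post('/upload-video', (req, res) => {
  // Strip the data URI prefix and restore '+' characters that may have become spaces
  const base64Data = req.body.base64
    .replace(/^data:(.*?);base64,/, '')
    .replace(/ /g, '+');

  const filename = `${uniqid()}.mp4`;
  fs.writeFile(`${config.uploadPath}${filename}`, base64Data, 'base64', (err) => {
    if (err) return res.status(500).send(err.message);
    res.send({ filename });
  });
});

app.listen(3000);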

Related

Python decoding image spawned by Node.js gives `TypeError: memoryview: a bytes-like object is required, not 'str'`

I'm trying to decode and save an image file passed to a python script.
The image file is read by a Node.JS script index.js and its data passed as image inside a JSON string {"image":readImageFile(), "name":"image.png"}.
The JSON string is received by a spawned Python script script.py.
The problem is when I try to decode the image data, I get the error TypeError: memoryview: a bytes-like object is required, not 'str'.
I tried converting the image data to bytes like base64.decodebytes(bytes(params['image'],'utf-8')) but got the error UnicodeEncodeError: 'utf-8' codec can't encode character '\udc8f' in position 66: surrogates not allowed.
How do I rightly decode the image binary data so I can save it as a file?
Could you please help me spot the problem?
Thanks in advance!
script.py:
import sys, json
import base64
json_str = input() # Capture input
params = json.loads(json_str) # parse input
fileName = params['name'] # Capture file name
fileData = base64.decodebytes(params['image']) # decode image
...
...
print("Image Saved!")
sys.stdout.flush()
index.js:
const spawn = require("child_process").spawn;
const fs = require('fs');

let params = {
  "image": readImageFile(),
  "name": "image.png"
};

const pyProcess = spawn('py', ['script.py']);
pyProcess.stdin.write(JSON.stringify(params) + '\n');
pyProcess.stdout.on("data", (data) => {
  console.log(data.toString());
});

function readImageFile() {
  try {
    return fs.readFileSync('color.png', 'binary');
  }
  catch (err) {
    return err;
  }
}
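No answer is shown here, but the usual culprit in this setup is fs.readFileSync('color.png', 'binary'), which returns a latin1 string that does not survive JSON.stringify and a text pipe intact. One hedged fix, keeping the same file names as the question, is to base64-encode on the Node side; the Python side would then call base64.b64decode(params['image']) and write the resulting bytes to a file opened in 'wb' mode.
const { spawn } = require('child_process');
const fs = require('fs');

// Read the image as a Buffer and base64-encode it so the JSON payload
// is plain ASCII and survives the stdin text pipe without corruption.
function readImageFile() {
  return fs.readFileSync('color.png').toString('base64');
}

const params = { image: readImageFile(), name: 'image.png' };

const pyProcess = spawn('py', ['script.py']);
pyProcess.stdin.write(JSON.stringify(params) + '\n');
pyProcess.stdout.on('data', (data) => console.log(data.toString()));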

Return URL of locally stored image [Node JS API]

I'm creating an API that receives a base64 string, converts it to an image [label.jpeg], then returns the URL of the image. Now I'm stuck on how to create a URL for the locally stored image so that it can be accessed publicly.
Below is my code; the goal is to return the URL of 'label.jpeg' when the API is called.
// Post base64 string
app.post('/post-base64', (req, res) => {
  // Captured img [base64 string]
  let strBase64 = req.body
  console.log(strBase64)
  // Convert base64 --> img.jpeg
  let buffer = Buffer.from(strBase64, 'base64')
  console.log(buffer)
  // Write base64 to file
  let writeStream = fs.createWriteStream('label.txt')
  writeStream.write(buffer, 'base64')
  writeStream.on('finish', () => {
    console.log('WROTE ALL!')
  })
  writeStream.end()
  // Save img as 'label.jpeg'
  fs.writeFileSync('label.jpeg', buffer)
  res.send({
    // TOD0: Return label.jpeg link
  })
})
Thanks.
You could store the uploaded image in a "public" folder that you serve statically through express.static. So for example:
// add this to your app.js
app.use(express.static('public'))

// store the uploaded image in the public folder
app.post('/post-base64', (req, res) => {
  // ... note that you should probably switch
  // to the non-blocking version, e.g. fs.promises.writeFile(...)
  fs.writeFileSync(__dirname + '/../public/name-of-img.jpeg', buffer)
})
Express will now take care of serving the file, so the URL for the image will simply be http://yourhostname/name-of-img.jpeg
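Putting both pieces together, a rough sketch of the full handler might look like this. The port, route, and label.jpeg name simply reuse the ones above; building the URL from req.protocol and req.get('host') is one common convention, not the only option.
const express = require('express')
const path = require('path')
const fs = require('fs')

const app = express()
// the request body is a raw base64 string
app.use(express.text({ limit: '10mb', type: '*/*' }))
// files in ./public are served at the site root
app.use(express.static(path.join(__dirname, 'public')))

app.post('/post-base64', (req, res) => {
  const buffer = Buffer.from(req.body, 'base64')
  const fileName = 'label.jpeg'
  fs.writeFile(path.join(__dirname, 'public', fileName), buffer, (err) => {
    if (err) return res.status(500).send(err.message)
    // e.g. http://localhost:3000/label.jpeg
    res.send({ url: `${req.protocol}://${req.get('host')}/${fileName}` })
  })
})

app.listen(3000)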

How to upload image to S3 using Node

I am writing an Express app that takes in a base64-encoded string that represents an image. Right now, I'm not really sure how I can take that string and upload the image to AWS S3, so I'm reading in the encoded image string data, decoding it, writing a file using fs, and then trying to upload. I have this working for an endpoint that just takes in a raw file, and all of its content is correctly uploaded to AWS S3.
Now when I try to do what I described above, I'm able to upload to S3, but the file is 0 KB and empty, and I'm not sure why. I tested just taking the string data and writing it to a test file, and that works. However, when I try uploading to S3, the file shows up but it's empty. Here is my code:
router.post('/images/tags/nutritionalInformation/image/base64encoded', function (req, res) {
  console.log(req.body.imageString);
  var base64Stream = req.body.imageString;
  var imgDecodedBuffer = decodeBase64Image(base64Stream);
  console.log(imgDecodedBuffer);

  // write to image file
  var prefix = guid().toString() + ".jpg";
  var filePath = './uploads/' + prefix;
  console.log(filePath);
  fs.writeFile(filePath, imgDecodedBuffer.data, function(err) {
    console.log(err);
  });

  var stream = fs.createReadStream(filePath);
  console.log(stream);
  return s3fsImpl.writeFile(prefix, stream).then(function () {
    fs.unlink(filePath, function (err) {
      if (err) {
        console.error(err);
      }
    });
  });
})
Here are the relevant import statements:
var fs = require('fs');
var s3fs = require('s3fs');
var multiparty = require('connect-multiparty'),
    multipartyMidleware = multiparty();
var s3fsImpl = new s3fs('blahblah', {
  accessKeyId: 'ACCESS_KEY_ID',
  secretAccessKey: 'SECRET'
});
Any help would be greatly appreciated!
If you simply pass in the buffer, which I presume is your imgDecodedBuffer.data value, it should work.
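In other words, skip the temporary file (and the race between the asynchronous fs.writeFile and fs.createReadStream) and hand the decoded buffer to s3fs directly. A sketch reusing the question's own decodeBase64Image and guid helpers, on the assumption that s3fsImpl.writeFile accepts a Buffer as the answer suggests:
router.post('/images/tags/nutritionalInformation/image/base64encoded', function (req, res) {
  var imgDecodedBuffer = decodeBase64Image(req.body.imageString);
  var prefix = guid().toString() + '.jpg';

  // Upload the decoded Buffer straight to S3; no temp file on disk.
  return s3fsImpl.writeFile(prefix, imgDecodedBuffer.data)
    .then(function () {
      res.send({ key: prefix });
    })
    .catch(function (err) {
      res.status(500).send(err.message);
    });
});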

node.js upload and download pdf file

Framework: node.js/express.js/busboy/gridfs-stream(mongodb)
I am using busboy to upload files and then use gridfs-stream to store files in mongodb gridfs.
req.pipe(req.busboy);
req.busboy.on('file', function (bus_fieldname, bus_file, bus_filename) {
  var writestream = gfs.createWriteStream({
    filename: bus_filename,
  });
  bus_file.pipe(writestream);
  writestream.on('close', function (file) {
    res.redirect('/xxxxx/');
  });
});
Download is simple: using gridfs-stream's createReadStream, I read the contents from MongoDB and then use the following code to send them to the browser.
gfs.findOne({_id: attachmentid}, function (err, file) {
  if (err || !file) {
    res.send(404);
  } else {
    var fileName = file.filename;
    var readstream = gfs.createReadStream({_id: attachmentid});
    var buffer = "";
    readstream.on("data", function (chunk) {
      buffer += chunk;
    });
    // dump contents to buffer
    readstream.on("end", function () {
      res.set("Content-Disposition", "attachment; filename=" + fileName);
      res.send(buffer);
    });
  }
});
Problem: when I upload a 90 KB PDF file, it uploads fine and I can see the correct size in MongoDB. But when I download it, the downloaded file is about 165 KB, so there is a mismatch. This does not happen with text files, so it's surely something to do with the data type.
Can anyone please help?
Pipe the gfs read stream to the response directly. This is what works for me:
res.set("Content-Disposition","attachment; filename=" + fileName);
var readstream = gfs.createReadStream({_id: attachmentid});
readstream.pipe(res);
Like Sasikanth said, you can fix the problem with piping, and that's the best approach.
But if you are wondering what's wrong with your current approach, I suggest reading the Buffer documentation. You are naming your variable buffer but assigning it a string.
Then, in your data callback, you concatenate each Buffer chunk onto that string. When you do that, the chunk is converted to a string with its toString() method, which defaults to utf8. So your binary data ends up as a utf8-encoded string, and that is where it goes wrong: utf8 is a multi-byte encoding, so bytes that are not valid utf8 sequences get re-encoded and the output grows larger.
So the right way is to keep everything in buffers:
var buffers = [];
readstream.on("data", function (chunk) {
  buffers.push(chunk);
});
readstream.on("end", function () {
  res.set("Content-Disposition", "attachment; filename=" + fileName);
  res.send(Buffer.concat(buffers));
});

ReadFile in Base64 Nodejs

I'm trying to read an image sent from the client side, encoded in base64.
How do I read it with Node.js?
My code:
// add to buffer base64 image
var encondedImage = new Buffer(image.name, 'base64');
fs.readFile(encondedImage, "base64", function(err, buffer){
  if ( err ) {
    console.log('In read file')
    console.log(err)
  } else {
    // check err
    lwip.open(buffer, 'jpg', function(err, image){
      console.log('in open')
      if ( err ) console.log(err)
      if ( image ) console.log(image)
      // check 'err'. use 'image'.
      // image.resize(...), etc.
    });
  }
})
But, I got this error:
In read file
[Error: Path must be a string without null bytes.]
Latest and greatest way to do this:
Node supports file and buffer operations with the base64 encoding:
const fs = require('fs');
const contents = fs.readFileSync('/path/to/file.jpg', {encoding: 'base64'});
Or using the new promises API:
const fs = require('fs').promises;
const contents = await fs.readFile('/path/to/file.jpg', {encoding: 'base64'});
I think that the following example demonstrates what you need. (The original link was removed because it was broken.) The essence of the article is this code:
var fs = require('fs');
// function to encode file data to base64 encoded string
function base64_encode(file) {
  // read binary data
  var bitmap = fs.readFileSync(file);
  // convert binary data to base64 encoded string
  return new Buffer(bitmap).toString('base64');
}

// function to create file from base64 encoded string
function base64_decode(base64str, file) {
  // create buffer object from base64 encoded string, it is important to tell the constructor that the string is base64 encoded
  var bitmap = new Buffer(base64str, 'base64');
  // write buffer to file
  fs.writeFileSync(file, bitmap);
  console.log('******** File created from base64 encoded string ********');
}
// convert image to base64 encoded string
var base64str = base64_encode('kitten.jpg');
console.log(base64str);
// convert base64 string back to image
base64_decode(base64str, 'copy.jpg');
var fs = require('fs');
function base64Encode(file) {
  var body = fs.readFileSync(file);
  return body.toString('base64');
}
var base64String = base64Encode('test.jpg');
console.log(base64String);
The following code reads a text file, converts its contents to a string, decodes that string as base64 into a binary buffer, and writes the buffer back out as a true binary file.
import { readFileSync, writeFileSync } from 'fs'
import { tmpdir } from 'os'

const contents = readFileSync('./temp.txt')
const buffedInput = Buffer.from(contents.toString(), 'base64')
writeFileSync(`${tmpdir()}/temp.zip`, buffedInput)
