Uploaded file at Azure is 0 bytes - javascript

I am trying to upload an image to Azure Blob Storage. The upload succeeds, but the uploaded image appears as 0 bytes in my Azure Blob Storage container.
var fs = require('fs');
const fileUpload = (req, res, next) => {
console.log(req.files.file)
var blobSvc = azure.createBlobService();
var file = req.files.file.tempFilePath;
var stream = fs.createReadStream(file)
var dataLength = 0;
// using a readStream that we created already
stream
.on('data', function (chunk) {
dataLength += chunk.length;
})
.on('end', function () {
console.log('The length was:', dataLength);
});
blobSvc.createBlockBlobFromStream("image", req.files.file.name, stream, dataLength, function (error, result, response) {
if (!error) {
console.log('ok Blob uploaded')
console.log(result)
console.log(response)
} else {
console.log(error)
}
})
}
module.exports = fileUpload
This is how, my req.files.file obj received from frontend.
file: {
name: 'test.png',
data: <Buffer >,
size: 184332,
encoding: '7bit',
tempFilePath: '/tmp/tmp-1-1585187683435',
truncated: false,
mimetype: 'image/png',
md5: 'b8532c65a5c1ba95bbe89b6589b94a6c',
mv: [Function: mv]
}

You don't have to specify the length, you could try createWriteStreamToBlockBlob.
The code will look like this:
var azure = require('azure-storage');
var blobService = azure.createBlobService();
// Create the block-blob write stream first, then pipe the local file into
// it — no content length needs to be known up front.
var uploadSink = blobService.createWriteStreamToBlockBlob(containerName, blobName, { blockIdPrefix: 'block' });
var stream = fs.createReadStream(fileNameTarget).pipe(uploadSink);

Related

pdf encryption and uploading aws S3 not working

In this code I am trying to create a PDF file and encrypt it. The encryption itself works: the code first writes a temp file on my local machine, and then `qpdf --encrypt Decorpot test 40 -- ${tempFileName} --replace-input` replaces the unencrypted file with the encrypted one, which is stored correctly as an encrypted file locally. But when I upload to S3, only the non-encrypted file is uploaded. I have tried many approaches — please help me. Thank you.
/**
 * Renders the checklist HTML template to a PDF, encrypts it with qpdf,
 * uploads the ENCRYPTED file to the file-upload API, and resolves with
 * { pdfUrl } (the URL returned by the upload service).
 *
 * Bug fixes:
 *  - The original ran qpdf from a setTimeout(…, 1000) that raced against
 *    both PDF generation and the upload; the upload fired on the stream's
 *    'close' event, usually BEFORE qpdf had replaced the file, so the
 *    unencrypted PDF was what got uploaded. Encryption now happens inside
 *    the 'close' handler, and the upload only starts from qpdf's callback.
 *  - The qpdf command string contained a literal newline, splitting the
 *    shell command in two; it is now a single line.
 *  - Errors previously were only logged, leaving the Promise pending
 *    forever; every failure path now rejects.
 */
module.exports.generateChecklistPdf2 = function () {
  return new Promise((resolve, reject) => {
    let htmlFilePath = path.join(__dirname, '../static/html/checkList.html');
    htmlFilePath = htmlFilePath.replace(new RegExp(/\\/g), '/');
    // Random name to avoid collisions between concurrent generations.
    let pdfFilePath = path.join(__dirname, '../uploads/' + Math.ceil(Math.random() * 810000) + '.pdf');
    pdfFilePath = pdfFilePath.replace(new RegExp(/\\/g), '/');
    let date = new Date();
    ejs.renderFile(htmlFilePath, { moment: moment, date }, {}, function (err, htmlFile) {
      if (err) return reject(err);
      const file = fs.createWriteStream(pdfFilePath);
      let assetsPath = path.join(__dirname, '../static/');
      assetsPath = assetsPath.replace(new RegExp(/\\/g), '/');
      const options = {
        // Base URL so relative asset references in the template resolve.
        base: 'file:///' + assetsPath,
      };
      pdf.create(htmlFile, options).toStream(function (err, stream) {
        if (err) return reject(err);
        stream.pipe(file).on('close', () => {
          // The PDF is fully on disk now — encrypt it in place BEFORE upload.
          const cmd = `qpdf --encrypt Decorpot test 40 -- ${pdfFilePath} --replace-input`;
          exec(cmd, function (err) {
            if (err) return reject(err);
            console.log('PDF encrypted :)', pdfFilePath);
            const formData = {
              customerName: "customername",
              file: fs.createReadStream(pdfFilePath)
            };
            let url = '';
            if (process.env.NODE_ENV == 'production') {
              url = 'http://13.233.26.162:5003/api/file/upload';
            } else {
              url = 'http://localhost:5003/api/file/upload';
            }
            request.post({ url: url, formData: formData },
              function (err, res) {
                if (err) return reject(err);
                console.log(res.body);
                let resolveObject = {};
                resolveObject.pdfUrl = res.body;
                resolve(resolveObject);
              });
          });
        });
      });
    });
  });
};

Uploading Multiple Files to Google Cloud Storage using For Loop

I am trying to upload multiple files to Google Cloud Storage. I am using a for loop to for each file in the list of files which I want to upload.
However, the problem is that the for loop does not pause to wait for the upload to finish before moving on to the next upload. It eventually uploads them, however, the for loop finishes earlier which then sends back to the client the empty urlList.
How do I make it pause and wait for each upload process before moving on to the next file in the for loop?
const processFile = require('../middleware');
const { format } = require('util');
let uuidv4 = require('uuid/v4');
const Cloud = require('#google-cloud/storage');
const { Storage } = Cloud;
const storage = new Storage({
keyFilename: './xxx.json',
projectId: 'xxx'
});
const bucket = storage.bucket('xxx');
exports.upload = async (req, res) => {
const urlList = [];
await processFile(req, res); //multer
for (var i = 0; i < req.files.length; i++) {
if (!req.files[i]) {
return res.status(400).send({ message: 'Please upload a file!' });
}
const { originalname, buffer } = req.files[i];
var filename = originalname
.toLowerCase()
.split(' ')
.join('-');
filename = uuidv4() + '-' + filename;
console.log(filename);
const blob = bucket.file(filename);
const blobStream = blob.createWriteStream({
resumable: false
});
blobStream.on('error', err => {
res.status(500).send({ message: err.message });
});
blobStream.on('finish', async data => {
const publicUrl = format(
`https://storage.googleapis.com/${bucket.name}/${blob.name}`
);
urlList.push(publicUrl);
try {
await bucket.file(filename).makePublic();
} catch (err) {
console.log('failed to make it public');
reject(err);
}
});
blobStream.end(buffer);
}
return res.status(200).send({
message: 'Uploaded the files successfully',
url: urlList
});
};
Just put your "upload" code in a Promise that you can await in the loop. Otherwise, because the code inside is event-based, it will not follow the for loop — the loop just runs straight through it and cannot wait for it to finish. This should do the trick:
/**
 * Uploads a single multer file object to the GCS bucket, makes it public,
 * and resolves with its public URL; rejects on any failure.
 *
 * Bug fix: the original error handler also called
 * `res.status(500).send(...)`, but `res` is not in scope in this
 * module-level helper (it would throw a ReferenceError at error time).
 * The helper now only rejects; the calling request handler is the one
 * responsible for turning a failure into an HTTP response.
 */
const uploadFile = (f) => {
  return new Promise((resolve, reject) => {
    const { originalname, buffer } = f;
    var filename = originalname.toLowerCase().split(" ").join("-");
    // UUID prefix keeps repeated uploads of the same name distinct.
    filename = uuidv4() + "-" + filename;
    console.log(filename);
    const blob = bucket.file(filename);
    const blobStream = blob.createWriteStream({
      resumable: false,
    });
    blobStream.on("error", (err) => {
      reject(err);
    });
    blobStream.on("finish", async (data) => {
      const publicUrl = format(
        `https://storage.googleapis.com/${bucket.name}/${blob.name}`
      );
      try {
        await bucket.file(filename).makePublic();
        resolve(publicUrl);
      } catch (err) {
        console.log("failed to make it public");
        reject(err);
      }
    });
    blobStream.end(buffer);
  });
};
exports.upload = async (req, res) => {
const urlList = [];
await processFile(req, res); //multer
for (var i = 0; i < req.files.length; i++) {
if (!req.files[i]) {
return res.status(400).send({ message: "Please upload a file!" });
}
const publicUrl = await uploadFile(req.files[i]);
urlList.push(publicUrl);
}
return res.status(200).send({
message: "Uploaded the files successfully",
url: urlList,
});
};

Firebase function storage POST image issue

I have encountered a problem when following a maximilian schwarzmüller course, which has otherwise been great: https://www.youtube.com/watch?v=qZ1EFnFOGvE
The image logs in the Firebase console as uploaded, recognises the type of file/size etc. But continually loads and never displays the image. I use a post request in POSTMAN to upload the image.
When I upload manually to firebase on their UI, everything works fine.
My code:
const functions = require("firebase-functions");
const os = require("os");
const path = require("path");
const spawn = require("child-process-promise").spawn;
const cors = require("cors")({ origin: true });
const Busboy = require("busboy");
const fs = require("fs");
const gcconfig = {
projectId: "REDACTED",
keyFilename: "REDACTED"
};
const gcs = require("#google-cloud/storage")(gcconfig);
//
exports.onFileChange = functions.storage.object().onFinalize(event => {
const object = event.data;
const bucket = object.bucket;
const contentType = object.contentType;
const filePath = object.name;
console.log("File change detected, function execution started");
if (object.resourceState === "not_exists") {
console.log("We deleted a file, exit...");
return;
}
if (path.basename(filePath).startsWith("resized-")) {
console.log("We already renamed that file!");
return;
}
const destBucket = gcs.bucket(bucket);
const tmpFilePath = path.join(os.tmpdir(), path.basename(filePath));
const metadata = { contentType: contentType };
return destBucket
.file(filePath)
.download({
destination: tmpFilePath
})
.then(() => {
return spawn("convert", [tmpFilePath, "-resize", "500x500", tmpFilePath]);
})
.then(() => {
return destBucket.upload(tmpFilePath, {
destination: "resized-" + path.basename(filePath),
metadata: metadata
});
});
});
exports.uploadFile = functions.https.onRequest((req, res) => {
cors(req, res, () => {
if (req.method !== "POST") {
return res.status(500).json({
message: "Not allowed"
});
}
const busboy = new Busboy({ headers: req.headers });
let uploadData = null;
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
const filepath = path.join(os.tmpdir(), filename);
uploadData = { file: filepath, type: mimetype };
file.pipe(fs.createWriteStream(filepath));
});
busboy.on("finish", () => {
const bucket = gcs.bucket("REDACTED");
bucket
.upload(uploadData.file, {
uploadType: "media",
metadata: {
metadata: {
contentType: uploadData.type
}
}
})
.then(() => {
res.status(200).json({
message: "It worked!"
});
})
.catch(err => {
res.status(500).json({
error: err
});
});
});
busboy.end(req.rawBody);
});
});
My security rules:
rules_version = '2';
service firebase.storage {
match /b/{bucket}/o {
match /{allPaths=**} {
allow read, write:if true;
}
}
}

Decoding base64 image email attachment retrieved using imap nodejs

I'm attempting to retrieve an e-mail attachment image using Node.js imap that can be found here: https://github.com/mscdex/node-imap
After retrieving the image I want to save it to file and then save the name in the MySQL database so I can retrieve it in the front end using EJS.
I've gotten as far as retrieving the e-mail attachment image and attempting to decode it and then save it. Unfortunately when opened from the folder it says: "It appears that we don't support this file format".
Upon further investigation, if I convert it to a base64 string using this online tool: https://www.motobit.com/util/base64-decoder-encoder.asp and then go to a base64 to image converter (https://codebeautify.org/base64-to-image-converter), it shows the image just fine.
I'm skeptical that my code is actually converting the image to base64 as the file size goes up from 250kb to 332kb.
I am unsure of how to proceed to allow the photo to be properly decoded for viewing as the original .jpeg image.
var fs = require('fs'), fileStream;
var {Base64Encode, Base64Decode} = require('base64-stream');
const Imap = require('imap'),
inspect = require('util').inspect;
// IMAP connection settings for the Gmail account being polled.
// NOTE(review): the '#' in the user field looks like a scrape artifact
// for '@' (and both credentials are placeholders) — confirm before use.
var imap = new Imap({
user: 'gmailaccount#gmail.com',
password: 'gmailaccount',
host: 'imap.gmail.com',
port: 993,
tls: true
});
/* To Uppercase function */
// Uppercases strings (anything exposing a toUpperCase method); every
// other value — null, undefined, numbers, objects — passes through as-is.
function toUpper(thing) {
  if (thing && thing.toUpperCase) {
    return thing.toUpperCase();
  }
  return thing;
}
/* function to find attachments in imap email */
// Walks the (possibly nested) BODYSTRUCTURE array and collects every part
// whose disposition marks it as INLINE or ATTACHMENT. The accumulator is
// mutated in place when supplied, and is also returned.
function findAttachmentParts(struct, attachments) {
  const found = attachments || [];
  for (const part of struct) {
    if (Array.isArray(part)) {
      // Nested multipart: recurse into the sub-structure.
      findAttachmentParts(part, found);
      continue;
    }
    const disposition = part.disposition;
    if (disposition && ['INLINE', 'ATTACHMENT'].indexOf(disposition.type) > -1) {
      found.push(part);
    }
  }
  return found;
}
/**
 * Returns a per-message handler that streams the fetched attachment part
 * to a file named after the attachment.
 *
 * Bug fix: a BASE64-encoded part must be DECODED on its way to disk.
 * The original piped through Base64Encode, re-encoding already-encoded
 * data and producing an unopenable file (the reported symptom). It now
 * pipes through Base64Decode (added alongside Base64Encode in the
 * base64-stream require at the top of the file).
 */
function buildAttMessageFunction(attachment) {
  var filename = attachment.params.name;
  var encoding = attachment.encoding;
  return function (msg, seqno) {
    var prefix = '(#' + seqno + ') ';
    msg.on('body', function (stream, info) {
      // Create a write stream so that we can stream the attachment to file.
      console.log(prefix + 'Streaming this attachment to file', filename, info);
      var writeStream = fs.createWriteStream(filename);
      writeStream.on('finish', function () {
        console.log(prefix + 'Done writing to file %s', filename);
      });
      if (toUpper(encoding) === 'BASE64') {
        // Decode the base64 transfer encoding on the fly.
        stream.pipe(new Base64Decode()).pipe(writeStream);
      } else {
        // No (or some other) transfer encoding: write the bytes as-is.
        stream.pipe(writeStream);
      }
    });
    msg.once('end', function () {
      console.log(prefix + 'Finished attachment %s', filename);
    });
  };
}
// Thin wrapper: opens INBOX read-only (second arg true) and hands the
// (err, box) result straight to the supplied callback.
function openInbox(cb) {
  imap.openBox('INBOX', true, cb);
}
/* Take all unseen emails, output to console and save them to a text file */
// Flow: once the IMAP connection is ready, open INBOX read-only, search
// for UNSEEN messages, fetch headers + the text body part, stream any
// attachments to disk, and finally print a summary of each parsed message.
imap.once('ready', function(){
openInbox(function(err, box){
if (err) throw err;
imap.search([ 'UNSEEN' ], function(err, results) {
var messages = [];
if (err) throw err;
// Fetch the listed header fields plus MIME part 1.1 (the text body),
// and the full BODYSTRUCTURE (struct: true) so attachments can be found.
var f = imap.fetch(results, { id: 1, bodies: ['HEADER.FIELDS (FROM TO SUBJECT DATE)', '1.1'], struct: true });
f.on('message', function(msg, seqno) {
var body = ''
, header = ''
, parsedMsg = {}
var prefix = '(#' + seqno + ') ';
msg.on('body', function(stream, info) {
var buffer = '', count = 0;
// info.which tells the part apart: text parts accumulate into `body`,
// anything else is treated as raw headers and parsed at the end.
if(info.which === 'TEXT' || info.which === '1.1'){
stream.on('data', function(chunk) { body += chunk.toString('utf8') })
stream.once('end', function() { parsedMsg.body = body })
}
else{
stream.on('data', function(chunk) { header += chunk.toString('utf-8') })
stream.once('end', function() { parsedMsg.header = Imap.parseHeader(header) })
}
// Also mirror the raw part to a per-message text file.
stream.pipe(fs.createWriteStream('msg-' + seqno + '-body.txt'));
});
msg.once('attributes', function(attrs) {
var attachments = findAttachmentParts(attrs.struct);
console.log(prefix + 'Has attachments: %d', attachments.length);
for(var i = 0, len = attachments.length; i < len; ++i){
var attachment = attachments[i];
/*This is how each attachment looks like {
partID: '2',
type: 'application',
subtype: 'octet-stream',
params: { name: 'file-name.ext' },
id: null,
description: null,
encoding: 'BASE64',
size: 44952,
md5: null,
disposition: { type: 'ATTACHMENT', params: { filename: 'file-name.ext' } },
language: null
}
*/
console.log(prefix + 'Fetching attachment %s', attachment.params.name);
// Second fetch for just this attachment's MIME part.
// NOTE(review): this inner `var f` shadows the outer fetch handle `f`
// (function-scoped var) — confusing but harmless here, since the outer
// f is not referenced again inside this handler.
var f = imap.fetch(attrs.uid , {
bodies: [attachment.partID],
struct: true
});
//build function to process attachment message
f.on('message', buildAttMessageFunction(attachment));
}
parsedMsg.attrs = attrs;
console.log(prefix + 'Attributes: %s', inspect(attrs, false, 8));
});
msg.once('end', function() {
console.log(prefix + 'Finished email');
messages.push( parsedMsg );
});
});
f.once('error', function(err) {
console.log('Fetch error: ' + err);
});
f.once('end', function() {
console.log('Done fetching all messages!');
// NOTE(review): `i` below is an implicit global (no var/let) — works in
// sloppy mode but would throw under 'use strict'.
for( i in messages ) {
console.log( i + ': ' + inspect( messages[i], false, 4 ) );
}
imap.end();
});
});
});
});
// Top-level connection lifecycle logging.
imap.once('error', function(err){
console.log(err);
});
imap.once('end', function(){
console.log('Connection ended');
});
imap.connect();
The expected output is a .jpeg image saved to the file directory that is able to be viewed. The actual output I am getting is an image file that when double clicked to view says: "It appears that we don't support this file format."
Unfortunately, I couldn't find a way to use node-imap to properly decode and retrieve email attachments. I ended up using imap-simple instead and was able to achieve the desired result.
I used imap-simple's example code block to retrieve attachments.
var imaps = require('imap-simple');
// Placeholder credentials for the mailbox to poll.
var config = {
  imap: {
    user: 'your#email.address',
    password: 'yourpassword',
    host: 'imap.gmail.com',
    port: 993,
    tls: true,
    authTimeout: 3000
  }
};
// Connect, search unseen mail from the last 24h, and download every
// attachment as { filename, data } pairs.
imaps.connect(config).then(function (connection) {
  // Return the inner chain so errors propagate to the final .catch()
  // (the original neither returned it nor had any rejection handler,
  // so failures were silently swallowed as unhandled rejections).
  return connection.openBox('INBOX').then(function () {
    // Fetch emails from the last 24h
    var delay = 24 * 3600 * 1000;
    var yesterday = new Date();
    yesterday.setTime(Date.now() - delay);
    yesterday = yesterday.toISOString();
    var searchCriteria = ['UNSEEN', ['SINCE', yesterday]];
    var fetchOptions = { bodies: ['HEADER.FIELDS (FROM TO SUBJECT DATE)'], struct: true };
    // retrieve only the headers of the messages
    return connection.search(searchCriteria, fetchOptions);
  }).then(function (messages) {
    var attachments = [];
    messages.forEach(function (message) {
      var parts = imaps.getParts(message.attributes.struct);
      attachments = attachments.concat(parts.filter(function (part) {
        return part.disposition && part.disposition.type.toUpperCase() === 'ATTACHMENT';
      }).map(function (part) {
        // retrieve the attachments only of the messages with attachments
        return connection.getPartData(message, part)
          .then(function (partData) {
            return {
              filename: part.disposition.params.filename,
              data: partData
            };
          });
      }));
    });
    return Promise.all(attachments);
  }).then(function (attachments) {
    console.log(attachments);
    // =>
    // [ { filename: 'cats.jpg', data: Buffer() },
    //   { filename: 'pay-stub.pdf', data: Buffer() } ]
  });
}).catch(function (err) {
  console.log(err);
});
On both instances of Base64Encode rename it to Base64Decode.

Dynamically load subtitles into video

Here is the situation: The server receives the srt file stream. The server converts the stream into vtt format. The resulting stream is then buffered into a buffer which is then sent to client through a io.socket.
Here is the server code:
// Express route: receive an .srt subtitle upload (multipart), convert it
// to WebVTT on the fly, and push the result to the client over the socket.
self.expressApp.post("/upload", function (req, res) {
var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
// Pipe the uploaded srt stream through the srt->vtt converter,
// collecting the converted output chunks.
var subchunks = [],
sub = file.pipe(srt2vtt());
sub.on('data',function(buffer){
subchunks.push(buffer);
});
sub.on('end',function(){
// Concatenate and emit the subtitles as one binary Buffer.
// NOTE(review): the accepted fix further down sends this as a plain
// text string instead — the binary payload is what broke the client.
var buffer = Buffer.concat(subchunks);
socket.emit('Subtitles',buffer);
});
});
busboy.on('finish', function() {
// Multipart body fully consumed — acknowledge the upload.
res.writeHead(200, { 'Connection': 'close' });
res.end();
});
return req.pipe(busboy);
});
Here is the client code:
// Client: wrap the received payload in a text/vtt Blob, point a <track>
// element at an object URL for it, and attach the track to the video.
var subobj = new Blob([new Uint8Array(payload)],{type: "text/vtt"}),
url = (URL || webkitURL).createObjectURL(subobj),
track = document.createElement("track");
track.kind = "captions";
track.label = "English";
track.srclang = "en";
track.src = url;
// Switch the track on once the browser has loaded the cues.
track.addEventListener("load", function() {
this.mode = "showing";
videoElement.textTracks[0].mode = "showing";
});
videoElement.append(track);
Why is this not working? Any help is appreciated.
Thanks.
I'm using Plyr; there are some bugs with dash.js captions, so I created a blob of the WebVTT and appended it to the video container. Here is my code:
// Fetch the WebVTT text, wrap it in a Blob, and attach it as a <track>.
// Bug fix: removed the stray trailing backtick that followed the final
// .catch(...) line — it opened an unterminated template literal and made
// the snippet a syntax error.
fetch("url")
  .then(response => response.text())
  .then(result => {
    var blob = new Blob([result], {
      type: "text/vtt"
    });
    const track = document.createElement('track');
    Object.assign(track, {
      label: 'language',
      default: true,
      src: window.URL.createObjectURL(blob)
    });
    video.appendChild(track);
  })
  .catch(error => console.log('error', error));
Here is the solution.
What was I doing wrong?
I was sending the VTT stream as a binary array rather than a plain text string.
Server-side Code:
// Express route: receive an .srt upload, convert it to WebVTT, and emit
// the result to the client as one plain-text string over the socket.
self.expressApp.post("/upload", function (req, res) {
  var busboy = new Busboy({ headers: req.headers });
  busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
    // Accumulate the converter's output as UTF-8 text, not binary chunks.
    var vttText = "";
    var converted = file.pipe(srt2vtt());
    converted.on('data', function (buffer) {
      vttText += buffer.toString('utf8');
    });
    converted.on('end', function () {
      socket.emit('Subtitles', vttText);
    });
  });
  busboy.on('finish', function () {
    // Upload fully parsed — acknowledge and close the HTTP side.
    res.writeHead(200, { 'Connection': 'close' });
    res.end();
  });
  return req.pipe(busboy);
});
Client-side Code:
// Client: build a text/vtt Blob straight from the subtitle TEXT payload.
// Bug fix: the original wrapped the string in new Uint8Array(payload) —
// a string argument to the Uint8Array constructor is coerced to a NUMBER
// (usually NaN -> length 0), yielding an empty/zero-filled buffer instead
// of the VTT text, so the track had nothing to show. Blob accepts the
// string directly.
var subobj = new Blob([payload], {type: "text/vtt"}),
url = (URL || webkitURL).createObjectURL(subobj),
track = document.createElement("track");
track.kind = "captions";
track.label = "English";
track.srclang = "en";
track.src = url;
videoElement.append(track);
track.mode = "showing";
videoElement.textTracks[0].mode = "showing";

Categories