running the formatVideoToWebpSticker function - javascript

this is my code trying to convert an mp4 to webp file.
for some reason its not saving the file to my folder and when I console.logged it, it sent a path to my temp folder but the file wasn't there.
Tried looking it up but found nothing helpful :(
how to fix this please? thank you
const path = require('path');
const Crypto = require('crypto');
const { tmpdir } = require('os');
const ffmpeg = require('fluent-ffmpeg');
const webp = require('node-webpmux');
const fs = require('fs').promises;
const has = (o, k) => Object.prototype.hasOwnProperty.call(o, k);
const { MessageMedia } = require('whatsapp-web.js');
const media = MessageMedia.fromFilePath('./1.mp4');
async function formatVideoToWebpSticker(media) {
if (!media.mimetype.includes('video'))
throw new Error('media is not a video');
const videoType = media.mimetype.split('/')[1];
const tempFile = path.join(
tmpdir(),
`${Crypto.randomBytes(6).readUIntLE(0, 6).toString(36)}.webp`
);
const stream = new (require('stream').Readable)();
const buffer = Buffer.from(
media.data.replace(`data:${media.mimetype};base64,`, ''),
'base64'
);
stream.push(buffer);
stream.push(null);
await new Promise((resolve, reject) => {
ffmpeg(stream)
.inputFormat(videoType)
.on('error', reject)
.on('end', () => resolve(true))
.addOutputOptions([
'-vcodec',
'libwebp',
'-vf',
// eslint-disable-next-line no-useless-escape
'scale=\'iw*min(300/iw\,300/ih)\':\'ih*min(300/iw\,300/ih)\',format=rgba,pad=300:300:\'(300-iw)/2\':\'(300-ih)/2\':\'#00000000\',setsar=1,fps=10',
'-loop',
'0',
'-ss',
'00:00:00.0',
'-t',
'00:00:05.0',
'-preset',
'default',
'-an',
'-vsync',
'0',
'-s',
'512:512',
])
.toFormat('webp')
.save(tempFile)
console.log(tempFile);
});
const data = await fs.readFile(tempFile, 'base64');
console.log(tempFile)
await fs.unlink(tempFile);
return {
mimetype: 'image/webp',
data: data,
filename: media.filename,
};
}
formatVideoToWebpSticker(media)

Related

how can QR async function code on a custom command in cypress

hi somebody can help me to create a custom command on cypress to use this async function
var jimp = require("jimp");
var qrCode = require('qrcode-reader');
var fs = require('fs')
var path = require("path")
const readQRCode = async (fileName) => {
const filePath = path.join(__dirname, fileName)
try {
if (fs.existsSync(filePath)) {
const img = await jimp.read(fs.readFileSync(filePath));
const qr = new qrCode();
const value = await new Promise((resolve, reject) => {
qr.callback = (err, v) => err != null ? reject(err) : resolve(v);
qr.decode(img.bitmap);
});
return value.result;
}
} catch (error) {
return error.message
}
}

Nodejs AWS Lambda s3 getObject method returns nothing

The script used when trying to get contents from the csv stored in the s3 bucket
const mysql = require("mysql");
const fs = require("fs");
const { google } = require("googleapis");
const AWS = require("aws-sdk");
const client = new AWS.SecretsManager({ region: "eu-west-1" });
const analyticsreporting = google.analyticsreporting("v4");
const csv = require('ya-csv')
const fastCsv = require('fast-csv')
const s3 = new AWS.S3();
const getParams = {
Bucket: 'data',
Key: 'athena_test/nameplate.csv'
};
exports.handler = async (context, event) => {
const data = await s3.getObject(getParams, function (err, data){
if(err){console.log("ERROR: ",err)}
else {return data}
})
console.log(data.Body)
}
the console log returns undefined rather than the contents of the csv
Hey you can try this one:-
const csv = require('#fast-csv/parse');
const s3Stream = await s3.getObject(params).createReadStream();
const data = await returnDataFromCSV();
console.log(data.Body);
const returnDataFromCSV =()=> {
let promiseData = new Promise((resolve, reject) => {
const parser = csv
.parseStream(csvFile, { headers: true })
.on("data", (data) => {
console.log('Parsed Data:-', data);
})
.on("end", ()=> {
resolve("CSV finished here");
})
.on("error",()=> {
reject("if failed");
});
});
try {
return await promiseData;
} catch (error) {
console.log("Get Error: ", error);
return error;
}
}
CreateStream: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#createReadStream-property

Pass PDF as Blob from Azure to Node to React

I am attempting to grab a PDF stored in Azure Blob Storage via a node backend and then serve that PDF file to a React Frontend. I am using Microsofts #azure/storage-blob with a BlockBlobClient but every example I find online converts the readableStreamBody to a string. The blob has a content type of application/pdf. Ive tried passing the readableStreamBody and the pure output to the frontend but those result in broken pdf's. I also followed the documentation online and made it a string and passed that to the frontend. That produced a PDF that would open and had the proper amount of pages but was completly blank.
Node.js Code on the Backend
app.get('/api/file/:company/:file', (req, res) => {
const containerClient = blobServiceClient.getContainerClient(req.params.company);
const blockBlobClient = containerClient.getBlockBlobClient(req.params.file);
blockBlobClient.download(0)
.then(blob => streamToString(blob.readableStreamBody))
.then(response => res.send(response))
});
FrontEnd Code
getFileBlob = (company,file) => {
axios(`/api/file/${company}/${file}`, { method: 'GET', responseType: 'blob'})
.then(response => {
const file = new Blob(
[response.data],
{type: 'application/pdf'});
const fileURL = URL.createObjectURL(file);
window.open(fileURL);
})
.catch(error => {
console.log(error);
});
}
This might help you, its working for me.
Node
var express = require('express');
const { BlobServiceClient } = require('#azure/storage-blob');
var router = express.Router();
const AZURE_STORAGE_CONNECTION_STRING =
'YOUR_STRING';
async function connectAzure() {
// Create the BlobServiceClient object which will be used to create a container client
const blobServiceClient = BlobServiceClient.fromConnectionString(
AZURE_STORAGE_CONNECTION_STRING
);
const containerName = 'filestorage';
const blobName = 'sample.pdf';
console.log('\nConnecting container...');
console.log('\t', containerName);
// Get a reference to a container
const containerClient = blobServiceClient.getContainerClient(containerName);
// Get a block blob client
const blockBlobClient = containerClient.getBlockBlobClient(blobName);
for await (const blob of containerClient.listBlobsFlat()) {
console.log('\t', blob.name);
}
const downloadBlockBlobResponse = await blockBlobClient.download(0);
const data = await streamToString(downloadBlockBlobResponse.readableStreamBody)
return data;
}
async function streamToString(readableStream) {
return new Promise((resolve, reject) => {
const chunks = [];
readableStream.on('data', data => {
chunks.push(data.toString());
});
readableStream.on('end', () => {
resolve(chunks.join(''));
});
readableStream.on('error', reject);
});
}
router.get('/', async function(req, res, next) {
const data = await connectAzure();
res.send({data}).status(200);
});
module.exports = router;
Front-end
function createFile() {
fetch('/createfile').then(res => {
res.json().then(data => {
var blob = new Blob([data.data], { type: 'application/pdf' });
var fileURL = URL.createObjectURL(blob);
if (filename) {
if (typeof a.download === 'undefined') {
window.location.href = fileURL;
} else {
window.open(fileURL, '_blank');
}
}
})
}).catch(err => console.log(err))
}
HTML
<body><h1>Express</h1><p>Welcome to Express</p><button onclick="createFile()">Create File</button></body>

NodeJS: How to zip (local) files after done with JIMP

I'm trying to use NodeJS to automate some trivial procedures on my computer. Right now I'm able to convert some png files into jpg. I would like to bundle them all up in a zip.
const fs = require('fs')
const path = require('path')
const jimp = require('jimp')
const files = fs.readdirSync('./')
// Convert all png to jpg
const pngs = files.filter(file => path.extname(file).toLowerCase() === '.png')
let jpgs = []
Promise.all(pngs.map(png => jimp.read('./' + png))).then(jimps => {
jimps.map((img, i) => {
img
.rgba(false)
.background(0xffffffff)
.write(`./jpgs/${path.basename(pngs[i], '.png')}.jpg`)
})
console.log('Done converting')
})
// Zip all the .png and .jpg files into PNGs.zip and JPGs.zip
// TODO:
I fiddled a bite around with JSZip but couldn't make it work.
SOLUTION
const fs = require('fs')
const path = require('path')
const jimp = require('jimp')
const CLIProgress = require('cli-progress')
const zipPNG = new require('node-zip')()
const zipJPG = new require('node-zip')()
const files = fs.readdirSync('./')
// Convert all png to jpg
const pngs = files.filter(file => path.extname(file).toLowerCase() === '.png')
let jpgs = []
Promise.all(pngs.map(png => jimp.read('./' + png))).then(jimps => {
const bar = new CLIProgress.Bar({}, CLIProgress.Presets.shades_classic)
bar.start(pngs.length, 0)
jimps.map((img, i) => {
img
.rgba(false)
.background(0xffffffff)
.write(`./jpgs/${path.basename(pngs[i], '.png')}.jpg`)
bar.update(i + 1)
})
bar.stop()
console.log('Done converting')
// Pack the files nicely in ZIP
pngs.forEach(png => {
zipPNG.file(png, fs.readFileSync(path.join('./', png)))
zipJPG.file(
`${path.basename(png, '.png')}.jpg`,
fs.readFileSync(`./jpgs/${path.basename(png, '.png')}.jpg`)
)
})
let data = zipPNG.generate({ base64: false, compression: 'DEFLATE' })
fs.writeFileSync('PNG.zip', data, 'binary')
console.log('PNGs zipped')
data = zipJPG.generate({ base64: false, compression: 'DEFLATE' })
fs.writeFileSync('./jpgs/JPG.zip', data, 'binary')
console.log('JPGs zipped')
})
I would use the npm package node-zip. It is a very straightforward library with an easy to use interface.

Error: Cloud Functions for Firebase spawn EACCES (ghostscript)

I tried using Firebase Cloud Functions to create a thumbnail of a PDF file.
After the call of gs I get the following error:
2018-06-12T11:29:08.685Z E makeThumbnail: Error: spawn EACCES
at exports._errnoException (util.js:1020:11)
at ChildProcess.spawn (internal/child_process.js:328:11)
at exports.spawn (child_process.js:370:9)
at Object.exec (/user_code/node_modules/gs/index.js:86:28)
at Promise (/user_code/index.js:95:12)
at mkdirp.then.then (/user_code/index.js:86:12)
2018-06-12T11:29:08.698166767Z D makeThumbnail: Function execution took 780 ms, finished with status: 'error'
Is it necessary to use a component like ghostscript to use a plan other than Spark?
In addition, my code. Maybe I just do not see my problem in the code
const functions = require('firebase-functions');
const mkdirp = require('mkdirp-promise');
const gcs = require('#google-cloud/storage')();
const admin = require('firebase-admin');
const spawn = require('child-process-promise').spawn;
const path = require('path');
const os = require('os');
const fs = require('fs');
const gs = require('gs');
const THUMB_MAX_HEIGHT = 200;
const THUMB_MAX_WIDTH = 200;
const THUMB_PREFIX = 'thumb_';
const gs_exec_path = path.join(__dirname, './lambda-ghostscript/bin/gs');
try{admin.initializeApp(functions.config().firebase); } catch(e) {}
exports.makeThumbnail = functions.storage.object().onFinalize((object) => {
const filePath = object.name;
const contentType = object.contentType;
const fileDir = path.dirname(filePath);
const fileName = path.basename(filePath);
const thumbFilePath = path.normalize(path.join(fileDir, `${THUMB_PREFIX} ${fileName}`));
const tempLocalFile = path.join(os.tmpdir(), filePath);
const tempLocalDir = path.dirname(tempLocalFile);
const tempLocalThumbFile = path.join(os.tmpdir(), thumbFilePath);
const tmp_dir = os.tmpdir();
if (fileName.startsWith(THUMB_PREFIX)) {
console.log('Is thumbnail');
return null;
}
const bucket = gcs.bucket(object.bucket);
const file = bucket.file(filePath);
const thumbFile = bucket.file(thumbFilePath);
const metadata = {
contentType: contentType,
};
return mkdirp(tmp_dir).then(() => {
console.log("Dir Created");
console.log(tempLocalFile);
return file.download({destination: tempLocalFile});
}).then(() => {
console.log("File downloaded");
if(!contentType.startsWith("image/")){
return new Promise((resolve, reject) => {
const pg= 1;
gs().batch().nopause()
.option(`-dFirstPage=${pg}`)
.option(`-dLastPage=${pg}`)
.executablePath(gs_exec_path)
.device('png16m')
.output(tempLocalThumbFile+".png")
.input(tempLocalFile)
.exec(err => err ? reject(err) : resolve());
});
}
else
{
var args = [ tempLocalFile, '-thumbnail', `${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`, tempLocalThumbFile ];
return spawn('convert', args, {capture: ['stdout', 'stderr']});
}
}).then(() => {
return bucket.upload(tempLocalThumbFile, { destination: thumbFilePath });
}).then(() => {
fs.unlinkSync(tempLocalFile);
fs.unlinkSync(tempLocalThumbFile);
return result[0];
});
});
After hours of scratching my head and running same code over and over again pointlessly, I've finally found the problem!
The executable path that you've defined is not correct. It should be 'gs'.
Here's a complete gs() call sample:
gs()
.batch()
.option('-dFirstPage=2')
.option('-dLastPage=2')
.nopause()
.res(90)
.executablePath('gs')
.device('jpeg')
.output(tempNewPath2)
.input(tempFilePath)
.exec((err, stdout, stderr) => {
if (!err) {
console.log('gs executed w/o error');
console.log('stdout', stdout);
console.log('stderr', stderr);
resolve();
} else {
console.log('gs error:', err);
reject(err);
}
});
For more help, you can go through a sample repo that I created for this issue
https://github.com/krharsh17/ghostscript-firebase-sample

Categories