NodeJS: How to zip (local) files after processing with JIMP - javascript

I'm trying to use NodeJS to automate some trivial procedures on my computer. Right now I'm able to convert some png files into jpg. I would like to bundle them all up in a zip.
const fs = require('fs')
const path = require('path')
const jimp = require('jimp')

const files = fs.readdirSync('./')

// Convert all png to jpg
const pngs = files.filter(file => path.extname(file).toLowerCase() === '.png')
let jpgs = []
Promise.all(pngs.map(png => jimp.read('./' + png))).then(jimps => {
  jimps.map((img, i) => {
    img
      .rgba(false)
      .background(0xffffffff)
      .write(`./jpgs/${path.basename(pngs[i], '.png')}.jpg`)
  })
  console.log('Done converting')
})

// Zip all the .png and .jpg files into PNGs.zip and JPGs.zip
// TODO:
I fiddled a bit around with JSZip but couldn't make it work.
SOLUTION
const fs = require('fs')
const path = require('path')
const jimp = require('jimp')
const CLIProgress = require('cli-progress')
const zipPNG = new require('node-zip')()
const zipJPG = new require('node-zip')()

const files = fs.readdirSync('./')

// Convert all png to jpg
const pngs = files.filter(file => path.extname(file).toLowerCase() === '.png')
let jpgs = []
Promise.all(pngs.map(png => jimp.read('./' + png))).then(jimps => {
  const bar = new CLIProgress.Bar({}, CLIProgress.Presets.shades_classic)
  bar.start(pngs.length, 0)
  jimps.map((img, i) => {
    img
      .rgba(false)
      .background(0xffffffff)
      .write(`./jpgs/${path.basename(pngs[i], '.png')}.jpg`)
    bar.update(i + 1)
  })
  bar.stop()
  console.log('Done converting')

  // Pack the files nicely in ZIP
  pngs.forEach(png => {
    zipPNG.file(png, fs.readFileSync(path.join('./', png)))
    zipJPG.file(
      `${path.basename(png, '.png')}.jpg`,
      fs.readFileSync(`./jpgs/${path.basename(png, '.png')}.jpg`)
    )
  })

  let data = zipPNG.generate({ base64: false, compression: 'DEFLATE' })
  fs.writeFileSync('PNG.zip', data, 'binary')
  console.log('PNGs zipped')

  data = zipJPG.generate({ base64: false, compression: 'DEFLATE' })
  fs.writeFileSync('./jpgs/JPG.zip', data, 'binary')
  console.log('JPGs zipped')
})

I would use the npm package node-zip. It is a very straightforward library with an easy-to-use interface.
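One caveat with the solution above: jimp's write() is asynchronous, so 'Done converting' (and the readFileSync calls that build the JPG zip) can run before every JPG is actually on disk. A minimal sketch of a safer ordering, assuming a jimp version that provides the promise-returning writeAsync() (an assumption; check your version):

// Sketch: wait for all JPGs to be written before zipping (assumes jimp's writeAsync exists)
// Reuses `pngs`, `path`, and `jimp` from the script above.
Promise.all(pngs.map(png => jimp.read('./' + png)))
  .then(jimps => Promise.all(jimps.map((img, i) =>
    img
      .rgba(false)
      .background(0xffffffff)
      .writeAsync(`./jpgs/${path.basename(pngs[i], '.png')}.jpg`) // resolves once the file is on disk
  )))
  .then(() => {
    console.log('Done converting')
    // now it is safe to read the JPGs back and zip them
  })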

Related

running the formatVideoToWebpSticker function

This is my code for converting an mp4 to a webp file.
For some reason it's not saving the file to my folder, and when I console.logged the result it printed a path to my temp folder, but the file wasn't there.
I tried looking it up but found nothing helpful :(
How do I fix this, please? Thank you.
const path = require('path');
const Crypto = require('crypto');
const { tmpdir } = require('os');
const ffmpeg = require('fluent-ffmpeg');
const webp = require('node-webpmux');
const fs = require('fs').promises;

const has = (o, k) => Object.prototype.hasOwnProperty.call(o, k);
const { MessageMedia } = require('whatsapp-web.js');
const media = MessageMedia.fromFilePath('./1.mp4');

async function formatVideoToWebpSticker(media) {
  if (!media.mimetype.includes('video'))
    throw new Error('media is not a video');
  const videoType = media.mimetype.split('/')[1];
  const tempFile = path.join(
    tmpdir(),
    `${Crypto.randomBytes(6).readUIntLE(0, 6).toString(36)}.webp`
  );
  const stream = new (require('stream').Readable)();
  const buffer = Buffer.from(
    media.data.replace(`data:${media.mimetype};base64,`, ''),
    'base64'
  );
  stream.push(buffer);
  stream.push(null);
  await new Promise((resolve, reject) => {
    ffmpeg(stream)
      .inputFormat(videoType)
      .on('error', reject)
      .on('end', () => resolve(true))
      .addOutputOptions([
        '-vcodec',
        'libwebp',
        '-vf',
        // eslint-disable-next-line no-useless-escape
        'scale=\'iw*min(300/iw\,300/ih)\':\'ih*min(300/iw\,300/ih)\',format=rgba,pad=300:300:\'(300-iw)/2\':\'(300-ih)/2\':\'#00000000\',setsar=1,fps=10',
        '-loop',
        '0',
        '-ss',
        '00:00:00.0',
        '-t',
        '00:00:05.0',
        '-preset',
        'default',
        '-an',
        '-vsync',
        '0',
        '-s',
        '512:512',
      ])
      .toFormat('webp')
      .save(tempFile);
    console.log(tempFile);
  });
  const data = await fs.readFile(tempFile, 'base64');
  console.log(tempFile);
  await fs.unlink(tempFile);
  return {
    mimetype: 'image/webp',
    data: data,
    filename: media.filename,
  };
}

formatVideoToWebpSticker(media);
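No fix was posted for this one. A common cause of this symptom (an assumption, not confirmed by the question) is that fluent-ffmpeg cannot find an ffmpeg binary on the PATH, in which case nothing is written and the failure only surfaces on the 'error' event. A sketch of the check, assuming the ffmpeg-static package is installed:

// Assumption: the ffmpeg-static package is installed; its export is the
// absolute path to a bundled ffmpeg binary.
const ffmpegPath = require('ffmpeg-static');
const ffmpeg = require('fluent-ffmpeg');
ffmpeg.setFfmpegPath(ffmpegPath); // tell fluent-ffmpeg exactly which binary to run

// Inside the Promise in formatVideoToWebpSticker, it also helps to log the
// underlying error instead of only rejecting, so the cause becomes visible:
// .on('error', (err) => { console.error('ffmpeg failed:', err.message); reject(err); })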

output list of files in a directory by array of extensions

I'm trying to get only image filenames to output into console from a given directory.
I was originally following this answer, but I couldn't figure out how to give it multiple extensions:
How do I get a list of files with specific file extension using node.js?
function fName(fp) {
  const { readdirSync } = require('fs');
  const path = require('path');
  const fs = require('fs');
  // don't need whole filepaths atm
  // const gl = require('glob');
  // gl(fp + `/**/*.#(jpg|png)`, {}, (err, files) => {
  //   console.log(files)
  // })
  let extensions = ['jpg', 'jpeg', 'png'];
  let nameFilter = [];
  const rawFileNames = readdirSync(fp, { withFileTypes: true })
    .filter(dirent => dirent.isFile())
    .map(dirent => dirent.name)
  // filter the extensions
  let fileNames = rawFileNames.filter(file => {
    path.extname(file).toLowerCase() === extensions;
  })
  console.log(fileNames);
}
EDIT
I also attempted to make a function that iterates over the extension list and to call it in the filter, but it still returns an empty array:
let extensions = ['jpg', 'jpeg', 'png'];
let getexts = function (exte) {
  for (ex in exte) {
    return exte[ex];
  }
}
let nameFilter = [];
const rawFileNames = readdirSync(fp, { withFileTypes: true })
  .filter(dirent => dirent.isFile())
  .map(dirent => dirent.name)
// filter the extensions
let fileNames = rawFileNames.filter(file => {
  path.extname(file).toLowerCase() === getexts(extensions);
})
In your filter, couldn't you just do:
let fileNames = rawFileNames.filter(file => {
  // path.extname() returns the extension with a leading dot, so strip it
  return extensions.includes(path.extname(file).toLowerCase().slice(1))
})
Modify your filter condition as below.
// filter the extensions (slice(1) drops the leading dot that path.extname() returns)
let fileNames = rawFileNames.filter(file => {
  return extensions.includes(path.extname(file).toLowerCase().slice(1))
});
console.log(fileNames);
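Put together, a minimal self-contained sketch of fName; the leading-dot detail is what the empty-array result comes down to, and listing the extensions with their dots avoids the slice(1) step entirely:

const { readdirSync } = require('fs');
const path = require('path');

function fName(fp) {
  // extname() includes the leading dot, so the list does too
  const extensions = ['.jpg', '.jpeg', '.png'];
  return readdirSync(fp, { withFileTypes: true })
    .filter(dirent => dirent.isFile())
    .map(dirent => dirent.name)
    .filter(file => extensions.includes(path.extname(file).toLowerCase()));
}

console.log(fName('./images')); // './images' is a hypothetical directory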

Images put to storage are saved as 'octet-stream' rather than image/jpeg (firebase and ReactNative)

I am using the camera (react-native-image-picker) to take a picture and save it to storage.
Here is how I am doing it:
const saveImage = async () => {
  const id = firebase.firestore().collection('food').doc().id
  const storageRef = firebase.storage().ref()
  const fileRef = storageRef.child(file.fileName) // name of image to store
  await fileRef.put(file) // store image
  firebase.firestore().collection('food').doc(id).update({
    image: firebase.firestore.FieldValue.arrayUnion({
      name: file.fileName,
      url: await fileRef.getDownloadURL()
    })
  })
}
console.log(typeof file); // gives => "object"
console.log(file);
// gives =>
file = {
  height: 2322,
  uri: "content://com.photodocumentation.imagepickerprovidlib_temp_7a0448df-1fac-4ac7-a47c-402c62ecce4c.jpg",
  width: 4128,
  fileName: "rn_image_picker_lib_temp_7a0448df-1fac-4ac7-a47c-402c62ecce4c.jpg",
  type: "image/jpeg"
}
Results:
In Firebase Storage the image is being saved as application/octet-stream instead of image/jpeg.
The image is not shown; it says undefined when downloaded from storage.
Any help will be appreciated.
This is how I was able to fix it:
const uploadImage = async () => {
  const response = await fetch(file.uri)
  const blob = await response.blob();
  var ref = firebase.storage().ref().child("FolderName");
  return ref.put(blob)
}
The Reference#put() method accepts a Blob, Uint8Array or ArrayBuffer. Your "file" object doesn't appear to be any of these.
Instead, we need to read the file into memory (using react-native-fs - referred to as RNFS) and then upload that data along with the required metadata. Because the file is read as base64 by RNFS, we will use Reference#putString instead as it accepts Base64 strings for uploads.
const rnfs = require('react-native-fs');

const saveImage = async () => {
  const capture = /* this is your "file" object, renamed as it's not a `File` object */
  const fileRef = firebase.storage().ref(capture.fileName);

  const captureBase64Data = await rnfs.readFile(capture.uri, 'base64');

  const uploadSnapshot = await fileRef.putString(captureBase64Data, 'base64', {
    contentType: capture.type,
    customMetadata: {
      // customMetadata values must be strings
      height: String(capture.height),
      width: String(capture.width)
    }
  });

  // const id = colRef.doc().id and colRef.doc(id).update() can be replaced
  // with just colRef.add() (colRef being a CollectionReference)
  return await firebase.firestore().collection('food').add({
    image: {
      name: capture.fileName,
      url: await fileRef.getDownloadURL()
    }
  });
};
Solution: pass the image reference to the uploadBytesResumable() method:
const storageRef = ref(storage, `product-images/${image.name}`);
uploadBytesResumable(storageRef, image);
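If the octet-stream problem persists with the modular (v9) API, passing the content type explicitly as upload metadata should help. A sketch, assuming image is a Blob or File and storage is an initialized Storage instance:

const { getStorage, ref, uploadBytesResumable, getDownloadURL } = require('firebase/storage');

const storage = getStorage();
const storageRef = ref(storage, `product-images/${image.name}`); // `image` assumed to be a Blob/File

// The third argument is upload metadata; an explicit contentType prevents
// the application/octet-stream default when the SDK cannot infer a type.
const task = uploadBytesResumable(storageRef, image, { contentType: 'image/jpeg' });
task
  .then(snapshot => getDownloadURL(snapshot.ref))
  .then(url => console.log('download URL:', url));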

How to extract .zip files to a directory within a lambda function

The issue I am running into is that when I test the function against a .zip file, once it reaches the fs.createReadStream/unzip step it neither runs nor returns an error. I would like to understand what I am doing wrong and what a correct solution would look like.
const AWS = require('aws-sdk');
const fs = require('fs');
const mkdirp = require('mkdirp');
const unzipper = require('unzipper');

exports.handler = async (event, context) => {
  // Variables for bucket init
  let sourceBucket = 'am-doc-mgmt-s3-dev-landing';
  let storageBucket = 'am-doc-mgmt-s3-dev';

  // Variables for folder init and Buffer config
  const localZippedFolder = '/tmp/ZippedStudentData/';
  const localUnzippedFolder = '/tmp/UnzippedStudentData/';
  const ZipBuffer = Buffer.from(localZippedFolder, 'base64');
  const UnzippedBuffer = Buffer.from(localUnzippedFolder, 'base64');

  // Inits AWS s3 Bucket and DynamoDB
  let s3 = new AWS.S3();
  let docClient = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });

  // Gets the file bucket and file name of the s3 object from context
  let fileBucket = event.Records[0].s3.bucket.name;
  let fileName = event.Records[0].s3.object.key;
  let params = {
    Bucket: fileBucket,
    Key: fileName
  };

  // Creates temporary variables
  let tempFile = localZippedFolder + fileBucket;
  let tempUnzippedFile = localUnzippedFolder + fileBucket;

  // Make directories for zipped and unzipped files
  try {
    const zipDirFolder = await mkdirp(localZippedFolder, { recursive: true })
    const unZipDirFolder = await mkdirp(localUnzippedFolder, { recursive: true });
    console.log('SUCCESS: unzipped directory created!');
    console.log('SUCCESS: zipped directory created!')

    // Download files from s3 Bucket
    let newFolder = await s3.getObject(params).promise()
      .then(data => {
        console.log(data);
        return data;
      });

    // Extract files from zipped folder and store them in a local directory
    fs.createReadStream(params.Key)
      .pipe(unzipper.Extract({ path: unZipDirFolder }))
      .on('finish', () => {
        fs.readdir(unZipDirFolder);
      }).on('error', (err) => {
        // error handling here
        console.log(err);
      });
  }
  catch (error) {
    console.log(error);
  }
};
Error: I'm not getting anything back from fs.createReadStream. It's as if it just skips over the function.
It's honestly hard to figure out what problem you're really trying to solve, since you aren't very specific about that. If you want the containing async function to not resolve its promise until the unzipping is done, you can wrap the stream in a promise like this:
const AWS = require('aws-sdk');
const fs = require('fs');
const mkdirp = require('mkdirp');
const unzipper = require('unzipper');

exports.handler = async (event, context) => {
  // Variables for bucket init
  let sourceBucket = 'am-doc-mgmt-s3-dev-landing';
  let storageBucket = 'am-doc-mgmt-s3-dev';

  // Variables for folder init and Buffer config
  const localZippedFolder = '/tmp/ZippedStudentData/';
  const localUnzippedFolder = '/tmp/UnzippedStudentData/';
  const ZipBuffer = Buffer.from(localZippedFolder, 'base64');
  const UnzippedBuffer = Buffer.from(localUnzippedFolder, 'base64');

  // Inits AWS s3 Bucket and DynamoDB
  let s3 = new AWS.S3();
  let docClient = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });

  // Gets the file bucket and file name of the s3 object from context
  let fileBucket = event.Records[0].s3.bucket.name;
  let fileName = event.Records[0].s3.object.key;
  let params = {
    Bucket: fileBucket,
    Key: fileName
  };

  // Creates temporary variables
  let tempFile = localZippedFolder + fileBucket;
  let tempUnzippedFile = localUnzippedFolder + fileBucket;

  // Make directories for zipped and unzipped files
  try {
    const zipDirFolder = await mkdirp(localZippedFolder, { recursive: true })
    const unZipDirFolder = await mkdirp(localUnzippedFolder, { recursive: true });
    console.log('SUCCESS: unzipped directory created!');
    console.log('SUCCESS: zipped directory created!')

    // Download files from s3 Bucket
    let newFolder = await s3.getObject(params).promise();

    await new Promise((resolve, reject) => {
      // Extract files from zipped folder and store them in a local directory
      fs.createReadStream(params.Key)
        .pipe(unzipper.Extract({ path: unZipDirFolder }))
        .on('finish', resolve)
        .on('error', reject);
    });
  } catch (error) {
    console.log(error);
    // rethrow error so caller sees the error
    throw error;
  }
};
And your caller using this exported function will HAVE to use .then() or await on the returned promise to know when it's done, and use .catch() or try/catch around the await to catch errors.
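A separate observation, not part of the original answer: fs.createReadStream(params.Key) reads a local path, but at this point the zip only exists in S3, and the getObject result that was awaited is never used. A sketch that streams the object straight from S3 into unzipper, reusing the handler's variables and assuming the aws-sdk v2 Request#createReadStream API:

// Assumes aws-sdk v2, where a request object exposes createReadStream()
await new Promise((resolve, reject) => {
  s3.getObject(params)
    .createReadStream()                              // stream the zip from S3, not the local filesystem
    .on('error', reject)                             // S3/network errors
    .pipe(unzipper.Extract({ path: unZipDirFolder }))
    .on('finish', resolve)                           // extraction complete
    .on('error', reject);                            // unzip errors
});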
If someone's open to using Python, they can use a buffer to read and unzip the files. Something like this:
import io
import zipfile
from io import BytesIO

zipped_file = s3_resource.Object(bucket_name=sourcebucketname, key=filekey)
buffer = BytesIO(zipped_file.get()["Body"].read())
zipped = zipfile.ZipFile(buffer)
for file in zipped.namelist():
    logger.info(f'current file in zipfile: {file}')
    final_file_path = file + '.extension'
    with zipped.open(file, "r") as f_in:
        content = f_in.read()
        destinationbucket.upload_fileobj(
            io.BytesIO(content),
            final_file_path,
            ExtraArgs={"ContentType": "text/plain"}
        )
There's also a tutorial here: https://betterprogramming.pub/unzip-and-gzip-incoming-s3-files-with-aws-lambda-f7bccf0099c9

Extract svgs from multiple paths

Here is my code; I just need to refactor it a bit:
const fs = require('fs');
const path = require('path');
const config = require('../configs/icons.config.js');

const filterDirectory = (file, srcPath) => fs.statSync(path.join(srcPath, file)).isDirectory();

const ensureDirectoryExistence = (filePath) => {
  const dirname = path.dirname(filePath);
  if (fs.existsSync(dirname)) {
    return true;
  }
  ensureDirectoryExistence(dirname);
  return fs.mkdirSync(dirname);
};

const filterExclude = fileName => config.ignore
  .filter(ignore => fileName.indexOf(ignore) >= 0).length === 0;

const getDirectories = srcPath => fs.readdirSync(srcPath)
  .filter(file => filterDirectory(file, srcPath))
  .filter(file => filterExclude(file));

const availableIcons = getDirectories(path.resolve(config.origin));

availableIcons.forEach((file) => {
  ensureDirectoryExistence(path.resolve(`${config.temporary}/${file}`));
  fs.createReadStream(`${config.origin}/${file}/_jcr_content/renditions/original`)
    .pipe(fs.createWriteStream(`${config.temporary}/${file}`));
});
The problem is that I want the same functionality with
const config = require('../configs/globalicons.config.js')
If I change line 3, it generates a new directory for the other config file. How can I pass both configs at the same time? Something like
const config = require('../configs/icons.config.js,../configs/globalicons.config.js');
doesn't work, though.
I want to execute the same JavaScript for two different configs, to generate two different directories, in a single deployment. Any tip?
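require() takes one module path at a time, so the comma-separated path can't work. One approach (a sketch, not tested against these config files; it assumes both share the { origin, temporary, ignore } shape) is to load each config and wrap the per-config work in a function:

const fs = require('fs');
const path = require('path');

// Load every config you want to process
const configs = [
  require('../configs/icons.config.js'),
  require('../configs/globalicons.config.js'),
];

// Same logic as above, with `config` passed in instead of read from module scope
const run = (config) => {
  const filterExclude = fileName => config.ignore
    .filter(ignore => fileName.indexOf(ignore) >= 0).length === 0;
  const srcPath = path.resolve(config.origin);
  const availableIcons = fs.readdirSync(srcPath)
    .filter(file => fs.statSync(path.join(srcPath, file)).isDirectory())
    .filter(filterExclude);
  availableIcons.forEach((file) => {
    ensureDirectoryExistence(path.resolve(`${config.temporary}/${file}`)); // helper from the script above
    fs.createReadStream(`${config.origin}/${file}/_jcr_content/renditions/original`)
      .pipe(fs.createWriteStream(`${config.temporary}/${file}`));
  });
};

configs.forEach(run);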
