Using gulp with request - javascript

I have the following Gulpfile.js:
'use strict';
const gulp = require('gulp'),
    request = require('request');

const paths = {
    vendor: [
        'https://raw.githubusercontent.com/jquery/jquery-dist/master/dist/jquery.min.js',
        'https://raw.githubusercontent.com/kenwheeler/slick/master/slick/slick.js'
    ]
};

gulp.task('vendor', (res) => {
    const url = request.get(paths.vendor).pipe(res);
    return gulp.src(url)
        .pipe(gulp.dest('public/vendor'));
});
gulp.task('default', gulp.parallel('vendor'));
I'm getting the following error:
Error: options.uri is a required argument
With this method I'm trying to ditch client-side package managers like Bower. Is there a way to use request with gulp and loop through a list of URLs?
EDIT:
I placed this code for testing; it only returns the first item from the loop:
gulp.task('vendor', () => {
    for (let i = 0; i < paths.vendor.length; i++) {
        return console.log(paths.vendor[i]);
    }
});
Similarly with:
gulp.task('vendor', (res) => {
    const url = request.get(paths.vendor[index++]).pipe(res);
    return gulp.src(url)
        .pipe(gulp.dest('public/vendor'));
});

You cannot pass a URL to gulp.src(). The gulp instance inherits src() and dest() from vinyl-fs, meaning you can only use them to read from and write to the local file system.
Try gulp-download instead, which wraps request into a vinyl stream:
var download = require('gulp-download');

gulp.task('vendor', () => {
    return download(paths.vendor)
        .pipe(gulp.dest('public/vendor'));
});

request.get() only works on one URI at a time, but you are passing an array. Also, AFAIK gulp.parallel() expects a list of tasks, not one task that processes many items. Maybe this would work for you:
'use strict';
const gulp = require('gulp'),
    request = require('request');

const paths = {
    vendor: [
        'https://raw.githubusercontent.com/jquery/jquery-dist/master/dist/jquery.min.js',
        'https://raw.githubusercontent.com/kenwheeler/slick/master/slick/slick.js'
    ]
};

let index = 0;

gulp.task('vendor', (res) => {
    const url = request.get(paths.vendor[index++]).pipe(res);
    return gulp.src(url)
        .pipe(gulp.dest('public/vendor'));
});

let parallelTasks = (new Array(paths.vendor.length)).fill('vendor');

gulp.task('default', gulp.parallel(...parallelTasks));
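For reference, if you would rather keep a single task and loop over the URL list yourself, here is a minimal sketch assuming the vinyl-source-stream and merge-stream packages are installed alongside request:

const request = require('request');
const source = require('vinyl-source-stream');
const merge = require('merge-stream');

gulp.task('vendor', () => {
    // wrap each HTTP response in a vinyl file so gulp.dest() can write it
    const streams = paths.vendor.map(url => {
        const filename = url.split('/').pop();
        return request(url)
            .pipe(source(filename))
            .pipe(gulp.dest('public/vendor'));
    });
    // merge everything into one stream so gulp knows when the task is done
    return merge(...streams);
});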

Related

Using Cypress functions in different files

I am trying to use Cypress functions in files other than the main one (the test file), and I am wondering if it is possible.
Here is the code in my test.js file; note that the first function is the one I'm trying to get working; the second works normally and I have no problem with it. The reason I'm doing this is that I may need to reuse the same function multiple times.
my tree folders:
static_copied
  pages
    cities
      Rome
      New York
      Bombay
      Tokyo
      London
      Moscow
test.js file:
const pathCities = 'static_copied/pages/cities'

it('Retrieve cities from static and divide links', () => {
    let cities1 = misc.retrieveCities()
    console.log(cities1)

    // this works
    cy.task('readFolder', pathCities).then(cities => {
        console.log('cities ', cities, typeof cities) // prints an array of cities, and 'object'
    })
})
my misc.help.js file:
const pathCities = 'static_copied/pages/cities'

module.exports = {
    retrieveCities,
    [...]
}

[...]

function retrieveCities() {
    cy.task('readFolder', pathCities).then(res => {
        console.log('here', res, typeof res)
        return res
    })
}
and finally my cypress/plugins/index.js file:
const fs = require('fs')

// opens devTools by default
module.exports = (on, config) => {
    [...]
    // reads a folder, both folder and file names
    on('task', {
        readFolder(path) {
            let foldersAnFiles = fs.readdirSync(path, 'utf8')
            console.log('--->', foldersAnFiles, typeof foldersAnFiles)
            let folders = []
            // if it's a file, exclude it from the result
            foldersAnFiles.filter(function (folder) {
                if (folder.indexOf('.') === -1) {
                    folders.push(folder)
                }
            })
            return folders
        },
    })
}
What happens is that the log inside misc.help.js is correct: in the retrieveCities() function, console.log('here', res, typeof res) correctly prints an array.
But when I return it to the main test file, console.log(cities1) prints undefined.
Is there a way to pass my result back to the main file?
Your helper logs the array, but retrieveCities() never returns the cy.task chain, so the caller receives undefined. Add this to your commands file; you can then call cy.retrieveCities() in any test file and it will work.
Cypress.Commands.add('retrieveCities', () => {
    return cy.task('readFolder', pathCities).then(res => {
        return res
    })
})
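You can then consume the command in any spec file, for example:

it('uses the custom command', () => {
    cy.retrieveCities().then(cities => {
        console.log(cities) // now an array instead of undefined
    })
})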

Outputting file details using ffprobe in ffmpeg AWS Lambda layer

I am trying to output the details of an audio file with ffmpeg using the ffprobe option, but it is just returning null at the moment. I have added the ffmpeg layer in Lambda. Can anyone spot why this is not working?
const { spawnSync } = require("child_process");
const { readFileSync, writeFileSync, unlinkSync } = require("fs");
const util = require('util');
var fs = require('fs');
let path = require("path");

exports.handler = (event, context, callback) => {
    spawnSync(
        "/opt/bin/ffprobe",
        [
            `var/task/myaudio.flac`
        ],
        { stdio: "inherit" }
    );
};
This is the official AWS Lambda layer I am using; it is a great project but a little lacking in documentation:
https://github.com/serverlesspub/ffmpeg-aws-lambda-layer
First of all, I would recommend using Node.js 8.10 over Node.js 6.10 (the latter will soon be EOL, although AWS is unclear on how long it will be supported).
Also, I would not use the old-style handler with a callback.
A working example is below. Since it downloads a file from the internet (I couldn't be bothered to create a deployment package with the file uploaded), give it a bit more time to run.
const { spawnSync } = require('child_process');
const util = require('util');
var fs = require('fs');
let path = require('path');
const https = require('https');

exports.handler = async (event) => {
    const source_url = 'https://upload.wikimedia.org/wikipedia/commons/b/b2/Bell-ring.flac'
    const target_path = '/tmp/test.flac'

    async function downloadFile() {
        return new Promise((resolve, reject) => {
            const file = fs.createWriteStream(target_path);
            const request = https.get(source_url, function(response) {
                const stream = response.pipe(file)
                stream.on('finish', () => { resolve() })
            });
        });
    }

    await downloadFile()

    const test = spawnSync('/opt/bin/ffprobe', [
        target_path
    ]);
    console.log(test.output.toString('utf8'))

    const response = {
        statusCode: 200,
        body: JSON.stringify([test.output.toString('utf8')]),
    };
    return response;
}
NB! In production, be sure to generate a unique temporary file name, as the instances a Lambda function runs on are often shared from invocation to invocation, and you don't want multiple invocations stepping on each other's files. When done, delete the temporary file; otherwise you might run out of free space on the instance executing your functions. The /tmp folder can hold 512 MB, so it can fill up fast if you work with many large FLAC files.
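A minimal sketch of that advice (the helper name and the cleanup placement are assumptions, not part of the layer):

const os = require('os');
const path = require('path');
const crypto = require('crypto');
const fs = require('fs');

// hypothetical helper: a unique path under /tmp for this invocation
function uniqueTmpPath(ext) {
    return path.join(os.tmpdir(), `audio-${crypto.randomBytes(8).toString('hex')}${ext}`);
}

const target_path = uniqueTmpPath('.flac');
try {
    // ... download the file and run ffprobe as above ...
} finally {
    fs.unlinkSync(target_path); // free the 512 MB /tmp quota for later invocations
}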
I'm not fully familiar with this layer; however, from looking at the git repo of the thumbnail-builder it looks like child_process is a promise, so you should wait for its result using .then(); otherwise it returns null because it doesn't wait for the result.
So try something like:
return spawnSync(
    "/opt/bin/ffprobe",
    [
        `var/task/myaudio.flac`
    ],
    { stdio: "inherit" }
).then(result => {
    return result;
}).catch(error => {
    // handle error
});

Unable to pass a dynamically generated array of cacheable links to Service Worker

How do I pass a programmatically populated array of links to a service worker script for caching?
I am generating the array in cachelist.js like this:
const fs = require('fs');
const path = require('path');
require('dotenv').config();

var cachedItems = ['/'];

function walkSync(currentDirPath, callback) {
    fs.readdirSync(currentDirPath).forEach(function (name) {
        var filePath = path.join(currentDirPath, name);
        var stat = fs.statSync(filePath);
        if (stat.isFile()) {
            callback(filePath, stat);
        } else if (stat.isDirectory()) {
            walkSync(filePath, callback);
        }
    });
}

walkSync('./pages/', function(filePath, stat) {
    cachedItem = filePath.substr(5);
    if (cachedItem.indexOf('_') == -1) {
        cachedItems.push(cachedItem);
    }
});

module.exports = { cachedItems };
And then I'm trying to use this cachedItems array in /offline/serviceWorker.js as follows:
const URLSTOCACHE = require("../cachelist.js");
const CACHE_NAME = "version-0.0.46";

// Call install event
self.addEventListener("install", e => {
    e.waitUntil(
        caches
            .open(CACHE_NAME)
            .then(cache => cache.addAll(URLSTOCACHE))
            .then(() => self.skipWaiting())
    );
});

// Call fetch event
self.addEventListener("fetch", e => {
    e.respondWith(
        fetch(e.request).catch(() => caches.match(e.request))
    )
});
However, this fails with an "Uncaught ReferenceError: require is not defined." Any workaround?
require isn't a built-in browser-side utility. There are various libraries (like RequireJS) and bundlers (which rewrite the require call), but unless you're using one of those, you can't use require browser-side.
If your goal is to read that file in the service worker and add the URLs in it to the cache, use fetch to load it, and then use them in addAll.
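A minimal sketch of that approach, assuming you expose the generated array as JSON at a hypothetical /cachelist.json endpoint:

const CACHE_NAME = "version-0.0.46";

self.addEventListener("install", e => {
    e.waitUntil(
        fetch("/cachelist.json")
            .then(res => res.json()) // the generated array of URLs, served as JSON
            .then(urls => caches.open(CACHE_NAME).then(cache => cache.addAll(urls)))
            .then(() => self.skipWaiting())
    );
});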

How to zip a directory with node.js [duplicate]

I need to zip an entire directory using Node.js. I'm currently using node-zip and each time the process runs it generates an invalid ZIP file (as you can see from this Github issue).
Is there another, better, Node.js option that will allow me to ZIP up a directory?
EDIT: I ended up using archiver
writeZip = function(dir, name) {
    var zip = new JSZip(),
        code = zip.folder(dir),
        output = zip.generate(),
        filename = ['jsd-', name, '.zip'].join('');
    fs.writeFileSync(baseDir + filename, output);
    console.log('creating ' + filename);
};
Sample values for the parameters:
dir = /tmp/jsd-<randomstring>/
name = <randomstring>
UPDATE: For those asking about the implementation I used, here's a link to my downloader:
I ended up using archiver lib. Works great.
Example
var file_system = require('fs');
var archiver = require('archiver');

var output = file_system.createWriteStream('target.zip');
var archive = archiver('zip');

output.on('close', function () {
    console.log(archive.pointer() + ' total bytes');
    console.log('archiver has been finalized and the output file descriptor has closed.');
});

archive.on('error', function(err) {
    throw err;
});

archive.pipe(output);

// append files from a sub-directory, putting its contents at the root of archive
archive.directory(source_dir, false);

// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory('subdir/', 'new-subdir');

archive.finalize();
I'm not going to show something new, just wanted to summarise the solutions above for those who like Promises as much as I do 😉.
const fs = require('fs');
const archiver = require('archiver');

/**
 * @param {String} sourceDir: /some/folder/to/compress
 * @param {String} outPath: /path/to/created.zip
 * @returns {Promise}
 */
function zipDirectory(sourceDir, outPath) {
    const archive = archiver('zip', { zlib: { level: 9 } });
    const stream = fs.createWriteStream(outPath);

    return new Promise((resolve, reject) => {
        archive
            .directory(sourceDir, false)
            .on('error', err => reject(err))
            .pipe(stream);

        stream.on('close', () => resolve());
        archive.finalize();
    });
}
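Hypothetical usage of the helper above:

zipDirectory('/some/folder/to/compress', '/path/to/created.zip')
    .then(() => console.log('zip written'))
    .catch(err => console.error(err));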
Hope it will help someone 🤞
Use Node's native child_process api to accomplish this.
No need for third party libs. Two lines of code.
const child_process = require("child_process");

child_process.execSync(`zip -r <DESIRED_NAME_OF_ZIP_FILE_HERE> *`, {
    cwd: <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE>
});
The example above showcases the synchronous API. You can also use child_process.exec(path, options, callback) if you want async behavior. There are a lot more options you can specify other than cwd to further fine-tune your request.
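For example, a minimal sketch of the async variant (the archive name and folder path are placeholders):

const child_process = require("child_process");

// the callback fires once the zip command exits
child_process.exec("zip -r my-archive.zip *", { cwd: "/folder/to/zip" }, (error, stdout, stderr) => {
    if (error) {
        console.error(error);
        return;
    }
    console.log("zip created");
});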
If you don't have the ZIP utility:
This question specifically asks about the zip utility for archiving/compression purposes, so this example assumes you have the zip utility installed on your system. For completeness' sake, some operating systems may not have the utility installed by default. In that case you have at least three options:
Work with the archiving/compression utility that is native to your platform
Replace the shell command in the above Node.js code with the equivalent command from your system. For example, Linux distros usually come with the tar/gzip utilities:
tar -czf <DESIRED_NAME_OF_ARCHIVE_HERE> <PATH_TO_FOLDER_YOU_WANT_ZIPPED_HERE>
This is a nice option as you don't need to install anything new onto your operating system or manage another dependency (kind of the whole point for this answer).
Obtain the zip binary for your OS/distribution.
For example on Ubuntu: apt install zip.
The zip utility has been tried and tested for decades; it's fairly ubiquitous and a safe choice. Do a quick Google search or go to the creator Info-ZIP's website for downloadable binaries.
Use a third party library/module (of which there are plenty on NPM).
I don't prefer this option. However, if you don't really care to understand the native methods and introducing a new dependency is a non-issue, this is also a valid option.
This is another library which zips the folder in one line:
zip-local
var zipper = require('zip-local');
zipper.sync.zip("./hello/world/").compress().save("pack.zip");
archive.bulk is now deprecated; the new method to use for this is glob:
var fs = require('fs');
var archiver = require('archiver');

var fileName = 'zipOutput.zip';
var fileOutput = fs.createWriteStream(fileName);
var archive = archiver('zip');

fileOutput.on('close', function () {
    console.log(archive.pointer() + ' total bytes');
    console.log('archiver has been finalized and the output file descriptor has closed.');
});

archive.pipe(fileOutput);

archive.glob("../dist/**/*"); // some glob pattern here
archive.glob("../dist/.htaccess"); // another glob pattern
// add as many as you like

archive.on('error', function(err) {
    throw err;
});

archive.finalize();
To include all files and directories:
archive.bulk([
    {
        expand: true,
        cwd: "temp/freewheel-bvi-120",
        src: ["**/*"],
        dot: true
    }
]);
It uses node-glob (https://github.com/isaacs/node-glob) underneath, so any matching expression compatible with that will work.
To pipe the result to the response object (for scenarios where you need to download the zip rather than store it locally):
archive.pipe(res);
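A minimal sketch of that scenario, assuming an Express route handler (the route, file name, and glob pattern are placeholders):

app.get('/download', (req, res) => {
    const archive = archiver('zip');
    res.attachment('archive.zip'); // sets Content-Disposition so the browser downloads it
    archive.on('error', err => res.status(500).end(err.message));
    archive.pipe(res);
    archive.glob('dist/**/*');
    archive.finalize();
});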
Sam's hints for accessing the content of the directory worked for me.
src: ["**/*"]
I have found this small library that encapsulates what you need.
npm install zip-a-folder
const zipAFolder = require('zip-a-folder'); // zip-a-folder is not a valid identifier

await zipAFolder.zip('/path/to/the/folder', '/path/to/archive.zip');
https://www.npmjs.com/package/zip-a-folder
Adm-zip has problems just compressing an existing archive (https://github.com/cthackers/adm-zip/issues/64) as well as corruption when compressing binary files.
I've also run into compression corruption issues with node-zip (https://github.com/daraosn/node-zip/issues/4).
node-archiver is the only one that seems to work well for compression, but it doesn't have any decompression functionality.
Since archiver has not been compatible with newer versions of webpack for a long time, I recommend using zip-lib.
var zl = require("zip-lib");

zl.archiveFolder("path/to/folder", "path/to/target.zip").then(function () {
    console.log("done");
}, function (err) {
    console.log(err);
});
As of today, I'm using AdmZip and it works great:
import AdmZip = require('adm-zip');

export async function archiveFile() {
    try {
        const zip = new AdmZip();
        const outputDir = "/output_file_dir.zip";
        zip.addLocalFolder("./yourFolder");
        zip.writeZip(outputDir);
    } catch (e) {
        console.log(`Something went wrong ${e}`);
    }
}
An import-based version of the answer at https://stackoverflow.com/a/51518100.
To zip a single directory:
import archiver from 'archiver';
import fs from 'fs';

export default zipDirectory;

/**
 * From: https://stackoverflow.com/a/51518100
 * @param {String} sourceDir: /some/folder/to/compress
 * @param {String} outPath: /path/to/created.zip
 * @returns {Promise}
 */
function zipDirectory(sourceDir, outPath) {
    const archive = archiver('zip', { zlib: { level: 9 } });
    const stream = fs.createWriteStream(outPath);

    return new Promise((resolve, reject) => {
        archive
            .directory(sourceDir, false)
            .on('error', err => reject(err))
            .pipe(stream);

        stream.on('close', () => resolve());
        archive.finalize();
    });
}
To zip multiple directories:
import archiver from 'archiver';
import fs from 'fs';

export default zipDirectories;

/**
 * Adapted from: https://stackoverflow.com/a/51518100
 * @param {String[]} sourceDirs: ['/some/folder/to/compress', ...]
 * @param {String} outPath: /path/to/created.zip
 * @returns {Promise}
 */
function zipDirectories(sourceDirs, outPath) {
    const archive = archiver('zip', { zlib: { level: 9 } });
    const stream = fs.createWriteStream(outPath);

    return new Promise((resolve, reject) => {
        var result = archive;
        sourceDirs.forEach(sourceDir => {
            result = result.directory(sourceDir, false);
        });

        result
            .on('error', err => reject(err))
            .pipe(stream);

        stream.on('close', () => resolve());
        archive.finalize();
    });
}
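Hypothetical usage (the paths are placeholders); note that the contents of all listed directories land at the root of the archive:

zipDirectories(['/tmp/dirA', '/tmp/dirB'], '/tmp/combined.zip')
    .then(() => console.log('combined.zip written'));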
You can try it in a simple way:
Install zip-dir:
npm install zip-dir
and use it:
var zipdir = require('zip-dir');

let foldername = src_path.split('/').pop();
zipdir(<<src_path>>, { saveTo: 'demo.zip' }, function (err, buffer) {
    // `buffer` holds the zipped contents; `err` is set on failure
});
I ended up wrapping archiver to emulate JSZip, as refactoring my whole project would take too much effort. I understand archiver might not be the best choice, but here you go.
// USAGE:
const zip = JSZipStream.to(myFileLocation)
    .onDone(() => {})
    .onError(() => {});

zip.file('something.txt', 'My content');
zip.folder('myfolder').file('something-inFolder.txt', 'My content');
zip.finalize();

// NodeJS file content:
var fs = require('fs');
var path = require('path');
var archiver = require('archiver');

function zipper(archive, settings) {
    return {
        output: null,
        streamToFile(dir) {
            const output = fs.createWriteStream(dir);
            this.output = output;
            archive.pipe(output);
            return this;
        },
        file(location, content) {
            if (settings.location) {
                location = path.join(settings.location, location);
            }
            archive.append(content, { name: location });
            return this;
        },
        folder(location) {
            if (settings.location) {
                location = path.join(settings.location, location);
            }
            return zipper(archive, { location: location });
        },
        finalize() {
            archive.finalize();
            return this;
        },
        onDone(method) {
            this.output.on('close', method);
            return this;
        },
        onError(method) {
            this.output.on('error', method);
            return this;
        }
    };
}

exports.JSZipStream = {
    to(destination) {
        console.log('stream to', destination);
        const archive = archiver('zip', {
            zlib: { level: 9 } // Sets the compression level.
        });
        return zipper(archive, {}).streamToFile(destination);
    }
};

How to require from a URL in Node.js

Is there a standard way to require a Node module located at some URL (not on the local filesystem)?
Something like:
require('http://example.com/nodejsmodules/myModule.js');
Currently, I am simply fetching the file into a temporary file, and requiring that.
You can fetch the module using the http.get method and execute it in a sandbox using the vm module's runInThisContext and runInNewContext methods.
Example
var http = require('http')
  , vm = require('vm')
  , concat = require('concat-stream'); // this is just a helper to receive the
                                       // http payload in a single callback
                                       // see https://www.npmjs.com/package/concat-stream

http.get({
    host: 'example.com',
    port: 80,
    path: '/hello.js'
}, function(res) {
    res.setEncoding('utf8');
    res.pipe(concat({ encoding: 'string' }, function(remoteSrc) {
        vm.runInThisContext(remoteSrc, 'remote_modules/hello.js');
    }));
});
IMO, executing remote code inside a server application's runtime may be reasonable only when there is no alternative, and only if you trust the remote service and the network in between.
Install the module first:
npm install require-from-url
And then put this in your file:
var requireFromUrl = require('require-from-url/sync');
requireFromUrl("http://example.com/nodejsmodules/myModule.js");
A zero-dependency version (Node 6+ required; you can simply convert it back to ES5):
const http = require('http'), vm = require('vm');

['http://example.com/nodejsmodules/myModule.js'].forEach(url => {
    http.get(url, res => {
        if (res.statusCode === 200 && /\/javascript/.test(res.headers['content-type'])) {
            let rawData = '';
            res.setEncoding('utf8');
            res.on('data', chunk => { rawData += chunk; });
            res.on('end', () => { vm.runInThisContext(rawData, url); });
        }
    });
});
This is still the asynchronous version; if you need a synchronous load, a synchronous HTTP request module would be required, for example.
If you want something more like require, you can do this:
var http = require('http')
  , vm = require('vm')
  , concat = require('concat-stream')
  , async = require('async');

function http_require(url, callback) {
    http.get(url, function(res) {
        // console.log('fetching: ' + url)
        res.setEncoding('utf8');
        res.pipe(concat({ encoding: 'string' }, function(data) {
            callback(null, vm.runInThisContext(data));
        }));
    });
}

var urls = [
    'http://example.com/nodejsmodules/myModule1.js',
    'http://example.com/nodejsmodules/myModule2.js',
    'http://example.com/nodejsmodules/myModule3.js',
];

async.map(urls, http_require, function(err, results) {
    // `results` is an array of values returned by `runInThisContext`
    // the rest of your program logic
});
You could overwrite the default require handler for .js files:
require.extensions['.js'] = function (module, filename) {
    // ...
};
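A minimal sketch of what such a handler typically does (note that require.extensions is deprecated, and require() will not resolve a raw URL without extra resolution work; the synchronous remote fetch is left as an assumption):

const fs = require('fs');

require.extensions['.js'] = function (module, filename) {
    const src = fs.readFileSync(filename, 'utf8');
    // a remote loader would fetch the source synchronously here instead
    module._compile(src, filename);
};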
You might want to check out better-require, as it does pretty much this for many file formats. (I wrote it.)
const localeSrc = 'https://www.trip.com/m/i18n/100012631/zh-HK.js';
const https = require('https'); // the source URL is https, so use the https module
const vm = require('vm');
const concat = require('concat-stream');

https.get(localeSrc, res => {
    res.setEncoding('utf8');
    res.pipe(
        concat({ encoding: 'string' }, remoteSrc => {
            let context = {};
            const script = new vm.Script(remoteSrc);
            script.runInNewContext(context);
            console.log(context);
        }),
    );
}).on('error', err => {
    // request errors arrive via the 'error' event, not a second callback
    console.log('err', err);
});
