If I have a watcher like this:
gulp.watch('js/**/*.js').on('change', path => {
gulp.series(build, reload)();
});
...and the build task looks like this:
const build = done => {
return gulp
.src(path) // Use "path" here
.pipe(
rename({
dirname: ''
})
)
.pipe(uglify())
.pipe(gulp.dest('build'));
};
How can I pass the path argument to the build task?
As an exercise, I believe I got this working as you wanted. But first let me say that the traditional way of limiting the source pipeline is with something like gulp-newer; check whether that accomplishes what you want.
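If that route fits, here is a minimal sketch of the gulp-newer approach, assuming the same uglify pipeline and 'build' destination as in the question (the rename step is omitted for simplicity):

const newer = require('gulp-newer');

// Hedged sketch: gulp-newer drops files that are not newer than their
// counterparts already in the destination, so only changed files are rebuilt.
const build = () =>
    gulp
        .src('js/**/*.js')
        .pipe(newer('build')) // compare each file against the matching file under 'build'
        .pipe(uglify())
        .pipe(gulp.dest('build'));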
But here is something that may work for you [not well tested!]:
function build (path) {
return new Promise(resolve => {
// using setTimeout() to prove async/await is working as expected
setTimeout(() => {
resolve('resolved');
}, 2000);
// put your gulp.src pipeline here using 'path'
console.log("2 path = " + path);
});
};
function anotherTask (path) {
    return new Promise(resolve => {
        // put your gulp.src pipeline here using 'path'
        console.log("4 path = " + path);
        resolve('resolved'); // without resolving, the 'await' below would hang forever
    });
}
function testWatch () {
console.log("in testWatch");
// debounceDelay because gulp likes to call the watcher 2 or 3 times otherwise
// see "gulp watch task running multiple times when a file is saved"
var watcher = gulp.watch('js/**/*.js', { debounceDelay: 2000 });
// I added the async/await because I wasn't sure those functions would be run in series
// as you wanted.
// With the event listener route I couldn't get gulp.series to work,
// so went with async/await.
watcher.on('change', async function(path, stats) {
console.log('1 File ' + path + ' was changed');
await build(path);
console.log("3 after build");
// I would assume that the **last** task in the chain doesn't need 'await'
// or to return a promise as in anotherTask
await anotherTask(path);
console.log("5 after anotherTask");
});
};
gulp.task('default', gulp.series(testWatch));
The "gulp watch running multiple times" issue is the one referenced in the code comments above.
Output (my js watch src is different from yours):
in testWatch
1 File src\js\main.js was changed
2 path = src\js\main.js
3 after build
4 path = src\js\main.js
5 after anotherTask
1 File src\js\taxonomy.js was changed
2 path = src\js\taxonomy.js
3 after build
4 path = src\js\taxonomy.js
5 after anotherTask
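For completeness, here is an untested sketch of how the build function above could run the question's actual pipeline with path and still return something awaitable, assuming gulp-rename and gulp-uglify are required as in the question:

function build (path) {
    return new Promise((resolve, reject) => {
        gulp
            .src(path) // the changed file reported by the watcher
            .pipe(rename({ dirname: '' }))
            .pipe(uglify())
            .pipe(gulp.dest('build'))
            .on('end', resolve) // the dest stream has finished writing
            .on('error', reject);
    });
}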
Related
I have a project with functions that read files and extract their hash codes. After the hash codes are extracted, the subfiles are built one by one. Finally, I want to collect all of these hash codes into an array and create a JSON file. This needs to happen after the iterateFolders() function called inside readDirectory() has finished, but the console.log below it runs without waiting for that function. Please help.
My functions are as follows:
//Calculate build time
function getBuildTime(start,end) {
let time = (end - start);
let buildTime = `${new Date().toLocaleDateString()} ${new Date().toLocaleTimeString()} Build time: ${time} ms \n`
fs.writeFile('build-time.log', buildTime,function (err) { //output log file
if (err) return console.log(err);
});
}
//async metaHash calculation from folder path
async function computeMetaHash(folder, inputHash = null) {
const hash = inputHash ? inputHash : createHash('sha256');
const info = await fsp.readdir(folder, { withFileTypes: true });
//construct a string from the modification date, the filename and the filesize
for (let item of info) {
const fullPath = path.join(folder, item.name)
if (item.isFile()) {
const statInfo = await fsp.stat(fullPath); // stat returns all information about the file
// compute hash string name:size:mtime
const fileInfo = `${fullPath}:${statInfo.size}:${statInfo.mtimeMs}`;
hash.update(fileInfo);
} else if (item.isDirectory()) {
// recursively walk sub-folders
await computeMetaHash(fullPath, hash);
}
}
// if not being called recursively, get the digest and return it as the hash result
if (!inputHash) {
return hash.digest('base64');
}
}
async function iterateFolders(folderPath) {
folderPath.forEach(function (files) {
// the function takes a folder path as input
computeMetaHash(files).then(result => { //call create hash function
console.log({"path":files,"hashCode":result});
}).then(()=>{ //build fragments
// folderPath is an array, so each 'files' entry is one folder path
console.log("%s build...", files);
execSync(`cd ${files} && npm run build`, { encoding: 'utf-8' });
}).then(()=>{// Finish timing
end = new Date().getTime();
getBuildTime(start,end);
}).catch(err => {
console.log(err);
});
});
}
async function readDirectory() {
let files = await readdir(p)
const folderPath = files.map(function (file) {
//return file or folder path
return path.join(p, file);
}).filter(function (file) {
// synchronous check: keep the entry only if it is a directory
return fs.statSync(file).isDirectory();
})
//check hash.json exist or not
if (fs.existsSync(hashFile)) {
// path exists
console.log("File exists: ", hashFile);
}
else
{
//This is the first pipeline: all fragments will build, then hash.json will be created.
console.log(hashFile, " does NOT exist; the build will start and hash.json will be created:");
// Start timing
start = new Date().getTime();
iterateFolders(folderPath,files);
console.log("IT WILL BE LAST ONE ")
}
}
readDirectory();
Well, if you want to wait for its execution, then you have to use await :) Currently it's just iterateFolders(folderPath, files); you run it, but you don't wait for it.
await iterateFolders(folderPath,files);
That's your first issue. Then this method runs a loop and calls some other methods, and there are two more problems there. First, the promises created inside the loop need to be returned or awaited (which you do not do). And second, await doesn't work inside forEach, as stated in the comments above. Read "Using async/await with a forEach loop" for more details.
Fix those three issues and you'll make it.
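To see why the forEach point matters, here is a tiny self-contained demonstration (hypothetical delays, plain Node.js):

// 'done' prints before any 'item': forEach fires the async callbacks
// and returns immediately, ignoring the promises they produce.
async function demo() {
    [1, 2, 3].forEach(async (n) => {
        await new Promise((resolve) => setTimeout(resolve, 100));
        console.log('item', n);
    });
    console.log('done'); // runs first
}
demo();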
In the iterateFolders function, you need to await computeMetaHash calls. To do so you can either use a for loop instead of calling forEach on folderPath or change forEach to map and use Promise.all.
Using the for loop method (processes the folders sequentially):
async function iterateFolders(folderPath) {
for (let files of folderPath) {
// the function takes a folder path as input
await computeMetaHash(files).then(result => { //call create hash function
console.log({"path":files,"hashCode":result});
}).then(()=>{ //build fragments
// folderPath is an array, so each 'files' entry is one folder path
console.log("%s build...", files);
execSync(`cd ${files} && npm run build`, { encoding: 'utf-8' });
}).then(()=>{// Finish timing
end = new Date().getTime();
getBuildTime(start,end);
}).catch(err => {
console.log(err);
});
}
}
Using the Promise.all method (starts all folders concurrently; note that execSync still blocks, so the builds themselves run one at a time):
async function iterateFolders(folderPath) {
return Promise.all(folderPath.map(function (files) {
// the function takes a folder path as input
return computeMetaHash(files).then(result => { //call create hash function
console.log({"path":files,"hashCode":result});
}).then(()=>{ //build fragments
// folderPath is an array, so each 'files' entry is one folder path
console.log("%s build...", files);
execSync(`cd ${files} && npm run build`, { encoding: 'utf-8' });
}).then(()=>{// Finish timing
end = new Date().getTime();
getBuildTime(start,end);
}).catch(err => {
console.log(err);
});
}));
}
If you prefer, using async/await also lets you get rid of the then and catch chains in both methods, which I believe makes the code a little easier to read and understand.
Here's an example using the Promise.all method:
async function iterateFolders(folderPath) {
return Promise.all(folderPath.map(async (files) => {
try {
const result = await computeMetaHash(files);
console.log({ path: files, hashCode: result });
// build fragments
// folderPath is an array, so each 'files' entry is one folder path
console.log('%s build...', files);
execSync(`cd ${files} && npm run build`, { encoding: 'utf-8' });
// Finish timing
const end = Date.now();
getBuildTime(start, end);
} catch(err) {
console.log(err);
}
}));
}
You might also want to check out for await...of.
Note: you also need to await iterateFolders when it's called in readDirectory.
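Since the original goal was to collect every hash code into an array and then create a JSON file, here is a hedged sketch of how that final step could look once iterateFolders is awaited, returning the results from the Promise.all version above (fsp, hashFile, p, and readdir as in the question):

// Sketch only: return the collected results from iterateFolders,
// then write them to hash.json after every build has finished.
async function iterateFolders(folderPath) {
    return Promise.all(folderPath.map(async (files) => {
        const result = await computeMetaHash(files);
        console.log('%s build...', files);
        execSync(`cd ${files} && npm run build`, { encoding: 'utf-8' });
        return { path: files, hashCode: result };
    }));
}

async function readDirectory() {
    const files = await readdir(p);
    const folderPath = files
        .map((file) => path.join(p, file))
        .filter((file) => fs.statSync(file).isDirectory());
    const hashes = await iterateFolders(folderPath); // note the await
    await fsp.writeFile(hashFile, JSON.stringify(hashes, null, 2));
    console.log('hash.json written after all builds finished');
}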
So I have a mind boggler: my app grabs a git repo (simple-git), then runs npm i on the files inside (shelljs), and then zips them using archiver. Of course it needs to be async; the first and second parts work, but it fails at the archiving step (the next step is for axios to await the zip being done). Also, before this change, running the zip code together with the repo-grabbing code created the zip file in the correct root directory, but now it is created in the folder directory instead (repo/folder), and the zip is empty instead of containing the other contents. Please assist if you can.
The code:
// Make call
function makeCall() {
return new Promise((resolve) => {
resolve(
git()
.silent(true)
.clone(remote, ["-b", branch])
.then(() => console.log("finished"))
.catch((err) => console.error("failed: ", err))
);
});
}
//Make NPM Modules
async function makeModules() {
await makeCall();
const pathOfFolder = path.join(__dirname, "folder/sub");
shell.cd(pathOfFolder)
return new Promise((resolve) => {
resolve(
shell.exec("npm i"));
});
}
async function makeZip() {
await makeModules();
const output = fs.createWriteStream(`${branch}.zip`);
const archive = archiver("zip");
output.on("close", function () {
console.log(archive.pointer() + " total bytes");
console.log(
"archiver has been finalized and the output file descriptor has closed."
);
});
archive.pipe(output);
// append files from a sub-directory, putting its contents at the root of archive
archive.directory("folder", false);
// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory("subdir/", "new-subdir");
archive.finalize();
}
makeZip();
Resolved it: I moved the zip logic to other files and set the paths correctly.
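For anyone hitting the same symptoms, both are consistent with shell.cd() having changed the working directory (so the relative paths in makeZip resolve under folder/sub) and with finalize() returning before the output stream has flushed. A hedged sketch that pins the paths with __dirname and exposes the 'close' event as a promise, so a later axios call can await the finished zip, might look like:

const path = require("path");
const fs = require("fs");
const archiver = require("archiver");

// Sketch only: absolute paths so an earlier shell.cd() cannot redirect the
// output, plus a promise that resolves once the zip is fully on disk.
function makeZip() {
    return new Promise((resolve, reject) => {
        const zipPath = path.join(__dirname, `${branch}.zip`); // 'branch' as in the question
        const output = fs.createWriteStream(zipPath);
        const archive = archiver("zip");

        output.on("close", () => resolve(zipPath)); // safe to upload now
        archive.on("error", reject);

        archive.pipe(output);
        // zip the cloned repo contents from an absolute path
        archive.directory(path.join(__dirname, "folder"), false);
        archive.finalize();
    });
}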
I'm in the process of migrating from gulp#3.9.1 to gulp#4.0.2 and upgrading my gulp dependencies in the process. I have the following task in my gulpfile, where you can assume directories is just an array of directories I want to perform this operation on:
var gulp = require('gulp');
var ngAnnotate = require('gulp-ng-annotate'); //annotates dependencies in Angular components
var rev = require('gulp-rev'); //appends a hash to the end of file names to eliminate stale cached files
var revReplace = require('gulp-rev-replace');
var uglify = require('gulp-uglify'); // minimizes javascript files
var compressCss = require('gulp-minify-css');
var useref = require('gulp-useref'); // replaces style and script blocks in HTML files
var filter = require('gulp-filter');
var merge = require('merge-stream');
var sourcemaps = require('gulp-sourcemaps');
function minify() {
var tasks = directories.map(function (directory) {
var cssFilter = filter("**/all.min.css", {restore:true});
var jsAppFilter = filter("**/app.min.js", {restore:true});
var jsFilter = filter("**/*.js", {restore:true});
return gulp.src(dstBasePath + directory + "index.html", {allowEmpty: true})
.pipe(useref())
.pipe(cssFilter)
.pipe(compressCss({keepSpecialComments:false}))
.pipe(rev())
.pipe(cssFilter.restore)
.pipe(jsAppFilter)
.pipe(sourcemaps.init())
.pipe(ngAnnotate({add:true, single_quotes:true}))
.pipe(jsAppFilter.restore)
.pipe(jsFilter)
.pipe(uglify())
.pipe(rev())
.pipe(jsFilter.restore)
.pipe(revReplace())
.pipe(sourcemaps.write('.')) // sourcemaps need to be written to same folder for Datadog upload to work
.pipe(gulp.dest(dstBasePath + directory))
});
return merge(tasks);
}
Why would this result in the error "Did you forget to signal async completion?" from Gulp when running the task? Note that I'm using Gulp 4. I've tried passing a callback done to this task, and adding .addListener('end', done) to the final pipe, but this causes my merged stream to end prematurely (presumably when the first one ends). So perhaps one of these plugins is not signaling when it's completed, but how would you even get this to work otherwise? Thanks for any insight you can provide.
return merge(folders.map(function (folder) { // this has worked for me in the past
as has this form, without merge:
gulp.task('init', function (done) {
var zips = getZips(zipsPath);
var tasks = zips.map(function (zip) {
return gulp.src(zipsPath + "/" + zip)
.pipe(unzip({ keepEmpty: true }))
.pipe(gulp.dest(path.join("src", path.basename(zip, ".zip"))))
.on('end', function() { // this bit is necessary
done();
});
});
return tasks;
});
Gulp 4 requires that you signal async completion. There's some good information about it in this answer to a similar question:
Gulp error: The following tasks did not complete: Did you forget to signal async completion?
I had a similar case where I was returning a merged set of tasks, and I was able to resolve the error by making the function async and awaiting the merge. My case looked something like this:
gulp.task("build", async function () {
...
return await merge(tasks);
});
so I think you should be able to do something like
async function minify(){
...
return await merge(tasks);
}
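One hedged caveat: merge-stream returns a stream, not a thenable, so await on it resolves immediately rather than waiting for the pipelines to finish. If the task must genuinely wait, an alternative sketch is to wrap the merged stream's end event in a promise (pipeline trimmed here for brevity):

function minify() {
    var tasks = directories.map(function (directory) {
        // shortened pipeline; use the full chain from the question here
        return gulp.src(dstBasePath + directory + "index.html", { allowEmpty: true })
            .pipe(useref())
            .pipe(gulp.dest(dstBasePath + directory));
    });
    return new Promise(function (resolve, reject) {
        merge(tasks)
            .on('error', reject) // safety net; error forwarding depends on merge-stream
            .on('end', resolve)
            .resume(); // keep the merged stream flowing so 'end' actually fires
    });
}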
When I execute a test which needs the final result of a vscode.window terminal command, the test gives me a false positive. I need to wait until the terminal operation ends in order to execute the asserts.
I use a class named Stack which has a pom file. My test starts by executing cd and mvn clean install in the terminal. The idea of the test assertion is to verify the existence of the target directory.
const buildProgram = () => {
const terminal = vscode.window.createTerminal();
terminal.show();
terminal.sendText('cd ' + stackDirectory);
terminal.sendText('mvn clean install');
}
it("Stack Project build taget directory exists", function() {
const promise = Promise.all([buildProgram()])
.then(() => {
return fs.existsSync(stackDirectory + "/target");
});
expect(promise).to.eventually.equal(false);
});
This test runs without problem but in the end the target directory is not created.
Here is a function which listens for the terminal to exit and then resolves a promise. Essentially we sendText(yourCommandHere + ";exit") so that the terminal exits after executing your command, and then we listen for that exit.
Additionally, I believe that when sending multiple commands it's better to send them on one line, e.g. terminal.sendText(command + ";", false); terminal.sendText(command2 + ";exit"), so that the commands are guaranteed to run in the correct order.
export async function callInInteractiveTerminal(
command: string,
shellPath?: string,
name?: string,
location?: vscode.TerminalLocation
): Promise<vscode.TerminalExitStatus> {
const terminal = vscode.window.createTerminal({
shellPath,
name,
location: location ? location : vscode.TerminalLocation.Panel,
});
terminal.show();
terminal.sendText(command, false);
terminal.sendText("; exit");
return new Promise((resolve, reject) => {
const disposeToken = vscode.window.onDidCloseTerminal(
async (closedTerminal) => {
if (closedTerminal === terminal) {
disposeToken.dispose();
if (terminal.exitStatus !== undefined) {
resolve(terminal.exitStatus);
} else {
reject("Terminal exited with undefined status");
}
}
}
);
});
}
After further research it seems this function only works when terminalLocation is TerminalLocation.Panel, not TerminalLocation.Editor.
I'm also a little worried that a fast command might exit before the event listener is registered, but it works for my use case since I only use it when the terminal needs human interaction.
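For illustration, a hypothetical use of this helper in the original test could look like this (the timeout value is an assumption; Maven builds are slow):

// Hypothetical rewrite of the original test using the helper above.
it("Stack Project build target directory exists", async function () {
    this.timeout(120000); // assumption: generous limit for 'mvn clean install'
    const status = await callInInteractiveTerminal(
        `cd ${stackDirectory} && mvn clean install`
    );
    expect(status.code).to.equal(0); // the terminal exited cleanly
    expect(fs.existsSync(stackDirectory + "/target")).to.equal(true);
});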
I found a possible solution by replacing the test with this:
it("Stack Project build taget directory exists", function() {
buildProgram();
return new Promise((resolve, reject) => setTimeout(function(){
// Assert here.
if(fs.existsSync(stackDirectory + "/target")){
resolve();
}
reject();
}, 5000));
}).timeout('7s');
I don't really like the idea of using a timeout, but I haven't found a way to tell whether the terminal is busy.
Is it possible with gulp v4.0.0 to watch files outside of the folder where gulpfile.js is located?
In older gulp versions it was possible to set a file path like ../../someFile.css and watch for its changes, but in v4 changes are not detected for the same path.
// snip....
let rootFolder = '..\..\root\';
// Watch function
let watchThis = (filePath, gulpTasks) => {
gulp.watch(filePath, gulp.series(gulpTasks))
.on('change', function(path) {
console.log(`${chalk.blue('Changed')}: ${chalk.yellow(path)}`);
})
.on('unlink', function(path) {
console.log(`${chalk.red('Deleted')}: ${chalk.yellow(path)}`);
});
}
// Watch task configuration
let watchFiles = () => {
watchThis([`${rootFolder}/style1.scss`, `${rootFolder}/style2.scss`], 'scss')
watchThis('./js/main.js', 'js')
}
// Final watch task
gulp.task('watch', gulp.series(
'development',
gulp.parallel(
watchFiles,
'startLiveServers'
)
));
// ...snip
Changes to files ['../../style1.scss', '../../style2.scss'] will not be detected, but they will be for './js/main.js'. Am I missing something?
The problem with the new Gulp 4 watch task is in the paths. Unlike the watch task from Gulp 3, the new version is unforgiving and requires a correct path structure with correct path separators.
So instead of using paths that may end up as ..\\..\\root\\/style1.scss, we must convert them to a proper structure like ../../root/style1.scss.
This simple function helps, and the rest is handled by gulp and Node.js:
let fixPath = (oldPath) => {
    // collapse every run of backslashes or forward slashes into a single '/'
    const separators = /\\+|\/+/g;
    return oldPath.replace(separators, '/');
};
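For illustration, usage with the watcher above (hypothetical paths):

// '..\\..\\root\\' style input comes out as '../../root/style1.scss'
watchThis(
    [fixPath(`${rootFolder}/style1.scss`), fixPath(`${rootFolder}/style2.scss`)],
    'scss'
);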