I'm using gulp in one of my projects. When running gulp watch from OS (Arch Linux)
fs.js:921
return binding.readdir(pathModule.toNamespacedPath(path), options.encoding);
^
Error: EACCES: permission denied, scandir '/tmp/systemd-private-c33e4391b18f4b24af3055190fb15730-systemd-hostnamed.service-zrxyaX/'
at Object.fs.readdirSync (fs.js:921:18)
at Gaze._addToWatched (/home/majmun/code/wp/wp-content/plugins/project/node_modules/glob-watcher/node_modules/gaze/lib/gaze.js:274:22)
at Gaze._internalAdd (/home/majmun/code/wp/wp-content/plugins/project/node_modules/glob-watcher/node_modules/gaze/lib/gaze.js:191:10)
at /home/majmun/code/wp/wp-content/plugins/project/node_modules/glob-watcher/node_modules/gaze/lib/gaze.js:400:16
at Array.forEach (<anonymous>)
at /home/majmun/code/wp/wp-content/plugins/project/node_modules/glob-watcher/node_modules/gaze/lib/gaze.js:396:12
at FSReqWrap.oncomplete (fs.js:153:20)
I'm running gulp from host OS instead from a virtual machine because it's much faster.
I understand that problem is that gulp is doing something in /tmp folder of host OS, and some files have root privileges.
If I run `sudo chown -R majmun:users /tmp`, after a while a new file with root privileges appears, which breaks gulp.
Why does gulp watch need the /tmp folder?
Did someone resolve this problem?
Here is code of gulp watch task
// gulp watch
// Watches asset globs and re-runs the 'dev' task on change. Also watches a
// temp file so Ctrl+R can force a rebuild by touching it; Ctrl+C exits.
Gulp.task('watch', ['dev'], function() {
  console.log('Initializing assets watcher:');
  let stream = new Stream();
  let filter = '*.{css,scss,sass,less,js,png,jpg,jpeg,svg}';
  // Temp file used purely as a manual "refresh" trigger (see Ctrl+R below).
  let tmp = Tmp.fileSync();
  let glob = [
    path.src.assets + '**/' + filter,
    path.src.elements + '**/' + filter,
    path.src.properties + '**/' + filter,
  ];
  for (let i = 0; i < glob.length; i++) {
    console.log('- ' + glob[i]);
  }
  let watcher = Gulp.watch(glob, ['dev']);
  watcher.add(tmp.name);
  process.on('SIGINT', function() {
    stream.emit('end');
    process.exit(0);
  });
  KeyPress(process.stdin);
  process.stdin.on('keypress', function(char, key) {
    if (key && key.ctrl && key.name === 'c') {
      process.emit('SIGINT');
    }
    else if (key && key.ctrl && key.name === 'r') {
      // Touch the temp file so the watcher sees a change and re-runs 'dev'.
      Fs.utimes(tmp.name, new Date(), new Date(), function() {});
    }
  });
  // Raw mode so individual keypresses are delivered without waiting for Enter.
  process.stdin.setRawMode(true);
  process.stdin.resume();
  console.log('Watcher up and running, Ctrl+R to refresh, Ctrl+C to exit.');
  return stream;
});
I fixed this by adding an option when using the tmp library. Instead of the default options, I specify where the temporary folder will be and its permissions.
So instead of this:
let tmp = Tmp.fileSync();
I did this:
// Create the temp file in a user-writable location instead of the system
// default tmp dir (which may contain root-owned entries).
let tmp = Tmp.fileSync({
  // Use settings from .env if defined
  dir: process.env.MY_TMP_DIR || '',   // NOTE: the missing comma here was a syntax error
  mode: process.env.MY_TMP_MODE || '',
  prefix: process.env.MY_TMP_PREFIX || '',
});
Related
I'm helping a remote coworker set up gulp on their Windows 10 computer, and have run into an issue where the watch function in gulp simply will not work.
Other gulp functionality works well, but watch in particular just hangs without any error to the screen.
To ensure I'm not missing anything in my code, I made a simple test gulpfile to be sure that this wasn't due to some other reason.
gulpfile.js
var gulp = require("gulp");
var fs = require('fs');
var compareToPreviousRead = require("./fileReaderTest.js");
gulp.task("watch", function(){
return gulp.watch("Test.txt", async function(){
var diff = await compareToPreviousRead();
console.log(`Diff: "${diff}", Time: "${new Date().toGMTString()}"`);
Promise.resolve(true);
});
});
fileReaderTest.js
var fs = require('fs');

// Read Test.txt, resolving with its contents (read errors resolve with undefined).
var readTestFile = function() {
  return new Promise(function(resolve) {
    fs.readFile('Test.txt', 'utf8', function(err, contents) {
      resolve(contents);
    });
  });
};

// Contents seen on the previous read; diffs are computed against this.
var originalValue = "";

// Resolve with the text that was added since the previous read.
var compareToPreviousRead = async function() {
  var updatedValue = await readTestFile();
  var diff = updatedValue.replace(originalValue, "");
  originalValue = updatedValue;
  return diff;
};

module.exports = compareToPreviousRead
I also included a few PowerShell Scripts to install and run the app from the same directory.
Someone could also just manually edit a file called "Test.txt" in the same directory (only appending to the file) and the gulp task would work correctly.
WriteToFileEverySecond.ps1
param ($testFileLocation)

# Give the watcher a moment to start before the first write.
Start-Sleep -Seconds 1

# Append a marker ("0--" through "9--") once per second, ten times.
foreach ($count in 0..9) {
    [System.IO.File]::AppendAllText($testFileLocation, "$count--", [System.Text.Encoding]::Ascii)
    Start-Sleep -Seconds 1
}
TestGulpWatch.ps1
# Runs `gulp watch` and the file-writer script side by side as background
# jobs, relaying their output until either one stops producing data.
$currDir = $PSScriptRoot
cd $currDir
npm install --silent
# Start from a fresh, empty Test.txt so the first diff is predictable.
Remove-Item "$currDir/Test.txt" -ErrorAction SilentlyContinue > $null
New-Item "Test.txt" -ItemType File > $null
# Job 1: the gulp watcher under test.
$job1 = Start-Job -ScriptBlock {
param($currDir)
cd $currDir
gulp watch --silent
} -ArgumentList $currDir
Start-Sleep -Seconds 1
# Job 2: appends to Test.txt every second to trigger the watcher.
$job2 = Start-Job -FilePath "$currDir/WriteToFileEverySecond.ps1" -ArgumentList "$currDir/Test.txt"
# Stream both jobs' output while both are still alive.
while ($job1.HasMoreData -and $job2.HasMoreData) {
$job1,$job2 | Receive-Job
}
$job1,$job2 | Stop-Job
$job1,$job2 | Remove-Job
pause
They are using the following versions:
npm: 6.13.1
gulp cli version: 2.2.0
gulp repo version: 4.0.2
I am experiencing such an issue when starting my Redux app:
./node_modules/draftjs-md-converter/dist/index.js
Syntax error: /Users/vlasenkona/Desktop/gris-seqr2/ui/node_modules/draftjs-md-converter/dist/index.js: Identifier '_toConsumableArray' has already been declared (196:9)
194 | var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
195 |
> 196 | function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
| ^
197 |
198 | var parse = require('#textlint/markdown-to-ast').parse;
199 |
at parser.next (<anonymous>)
The startup script is the following:
'use strict';
// Do this as the first thing so that any code reading it knows the right env.
process.env.BABEL_ENV = 'development';
process.env.NODE_ENV = 'development';
// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
process.on('unhandledRejection', err => {
throw err;
});
// Ensure environment variables are read.
require('../config/env');
const fs = require('fs');
const chalk = require('chalk');
const webpack = require('webpack');
const WebpackDevServer = require('webpack-dev-server');
const clearConsole = require('react-dev-utils/clearConsole');
const {
choosePort,
createCompiler,
prepareProxy,
prepareUrls,
} = require('react-dev-utils/WebpackDevServerUtils');
const openBrowser = require('react-dev-utils/openBrowser');
const paths = require('../config/paths');
const config = require('../config/webpack.config.dev');
const createDevServerConfig = require('../config/webpackDevServer.config');
// Yarn is detected by the presence of a yarn.lock file in the project root.
const useYarn = fs.existsSync(paths.yarnLockFile);
// Only clear the console when attached to an interactive terminal.
const isInteractive = process.stdout.isTTY;
// Tools like Cloud9 rely on this.
const DEFAULT_PORT = parseInt(process.env.PORT, 10) || 3000;
const HOST = process.env.HOST || '0.0.0.0';
// We attempt to use the default port but if it is busy, we offer the user to
// run on a different port. `detect()` Promise resolves to the next free port.
choosePort(HOST, DEFAULT_PORT)
.then(port => {
if (port == null) {
// We have not found a port.
return;
}
const protocol = process.env.HTTPS === 'true' ? 'https' : 'http';
const appName = require(paths.appPackageJson).name;
const urls = prepareUrls(protocol, HOST, port);
// Create a webpack compiler that is configured with custom messages.
const compiler = createCompiler(webpack, config, appName, urls, useYarn);
// Load proxy config
const proxySetting = require(paths.appPackageJson).proxy;
const proxyConfig = prepareProxy(proxySetting, paths.appPublic);
// Serve webpack assets generated by the compiler over a web server.
const serverConfig = createDevServerConfig(
proxyConfig,
urls.lanUrlForConfig
);
const devServer = new WebpackDevServer(compiler, serverConfig);
// Launch WebpackDevServer.
devServer.listen(port, HOST, err => {
if (err) {
return console.log(err);
}
if (isInteractive) {
clearConsole();
}
console.log(chalk.cyan('Starting the development server...\n'));
openBrowser(urls.localUrlForBrowser);
});
// Gracefully shut down the dev server on termination signals.
['SIGINT', 'SIGTERM'].forEach(function(sig) {
process.on(sig, function() {
devServer.close();
process.exit();
});
});
})
.catch(err => {
// Surface the failure reason (if any) and exit non-zero.
if (err && err.message) {
console.log(err.message);
}
process.exit(1);
});
The startup of the app was working just fine until I switched to babel7, the process of which (and relevant config files) can be found in my other thread:
Switching to babel 7 causes jest to show 'unexpected token'
I tried updating draftjs-md-converter to the latest version but it did not fix the issue. I found in this thread:
Identifier already declared - Identifier 'userScore' has already been declared
That such an error may be happening because the app is somehow launched twice, but not sure why it started to do so right now.
The solution can be found here:
https://github.com/kadikraman/draftjs-md-converter/pull/56
It happens because both Babel 7 and draftjs-md-converter define the _toConsumableArray function. A pull request was issued on the project's GitHub page that switches the build to Parcel instead, and the solution is the following:
git clone https://github.com/kadikraman/draftjs-md-converter.git
cd draftjs-md-converter
git checkout origin/chore/use-parcel-for-bundling
npm i
npm run compile
After that copying the files from the dist folder into node_modules/draftjs-md-converter/dist/ one solved the issue.
my package.json has scripts like this
{
"scripts": {
"pretest": "npm run tsc",
"test": "gulp e2e"
}
}
we use typescript and webdriverIO for automation. I want to use gulp so that i can pass parameters to my test framework. Example:
npm test --suite HomePageTests
then the specs related to Home page must run.
I have the gulp file like this
// gulpfile.js
const gulp = require('gulp');
const path = require('path'); // was missing: `path` is used below and would crash at load
const Launcher = require('webdriverio/build/lib/launcher');
const wdio = new Launcher(path.join(__dirname,
  'src/config/conf.ts'));

// fetch command line arguments
// Parses `--name value` and bare `--flag` pairs from argv into an object,
// e.g. `gulp e2e --suite HomePageTests` -> { suite: 'HomePageTests' }.
const arg = (argList => {
  let arg = {};
  let curOpt = null;
  for (let a = 0; a < argList.length; a++) {
    const thisOpt = argList[a].trim();
    const opt = thisOpt.replace(/^\-+/, '');
    if (opt === thisOpt) {
      // argument value: attach it to the most recent `--name`, if any
      if (curOpt) arg[curOpt] = opt;
      curOpt = null;
    } else {
      // argument name: default to `true` until a value follows
      curOpt = opt;
      arg[curOpt] = true;
    }
  }
  console.log("arg", arg)
  return arg;
})(process.argv);

// Runs the WebdriverIO launcher and propagates its exit code to the shell.
gulp.task('e2e', () => {
  return wdio.run(code => {
    process.exit(code);
  }, error => {
    console.error('Launcher failed to start the test', error.stacktrace);
    process.exit(1);
  });
});
So when I call gulp directly like
gulp e2e --suite HomePageTests
it gets printed as
suite: HomePageTests
But if i use
npm test --suite HomePageTests
It fails as it prints gulp e2e HomePageTests
questions
How do I pass these values from npm to make gulp understand
If I pass another value, like `gulp e2e --server staging`, and would like to use the value "staging" in my spec file like:
if server=== staging{
// do this
} else {
// do that
}
How should I pass them from gulp file to my spec file?
Thanks!!
You could use the yargs dependence
var argv = require('yargs').argv;

// Logs whatever value was passed as `--arg <value>` on the command line.
gulp.task('test', () => {
  console.log(argv.arg);
});
then if you run a command on a gulp passing the arg like this
gulp test --arg HomePageTests
it will output on console HomePageTests
I use perforce for code checkin in our private server, so after production build (i.e., when index.html is read only) my gulp task fails with below error.
[INFO] [11:30:19] Error: EPERM: operation not permitted, open 'C:\MyProjectDir\index.html'
Once I check out the files or uncheck the read-only property (on my Windows box), the gulp task finishes successfully. Is there a way to change the file's permissions through a gulp task to avoid this manual intervention?
PS: I have used chmod(777)
Gulp file (end part where inject takes place)
// Plugin requires — one declaration per statement (the original comma-chained
// list is error-prone: a single stray semicolon turns the rest into globals).
var gulp = require('gulp');
var gutil = require('gulp-util');
var uglify = require('gulp-uglify');
var minify = require('gulp-minify-css');
var concatVendor = require('gulp-concat-vendor');
var concatCss = require('gulp-concat-css');
var rev = require('gulp-rev');
var clone = require('gulp-clone');
var inject = require('gulp-inject');
var del = require('del');
var runSequence = require('run-sequence');
var chmod = require('gulp-chmod');
var series = require('stream-series');
var ngAnnotate = require('gulp-ng-annotate');
var rename = require("gulp-rename");
...
// Injects the built vendor/custom CSS and JS references into the (possibly
// read-only) index file, clearing its permission bits first so gulp.dest
// can overwrite it.
gulp.task('index', ['gulp-js-files', 'gulp-css-files'], function() {
  var target = gulp.src(mainIndexFile);
  // NOTE: gulp-chmod expects an octal mode. The decimal literal 777
  // (= 0o1411) silently set the wrong permission bits; 0o777 is intended.
  return target.pipe(chmod(0o777)).pipe(inject(series(vendorCss, customCss, vendorJs), { ignorePath: ['MyProjectDir'], addRootSlash: false }))
    .pipe(gulp.dest(mainDestination));
});
...
Running cmd in Administrator mode
One option would be to use gulp-exec to remove the read-only flag with attrib -r, then re-set it with attrib +r after injecting the data (if necesarry).
Following is an example:
// Clears the Windows read-only attribute on all files so later tasks can
// overwrite them. Accepts gulp's completion callback because `exec` is
// asynchronous — without it the task is reported finished before attrib runs.
gulp.task('remove-readonly-attributes', function(done) {
  require("child_process").exec("attrib -r <dir-name>\*.* /s", done);
});
I'm just wondering whether it is at all possible to transfer a directory from a unix server to my local machine using the ssh2 module in node.js. I have connected to the remote host and can read the directory as well as transfer single files, but there are 28 folders in the directory which each contain files and sub directories. What I'd like to do is take an exact copy of the main directory from the server to my local machine.
I was using fastGet with single files, but transferring a directory gives: Error: EISDIR, open __dirname/../localdirectory/ which I think implies I can't use fastGet to get an entire directory. I also tried using the exec command to try and scp it over, but I couldn't work out the syntax for the local directory:
// c is an active connection
// Run scp on the remote host and log the command's output/exit status.
c.exec('scp filethatexists.extension /../filepath/newname.extension', function(err, stream) {
  if (err) {
    console.log("error: " + err);
    // `stream` is undefined when exec fails, so bail out instead of the
    // original no-op `stream.end;` (a bare property access, never a call).
    return;
  }
  stream.on('data', function(data, extended) {
    console.log((extended === 'stderr' ? 'STDERR: ' : 'STDOUT: ') + data);
  });
  stream.on('end', function() {
    console.log('Stream :: EOF');
  });
  stream.on('close', function() {
    console.log('Stream :: close');
  });
  stream.on('exit', function(code, signal) {
    console.log('Stream :: exit :: code: ' + code + ', signal: ' + signal);
    c.end();
  });
});
This just results in the EOF calling. This code was just me testing If I could get a single file transferring.
Can anyone provide me with any assistance? Thank you in advance.
A couple of solutions:
You could recursively traverse the directory (making directories and transferring files as needed) using the sftp methods
Tar the directory (compress it too if you want) to stdout (e.g. tar cf - mydir) and then process that incoming stdout data with the tar module (and the built-in zlib module first if you end up compressing the directory).
// Requires:
// * `npm install tar-fs`
// * `ssh2` v0.5.x or newer
var tar = require('tar-fs');
var zlib = require('zlib');
// Streams a remote directory over an existing ssh2 connection by running
// `tar` on the remote host and extracting the tarball locally as it arrives.
//
// conn        - connected ssh2 client
// remotePath  - directory on the remote host to copy
// localPath   - local directory to extract into
// compression - optional: true (gzip level 6), a number 1-9, or omitted
// cb          - completion callback, invoked as cb(err)
function transferDir(conn, remotePath, localPath, compression, cb) {
var cmd = 'tar cf - "' + remotePath + '" 2>/dev/null';
// `compression` is optional: shift arguments when cb was passed in its place.
if (typeof compression === 'function')
cb = compression;
else if (compression === true)
compression = 6;
if (typeof compression === 'number'
&& compression >= 1
&& compression <= 9)
cmd += ' | gzip -' + compression + 'c 2>/dev/null';
else
compression = undefined; // any other value: transfer uncompressed
conn.exec(cmd, function(err, stream) {
if (err)
return cb(err);
var exitErr;
var tarStream = tar.extract(localPath);
tarStream.on('finish', function() {
// Extraction finished; report any remote-process failure recorded below.
cb(exitErr);
});
stream.on('exit', function(code, signal) {
if (typeof code === 'number' && code !== 0) {
exitErr = new Error('Remote process exited with code '
+ code);
} else if (signal) {
exitErr = new Error('Remote process killed with signal '
+ signal);
}
}).stderr.resume(); // drain stderr so the channel can close
if (compression)
stream = stream.pipe(zlib.createGunzip()); // gunzip before untarring
stream.pipe(tarStream);
});
}
// USAGE ===============================================================
var ssh = require('ssh2');
var conn = new ssh();
conn.on('ready', function() {
// Download remote /home/foo into ./download next to this script.
transferDir(conn,
'/home/foo',
__dirname + '/download',
true, // uses compression with default level of 6
function(err) {
if (err) throw err;
console.log('Done transferring');
conn.end();
});
}).connect({
host: '192.168.100.10',
port: 22,
username: 'foo',
password: 'bar'
});
I am also trying to download folders using SSH. It took me more than 10 days and I'm still working on it. In the meantime I found some other code which will do the same thing for me. The code below will download every folder and file inside a directory.
enter image description here