I have a gulpfile.js which has a bunch of tasks.
I noticed that my gulp tasks run, but after finishing they don't exit/terminate. I have to use Ctrl+C to exit.
I went through a thread that talked about a similar problem, but it was using gulp-mocha and the issue was in the plugin.
In my case, I also know what is causing the issue: I have required a JS file that I've written (edgecast.js) into my gulpfile, because one of the tasks uses a function from that file. It is a simple file with one function that interacts with a REST API. If I don't require that file, everything works great.
I'm exporting just one function from the edgecast.js file to be used in the gulpfile. It has a call to the setTimeout function (could that be the problem?)
Can anyone help me find the problem?
var main = function () {
    console.log(config.edgecast.username);
    ftp.put(config.ftp.local_path, config.ftp.remote_path, function (hadError) {
        if (!hadError) {
            console.log('File transferred successfully');
            rest.put(config.edgecast.purge_url, purgeOptions).on('complete', function (data, response) {
                if (response.statusCode != 200)
                    console.log('Purge failed');
                else {
                    console.log('Purge request Issued');
                    purgeID = data.Id;
                    setTimeout(function () {
                        rest.get(config.edgecast.get_purge_url + purgeID, purgeOptions).on('complete', function (data) {
                            if (data.CompleteDate) {
                                console.log('Purge completed on ' + data.CompleteDate);
                                rest.put(config.edgecast.load_url, loadOptions).on('complete', function (data, response) {
                                    if (response.statusCode == 200)
                                        console.log('Asset Loaded');
                                    else {
                                        console.log('Load Failed');
                                        console.log(response.statusCode);
                                    }
                                });
                            }
                        });
                    }, 180000);
                }
            });
        } else console.log('File Transfer Failed. ' + hadError);
        ftp.raw.quit(function (err) {
            if (err) return console.error(err);
        });
    });
};
Above is the function I'm exporting from the edgecast.js file. It is the only function in the file; the rest are just variables. And then in gulpfile.js there is
var edgecast = require('./edgecast.js');
And this require statement causes all the problems.
The edgecast.js file imports the following libraries:
var rest = require('restler'),
    config = require('./config'),
    JSFtp = require('jsftp');
Found the problem. It turns out the gulp tasks don't exit because the FTP connection was left open, and they wait until it is closed.
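For anyone hitting the same symptom, a minimal sketch of the idea (reusing the jsftp API already shown above; the host value and the closeFtp name are placeholders of mine): quit the FTP connection once all work is finished, because the open control socket is what keeps the Node process, and therefore gulp, alive.

    var JSFtp = require('jsftp');
    var ftp = new JSFtp({ host: 'example.com' }); // placeholder host

    function closeFtp() {
        // While this socket is open, the event loop has a live handle,
        // so the gulp task cannot terminate on its own.
        ftp.raw.quit(function (err) {
            if (err) console.error(err);
        });
    }

The important part is to call closeFtp() from the last callback in the chain (after the purge and load steps complete), rather than right after starting the transfer.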
Related
I have a file dmreboot_service.js in my /js folder. When I run this file using node /js/dmreboot_service.js it successfully invokes a direct method in Azure.
I need to be able to execute this function or file on a button click from my web app.
I tried loading the script into the head of my html using:
<script src="js/dmreboot_service.js"></script>
I put an alert in the external file. If I put this alert at the top of the file it works, but at the bottom it fails, so the contents of the file are not loading.
The content of dmreboot_service.js is:
'use strict';

var Registry = require('azure-iothub').Registry;
var Client = require('azure-iothub').Client;

var connectionString = "HostName=XxxxxxxX.azure-devices.net;SharedAccessKeyName=service;SharedAccessKey=XxxxxxxxxxxxxxxxxxxxxxxxxxxxxxX=";
var registry = Registry.fromConnectionString(connectionString);
var client = Client.fromConnectionString(connectionString);
var deviceToReboot = 'Runner';

var startRebootDevice = function (twin) {
    var methodName = "reboot";
    var methodParams = {
        methodName: methodName,
        payload: null,
        timeoutInSeconds: 30
    };
    client.invokeDeviceMethod(deviceToReboot, methodParams, function (err, result) {
        if (err) {
            console.error("Direct method error: " + err.message);
        } else {
            console.log("Successfully invoked the device to reboot.");
        }
    });
};

var queryTwinLastReboot = function () {
    registry.getTwin(deviceToReboot, function (err, twin) {
        if (err) {
            console.error('Could not query twins: ' + err.constructor.name + ': ' + err.message);
        } else if (twin.properties.reported.iothubDM != null) {
            var lastRebootTime = twin.properties.reported.iothubDM.reboot.lastReboot;
            console.log('Last reboot time: ' + JSON.stringify(lastRebootTime, null, 2));
        } else {
            console.log('Waiting for device to report last reboot time.');
        }
    });
};

startRebootDevice();
setInterval(queryTwinLastReboot, 2000);

alert('dmreboot included!');
I have also tried creating a function in the head of my html that includes the entire contents of dmreboot_service.js, but although the function is called successfully, the code does not execute.
This is the last part of a project that I need to get working. I'm fairly new to this, and this is driving me nuts!! Any advice much appreciated.
I usually handle clicks in HTML with JavaScript like so:
document.querySelector('.whateverclassnameyouchoose').addEventListener('click', onClick);

function onClick(e) {
    // whatever you want the function to do
}
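This assumes the page contains a matching element, e.g. (class name and label are placeholders):

    <button class="whateverclassnameyouchoose">Reboot device</button>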
Hope it helps :)
We've got a Node.js script that is run once a minute to check the status of our apps. Usually, it works just fine. If the service is up, it exits with 0. If it's down, it exits with 1. All is well.
But every once in a while, it just kinda stops. The console reports "Calling status API..." and stops there indefinitely. It doesn't even time out at Node's built-in two-minute timeout. No errors, nothing. It just sits there, waiting, forever. This is a problem, because it blocks subsequent status check jobs from running.
At this point, my whole team has looked at it and none of us can figure out what circumstance could make it hang. We've built in a start-to-finish timeout, so that we can move on to the next job, but that essentially skips a status check and creates blind spots. So, I open the question to you fine folks.
Here's the script (with names/urls removed):
#!/usr/bin/env node

// SETTINGS: -------------------------------------------------------------------------------------------------

/** URL to contact for status information. */
const STATUS_API = process.env.STATUS_API;

/** Number of attempts to make before reporting as a failure. */
const ATTEMPT_LIMIT = 3;

/** Amount of time to wait before starting another attempt, in milliseconds. */
const ATTEMPT_DELAY = 5000;

// RUNTIME: --------------------------------------------------------------------------------------------------

const URL = require('url');
const https = require('https');

// Make the first attempt.
make_attempt(1, STATUS_API);

// FUNCTIONS: ------------------------------------------------------------------------------------------------

function make_attempt(attempt_number, url) {
    console.log('\n\nCONNECTION ATTEMPT:', attempt_number);
    check_status(url, function (success) {
        console.log('\nAttempt', success ? 'PASSED' : 'FAILED');

        // If this attempt succeeded, report success.
        if (success) {
            console.log('\nSTATUS CHECK PASSED after', attempt_number, 'attempt(s).');
            process.exit(0);
        }
        // Otherwise, if we have additional attempts, try again.
        else if (attempt_number < ATTEMPT_LIMIT) {
            setTimeout(make_attempt.bind(null, attempt_number + 1, url), ATTEMPT_DELAY);
        }
        // Otherwise, we're out of attempts. Report failure.
        else {
            console.log("\nSTATUS CHECK FAILED");
            process.exit(1);
        }
    });
}

function check_status(url, callback) {
    var handle_error = function (error) {
        console.log("\tFailed.\n");
        console.log('\t' + error.toString().replace(/\n\r?/g, '\n\t'));
        callback(false);
    };

    console.log("\tCalling status API...");
    try {
        var options = URL.parse(url);
        options.timeout = 20000;
        https.get(options, function (response) {
            var body = '';
            response.setEncoding('utf8');
            response.on('data', function (data) { body += data; });
            response.on('end', function () {
                console.log("\tConnected.\n");
                try {
                    var parsed = JSON.parse(body);
                    if ((!parsed.started || !parsed.uptime)) {
                        console.log('\tReceived unexpected JSON response:');
                        console.log('\t\t' + JSON.stringify(parsed, null, 1).replace(/\n\r?/g, '\n\t\t'));
                        callback(false);
                    }
                    else {
                        console.log('\tReceived status details from API:');
                        console.log('\t\tServer started:', parsed.started);
                        console.log('\t\tServer uptime:', parsed.uptime);
                        callback(true);
                    }
                }
                catch (error) {
                    console.log('\tReceived unexpected non-JSON response:');
                    console.log('\t\t' + body.trim().replace(/\n\r?/g, '\n\t\t'));
                    callback(false);
                }
            });
        }).on('error', handle_error);
    }
    catch (error) {
        handle_error(error);
    }
}
If any of you can see any places where this could possibly hang without output or timeout, that'd be very helpful!
Thank you,
James Tanner
EDIT: p.s. We use https directly, instead of request, so that we don't need to do any installation when the script runs. This is because the script can run on any build machine assigned to Jenkins, without a custom installation.
Aren't you missing the .end()?
http.request(options, callback).end()
Something like explained here.
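For reference, a request created with https.request is only sent once .end() is called; a minimal sketch (example.com is a placeholder):

    const https = require('https');

    const req = https.request({ host: 'example.com', path: '/status' }, (response) => {
        response.resume(); // drain the response so the socket is released
        response.on('end', () => console.log('Done:', response.statusCode));
    });
    req.on('error', console.error);
    req.end(); // without this, the request is never actually sent

(Note that https.get, which the script above uses, calls .end() automatically.)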
Inside your response callback you're not checking the status code.
The .on('error', handle_error); is for errors that occur while connecting to the server; status code errors are the ones the server responds with after a successful connection.
Normally a 200 status response is what you would expect from a successful request.
So a small mod to your https.get to handle this should do, e.g.:
https.get(options, function (response) {
    if (response.statusCode != 200) {
        console.log('\tHTTP statusCode not 200:');
        callback(false);
        return; // no point going any further
    }
    ....
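One more thing that might explain the indefinite hang (an observation of mine, not part of the original answer): the timeout option passed to https.get only makes the request emit a 'timeout' event; Node does not destroy the stalled socket for you, so the request can wait forever unless you act on the event. A sketch, reusing handle_error from the question:

    var req = https.get(options, function (response) {
        // ... same response handling as above ...
    });
    req.on('timeout', function () {
        req.destroy(new Error('Status API timed out')); // surfaces via the 'error' handler
    });
    req.on('error', handle_error);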
I am running node.js on Raspbian and trying to save/update a file every 2-3 seconds using the following code:
var saveFileSaving = false;

function loop() {
    mainLoop = setTimeout(function () {
        // update data
        saveSaveFile(data, function () {
            //console.log("Saved data to file");
            loop();
        });
    }, 1500);
}

function saveSaveFile(data, callback) {
    if (!saveFileSaving) {
        saveFileSaving = true;
        var wstream = fs.createWriteStream(path.join(__dirname, 'save.json'));
        wstream.on('finish', function () {
            saveFileSaving = false;
            callback(data);
        });
        wstream.on('error', function (error) {
            console.log(error);
            saveFileSaving = false;
            wstream.end();
            callback(null);
        });
        wstream.write(JSON.stringify(data));
        wstream.end();
    } else {
        callback(null);
    }
}
When I run this it works fine for an hour, then starts spitting out:
[25/May/2016 11:3:4 am] { [Error: EROFS, open '<path to file>']
  errno: 56,
  code: 'EROFS',
  path: '<path to file>' }
I have tried the jsonfile plugin, which also throws a similar write error after an hour.
I have tried both fileSystem.writeFile and fileSystem.writeFileSync; both give the same error after an hour.
I was thinking it had to do with the handle not being released before a new save occurs, which is why I started using the saveFileSaving flag.
Resetting the system via a hard reset fixes the issue (a soft reset does not work, as the system seems to be locked up).
Any suggestions, guys? I have searched the web and only found one other, slightly similar question from 4 years ago, which was left in limbo.
Note: I am using the callback function from the code to continue with the main loop.
I was able to get this working by unlinking the file and rewriting it on every save. While it is not pretty, it works and shouldn't cause too much overhead.
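A minimal sketch of that unlink-then-rewrite workaround (assuming the same save.json path and fs/path requires as in the question):

    function saveSaveFile(data, callback) {
        var file = path.join(__dirname, 'save.json');
        // Remove the old file first, then write a fresh copy.
        fs.unlink(file, function () { // an ENOENT error on the first run is fine to ignore
            fs.writeFile(file, JSON.stringify(data), function (err) {
                if (err) console.log(err);
                callback(err ? null : data);
            });
        });
    }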
I also added a backup solution which saves a backup every 5 minutes in case the save file has issues.
Thank you for everyone's help.
Here are my ideas:
1) Check the free space when this problem happens by typing in a terminal:
df -h
2) Also check whether the file is still editable when the problem occurs, with nano or vim etc.
3) Your code is too complicated for simply scheduling data manipulation and writing it to a file. Also, whenever the file is busy (saveFileSaving), you lose that iteration's data until the next one. Try this code:
var
    async = require('async'),
    fs = require('fs'),
    path = require('path');

async.forever(function (next) {
    // some data manipulation
    try {
        fs.writeFileSync(path.join(__dirname, 'save.json'), JSON.stringify(data));
    }
    catch (ex) {
        console.error('Error writing data to file:', ex);
    }
    setTimeout(next, 2000);
});
4) How about keeping the file descriptor open?
var
    async = require('async'),
    fs = require('fs'),
    path = require('path');

var file = fs.createWriteStream(path.join(__dirname, 'save.json'));

async.forever(function (next) {
    // some data manipulation
    file.write(JSON.stringify(data));
    setTimeout(next, 2000);
});

var handleSignal = function (exc) {
    // close file
    file.end();
    if (exc) {
        console.log('STOPPING PROCESS BECAUSE OF:', exc);
    }
    process.exit(-1);
};

process.on('uncaughtException', handleSignal);
process.on('SIGHUP', handleSignal);
5) Hardware or software problems (maybe OS drivers) with the Raspberry Pi's storage: EROFS means "read-only file system", so the OS may have remounted the SD card read-only after an error.
I'm actually trying to learn a bit about node.js.
At the moment I'm trying to understand the principles of callbacks.
I've written a module that should filter files from a given directory by a specified file extension. But it won't work. I've tested a bit and noticed that the function getFiles is called more than once. But why? I can't find the mistake; can someone help me understand my problem? If someone thinks my code is ugly, please give me a better example, thanks.
So here's my code:
//Returns a list of filtered files from a specified directory and extension
function getFilteredFiles(dir, ext, callback) {
    getFiles(dir, function (error, data) {
        if (error) {
            callback(error);
        }
        else {
            var result = getFilteredFiles(data, ext);
            callback(null, result);
        }
    });
}

//Reading files from a given directory
function getFiles(dir, callback) {
    var fs = require('fs');
    console.log(typeof dir);
    fs.readdir(dir, function (err, data) {
        if (err) {
            callback(err);
        }
        else {
            callback(null, data);
        }
    });
}

//Filters a file by a specified extension
function filterFiles(data, extension) {
    var path = require('path');
    return data.filter(function (file) {
        return path.extname(file) == '.' + extension;
    });
}
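A likely culprit, for anyone reading along (my reading of the code, since the thread has no answer): on the success path, getFilteredFiles calls itself with (data, ext) instead of calling filterFiles, so the directory listing is fed straight back into getFiles, which is why getFiles runs more than once. A sketch of the presumably intended version:

    //Returns a list of filtered files from a specified directory and extension
    function getFilteredFiles(dir, ext, callback) {
        getFiles(dir, function (error, data) {
            if (error) {
                callback(error);
            }
            else {
                // filterFiles, not getFilteredFiles: filter the listing and hand it back
                callback(null, filterFiles(data, ext));
            }
        });
    }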
I'm new to NodeJS and I wonder how to properly send the content of several files using FS and Socket.IO.
My problem is more about best practices in Node/JavaScript than the actual 'raw' logic of my scripts.
So, the purpose of my app is to watch a log file (File1.log) and a result file (File2.log).
Until File2.log contains a key string (such as "Done" or "Error"), I need to keep sending the content of File1.log to the client.
When the key ("Error", "Done") has been read, I send the result to the client and have to launch the same process for another couple of log/result files, after having closed the FileWatcher on the first one.
In the end, I need to close the connection and stop all of the sleeping FileWatcher processes.
The 'streaming' of my files is working pretty well, but I am confused about the best way to switch between the different FileWatcher processes and how to notify the client.
Server.JS
/*
 * [SomeCode]...
 */

io.sockets.on('connection', function (client) {
    //Starting the process for the first couple of files
    client.on('logA', function (res) {
        var PATH_to_A = "path/to/A/directory/";
        readFile(client, PATH_to_A);
    });

    //Starting the process for the second couple of files
    client.on('logB', function (res) {
        //I need to stop the first readFile watcher process
        var PATH_to_B = "path/to/B/directory/";
        readFile(client, PATH_to_B);
    });
});

function readFile(client, PATH) {
    var File1 = path.join(PATH, 'File1.log');
    var File2 = path.join(PATH, 'File2.log');

    //Get the file stats
    fs.stat(File1, function (err, stats) {
        if (err) throw err;
        //Send the data;
    });

    //Watch the first file
    var w1 = fs.watch(File1, function (status, file) {
        if (status == "change") {
            fs.stat(File1, function (err, stats) {
                if (err) throw err;
                //Send the data;
            });
        }
    });

    //Watch the second file
    var w2 = fs.watch(File2, function (status, file) {
        if (status == "change") {
            fs.readFile(File2, "utf8", function (err, body) {
                if (err) throw err;
                //Some Code....
                client.emit('done', body);
            });
        }
    });

    //Closing FileWatcher
    client.on('ack', function () {
        w1.close();
        w2.close();
    });
}
Client.JS
var socket = io.connect('http://127.0.0.1:8000');

//On connect, waiting for the first couple of files
socket.on('connect', function (server) {
    socket.emit('init', data);
    socket.emit('logA');
});

//If the first process is done, I ask for the second couple of files
socket.on('done', function (message) {
    socket.emit('ack');
    socket.emit('logB');
});
Thanks for your help!
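One possible pattern for the watcher switching (a sketch with illustrative names, not from the original post): keep the active watchers in a list scoped outside readFile and close them before starting the next pair.

    var activeWatchers = [];

    function stopWatchers() {
        activeWatchers.forEach(function (w) { w.close(); });
        activeWatchers = [];
    }

    function readFile(client, PATH) {
        stopWatchers(); // close the previous pair before watching the next one

        var File1 = path.join(PATH, 'File1.log');
        var File2 = path.join(PATH, 'File2.log');

        var w1 = fs.watch(File1, function (status) { /* send File1 data */ });
        var w2 = fs.watch(File2, function (status) { /* look for Done/Error in File2 */ });
        activeWatchers.push(w1, w2);
    }

    // Calling stopWatchers() from a 'disconnect' handler also covers stopping
    // the sleeping watchers when the client closes the connection.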