Access function in external js file from button in webapp - javascript

I have a file dmreboot_service.js in my /js folder. When I run this file using node /js/dmreboot_service.js it successfully invokes a direct method in Azure.
I need to be able to execute this function or file on a button click from my web app.
I tried loading the script into the head of my HTML using:
<script src="js/dmreboot_service.js"></script>
I put an alert in the external file. If I put the alert at the top of the file it fires, but at the bottom it doesn't, so the contents of the file are not fully loading.
The content of dmreboot_service.js is:
'use strict';

var Registry = require('azure-iothub').Registry;
var Client = require('azure-iothub').Client;

var connectionString = "HostName=XxxxxxxX.azure-devices.net;SharedAccessKeyName=service;SharedAccessKey=XxxxxxxxxxxxxxxxxxxxxxxxxxxxxxX=";
var registry = Registry.fromConnectionString(connectionString);
var client = Client.fromConnectionString(connectionString);
var deviceToReboot = 'Runner';

var startRebootDevice = function (twin) {
    var methodName = "reboot";
    var methodParams = {
        methodName: methodName,
        payload: null,
        timeoutInSeconds: 30
    };
    client.invokeDeviceMethod(deviceToReboot, methodParams, function (err, result) {
        if (err) {
            console.error("Direct method error: " + err.message);
        } else {
            console.log("Successfully invoked the device to reboot.");
        }
    });
};

var queryTwinLastReboot = function () {
    registry.getTwin(deviceToReboot, function (err, twin) {
        // check err before touching twin
        if (err) {
            console.error('Could not query twins: ' + err.constructor.name + ': ' + err.message);
        } else if (twin.properties.reported.iothubDM != null) {
            var lastRebootTime = twin.properties.reported.iothubDM.reboot.lastReboot;
            console.log('Last reboot time: ' + JSON.stringify(lastRebootTime, null, 2));
        } else {
            console.log('Waiting for device to report last reboot time.');
        }
    });
};

startRebootDevice();
setInterval(queryTwinLastReboot, 2000);
alert('dmreboot included!');
I have also tried creating a function in the head of my HTML that contains the entire contents of dmreboot_service.js, but although the function is called successfully, the code does not execute.
This is the last part of a project that I need to get working. I'm fairly new to this, and this is driving me nuts!! Any advice much appreciated.

I usually handle clicks in HTML with JavaScript like so:

document.querySelector('.whateverclassnameyouchoose').addEventListener('click', onClick);

function onClick(e) {
    // whatever you want the function to do
}
Hope it helps :)
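One thing worth noting: dmreboot_service.js uses require and the azure-iothub package, which only exist in Node.js, so a browser stops executing the script at the first require call; that's why an alert at the top of the file fires but one at the bottom never does. The usual pattern is to keep the Azure code on a server and have the button call it over HTTP. A minimal sketch, assuming a hypothetical Express server with a /reboot endpoint (the endpoint name and port are made up for illustration):

// server.js (Node) -- runs alongside the code from dmreboot_service.js
var express = require('express');
var app = express();

app.post('/reboot', function (req, res) {
    startRebootDevice();   // the function defined in dmreboot_service.js
    res.sendStatus(202);   // accepted -- the reboot was requested
});

app.listen(3000);

And in the web page, wire the button to that endpoint:

// browser side
document.querySelector('.reboot-btn').addEventListener('click', function () {
    fetch('/reboot', { method: 'POST' });
});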


Watson Assistant context is not updated

I'm using Watson Assistant v1. My problem is that every time I make a call to my Node.js code, where I return the context in order to have a coherent conversation, the context is only updated once and I get stuck on one node of the conversation.
This is my code:
client.on('message', message => {
    // general variables
    var carpetaIndividual = <../../../>
    var cuerpoMensaje = <....>
    var emisorMensaje = <....>
    // detect if context exists
    if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
        var watsonContexto = require(carpetaIndividual + '/contexto.json');
        var variableContexto = watsonContexto;
    } else {
        var variableContexto = {};
    }
    // connection with Watson Assistant
    assistant.message({
        input: { text: cuerpoMensaje },
        workspaceId: '<>',
        context: variableContexto,
    })
    .then(response => {
        let messageWatson = response.result.output.text[0];
        let contextoWatson = response.result.context;
        console.log('Chatbot: ' + messageWatson);
        // Save and create JSON file for context
        fs.writeFile(carpetaIndividual + '/contexto.json', JSON.stringify(contextoWatson), 'utf8', function (err) {
            if (err) {
                console.error(err);
            }
        });
        // Send messages to my application
        client.sendMessage(emisorMensaje, messageWatson);
    })
    .catch(err => {
        console.log(err);
    });
});
client.initialize();
The contexto.json file is updated on disk, but when it is read back the code only ever sees the first version of the file, not the later updates.
This will be because you are using require to read the .json file. Node caches the result of every require, so all subsequent requires of an already-required file reuse the cached data instead of re-reading the file.
You will need to use fs.readFileSync (or fs.readFile) and JSON.parse:
// detect if context exists
if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
    var watsonContexto = fs.readFileSync(carpetaIndividual + '/contexto.json');
    // Converting to JSON
    var variableContexto = JSON.parse(watsonContexto);
} else {
    var variableContexto = {};
}
There is another, subtler problem with your code: you are relying on the async call to fs.writeFile completing before you next read the file. That will be the case most of the time, but since you don't wait for fs.writeFile to complete, there is a chance you will read the file before it has been written.
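One way to close that gap is to send the reply only after the write has finished, by moving the send into the write callback. A minimal sketch, reusing the names from the code above:

// save the context first, then message the user only once the write has completed
fs.writeFile(carpetaIndividual + '/contexto.json', JSON.stringify(contextoWatson), 'utf8', function (err) {
    if (err) {
        console.error(err);
        return;
    }
    client.sendMessage(emisorMensaje, messageWatson);
});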

How do I make JakeJS watch work reliably with Vim?

I'm using a Jakefile to help me update WordPress pages from the command line, with Jake's watch task to rebuild every time I edit a file. When I edit a file with Vim, after the first successful build, Jake fails with the following error:
WatchTask started for: default
cp home.html dist/home.html
exec wp --path=../wordpress post list --post_type=page --format=json --fields=ID,post_name { silent: true }
exec wp --path=../wordpress post update 2 dist/home.html --post_type=page
Success: Updated post 2.
jake aborted.
Error: File-task home.html has no existing file, and no action to create one.
at FileBase.isNeeded (/usr/local/lib/node_modules/jake/lib/task/file_task.js:50:17)
at TaskBase.run (/usr/local/lib/node_modules/jake/lib/task/task.js:256:26)
(See full trace by running task with --trace)
I've tried using a sleep function in the rule to delay the rebuild. I tried this because Vim, when saving a file, writes the contents to a new temp file and then renames the temp file to the original file name; I think the build fails because Jake tries to rebuild before the rename has finished. The sleep doesn't work reliably: it may work once or twice, but then it fails in the same way as above.
Here is my Jakefile:
var shell = require('shelljs');
var sleep = require('sleep');

shell.config.verbose = true;

const destDir = 'dist';
const wpDir = '../wordpress';

var files = new jake.FileList();
files.include('*.html');

var outputFiles = files.toArray().map(function (fileName) {
    return destDir + '/' + fileName;
});

var sourceFile = function (name) {
    return name.substr(name.lastIndexOf('/') + 1);
};

function objectToStr(object) {
    var s = '';
    for (var property in object) {
        s += property + ': ' + object[property] + '\n';
    }
    return s;
}

function rmExt(name) {
    return name.substr(0, name.lastIndexOf('.'));
}

directory(destDir);

task('default', [destDir].concat(outputFiles));

task('clean', function () {
    jake.rmRf(destDir);
});

rule('dist/%.html', sourceFile, function () {
    shell.cp(this.source, this.name);
    var pages = JSON.parse(shell.exec('wp --path=' + wpDir
        + ' post list --post_type=page --format=json --fields=ID,post_name',
        { silent: true }).stdout);
    var postId = null;
    var l = pages.length;
    for (var i = 0; i < l; i++) {
        if (pages[i].post_name === rmExt(this.source)) {
            postId = pages[i].ID;
            break;
        }
    }
    if (postId !== null) {
        shell.exec('wp --path=' + wpDir + ' post update ' + postId
            + ' ' + this.name + ' --post_type=page');
    } else {
        shell.echo('Unable to find matching post ID for file: ' + this.name);
    }
    shell.echo('1');
    sleep.sleep(2);
    shell.echo('2');
});
watchTask('watch', ['default'], function () {
    this.watchFiles.include('*.html');
});
I found a reliable solution using the node-watch library. I replaced the watchTask with a normal Jake task and used node-watch to watch the current working directory, invoking the task I wanted to run on each change. node-watch can filter files with a regex, so the watch won't be triggered by Vim's swap files and the like. Below is the code I used:
var watch = require('node-watch');

task('watch', function () {
    var defaultTask = jake.Task['default'];
    defaultTask.invoke();
    defaultTask.reenable(true);
    // only rebuild for .html files (ignores Vim swap files)
    watch('./', { filter: /\.html$/ }, function (evt, name) {
        defaultTask.invoke();
        defaultTask.reenable(true);
    });
});
Note: I also invoke the task once when the watch task first runs, to make sure any changes made before the watch started get built.

Crypto module - Node.js

What is the simplest way to compare a hash of a file without storing it in a database?
For example:
var crypto = require('crypto');
var fs = require('fs');

var filename = __dirname + '/../public/index.html';
var shasum = crypto.createHash('sha1');

var s = fs.ReadStream(filename);
s.on('data', function (d) {
    shasum.update(d);
});
s.on('end', function () {
    var d = shasum.digest('hex');
    console.log(d + ' ' + filename);
    fs.writeFile(__dirname + "/../public/log.txt", d.toString() + '\n', function (err) {
        if (err) {
            console.log(err);
        } else {
            console.log("The file was saved!");
        }
    });
});
The above code logs the hash of the HTML file. If I edit the file, how can I tell whether it has changed? In other words, how can I know if the hash has changed?
Any suggestions?
Edited
Now the hash is being saved in the log file. How can I retrieve the hash from the file and match it against the newly generated one? A code example would be awesome, to give me a better understanding.
There is no difference from that question, but it still isn't clear to me how to implement it.
If you're looking for changes to a file, you can use one of Node's filesystem functions, fs.watch. This is how it's used:
fs.watch(filename, function (event, filename) {
    // event is either 'rename' or 'change'
    // filename is the name of the file which triggered the event
});
The watch function is, however, not very consistent across platforms, so you can use fs.watchFile as an alternative. fs.watchFile uses stat polling, so it's quite a bit slower than fs.watch, which detects file changes instantly.
fs.watch returns an instance of fs.FSWatcher, which emits change and error events; calling .close() stops watching for changes on the file.
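For comparison, a minimal sketch of the polling alternative; the 1000 ms interval is just an illustrative choice:

// fs.watchFile polls the file's stats on an interval (slower, but consistent across platforms)
fs.watchFile(filename, { interval: 1000 }, function (curr, prev) {
    if (curr.mtime.getTime() !== prev.mtime.getTime()) {
        console.log('file changed at ' + curr.mtime);
    }
});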
Here's an example relating to your code:
var filename = __dirname + '/../public/index.html';
var shasum = crypto.createHash('sha1');
var oldhash = null;

var s = fs.ReadStream(filename);
s.on('data', function (d) {
    shasum.update(d);
});
s.on('end', function () {
    var d = shasum.digest('hex');
    console.log(d + ' ' + filename);
    oldhash = d.toString();
    fs.writeFile(__dirname + "/../public/log.txt", d.toString() + '\n', function (err) {
        if (err) {
            console.log(err);
        } else {
            console.log("The file was saved!");
        }
    });
});

// watch the log for changes
fs.watch(__dirname + "/../public/log.txt", function (event, filename) {
    // read the log contents as a string
    fs.readFile(__dirname + "/../public/log.txt", 'utf8', function (err, data) {
        // match the stored hash with the old hash (trim the trailing newline)
        if (data.trim() === oldhash) {
            // do something
        }
    });
});
What's the difference between this question and the previous one you asked? If you don't want to store the hash in a database, store it as a file. If you want to save the hashes for multiple files, you could put them in a JSON object and write it out as a .json file so it's easy to read and write.
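A minimal sketch of that idea; the store's file name and location are made up for illustration:

var fs = require('fs');

var hashStorePath = __dirname + '/hashes.json'; // illustrative location

// read the whole store, or start fresh if it doesn't exist yet
function loadHashes() {
    try {
        return JSON.parse(fs.readFileSync(hashStorePath, 'utf8'));
    } catch (e) {
        return {};
    }
}

// record the latest hash for one file and persist the store
function saveHash(name, hash) {
    var hashes = loadHashes();
    hashes[name] = hash;
    fs.writeFileSync(hashStorePath, JSON.stringify(hashes, null, 2));
}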
EDIT
Given what you added to your question, it should be pretty simple. You might write a function to do the check and re-write the stored hash:
function updateHash(name, html, callback) {
    var sha = crypto.createHash('sha1');
    sha.update(html);
    var newHash = sha.digest('hex');
    var hashFileName = name + '.sha';
    fs.readFile(hashFileName, 'utf8', function (err, oldHash) {
        var changed = true;
        if (err)
            console.log(err); // probably indicates the file doesn't exist, but you should consider doing better error handling
        if (oldHash === newHash)
            changed = false;
        fs.writeFile(hashFileName, newHash, { encoding: 'utf8' }, function (err) {
            callback(err, changed);
        });
    });
}
updateHash('index.html', "<html><head><title>...", function (err, isChanged) {
    // do something with this information ?
    console.log(isChanged);
});

Node.js - Organising code and closures - SFTP/Inotify

I was hoping I could get some advice on why my Node.js program is behaving the way it is.
I am using two modules, node-sftp and node-inotify. I have set up node-inotify to watch a directory and call a function when something is written there, the function being an SFTP upload.
Now, the problem: processing one file at a time is fine, but when I drop 4 files there in one go, the function is called four times but only one SFTP upload goes through.
Do I need to order my code in a particular way to ensure that the SFTP upload occurs x times? Is this something to do with closures, perhaps?
This is a basic version of my code:
"event_handler" is called when something happens in a watched directory.
"check_event" figures out whether this type of event is one we want, in this case a "write".
"ftp_to_server" prepares the connection details.
"do_ftp" uses the node-sftp module to perform the SFTP upload.
event_handler = function (event) {
    check_event(event, ftp_to_server);
};

=================

function check_event(event, handler) {
    if (event.type === 'xxxxxx') {
        var file_to_process_name = 'abc';
        var file_to_process_dir = 'abc';
        var remote_dir = 'abc';
        handler(file_to_process_name, file_to_process_dir, remote_dir);
    }
}

function ftp_to_server(file_to_process_name, file_to_process_dir, remote_dir) {
    var connection_details = conf.ftp.connections;
    do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir);
}

function do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir) {
    var credentials = {
        // FTP settings here
    };
    var local_file = file_to_process_dir + file_to_process_name;
    var remote_file = remote_dir + file_to_process_name;
    connection = new sftp(credentials, function (err) {
        if (err) {
            throw err;
        }
        connection.writeFile(remote_file, fs.readFileSync(local_file, "utf8"), null, function (err) {
            if (err) {
                throw err;
            }
            console.info('FTP PUT DONE');
        });
    });
}
Your "connection = new sftp(credentials, function(err) {"
should be
var connection = new sftp(credentials, function(err) {
The way you currently have it coded, "connection" is a global and you are writing over it.
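With var, each call to do_ftp keeps its own connection in the callback's closure, so four near-simultaneous uploads no longer clobber one another. A minimal sketch of the corrected function:

function do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir) {
    var credentials = { /* FTP settings here */ };
    var local_file = file_to_process_dir + file_to_process_name;
    var remote_file = remote_dir + file_to_process_name;
    // var scopes connection to this call, so concurrent uploads each get their own
    var connection = new sftp(credentials, function (err) {
        if (err) throw err;
        connection.writeFile(remote_file, fs.readFileSync(local_file, "utf8"), null, function (err) {
            if (err) throw err;
            console.info('FTP PUT DONE');
        });
    });
}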

"Meteor code must always run within a Fiber" when calling Collection.insert on server

I have the following code in server/statusboard.js:
var require = __meteor_bootstrap__.require,
    request = require("request");

function getServices(services) {
    services = [];
    request('http://some-server/vshell/index.php?type=services&mode=json', function (error, response, body) {
        var resJSON = JSON.parse(body);
        _.each(resJSON, function (data) {
            var host = data["host_name"];
            var service = data["service_description"];
            var hardState = data["last_hard_state"];
            var currState = data["current_state"];
            services.push({host: host, service: service, hardState: hardState, currState: currState});
            Services.insert({host: host, service: service, hardState: hardState, currState: currState});
        });
    });
}

Meteor.startup(function () {
    var services = [];
    getServices(services);
    console.log(services);
});
Basically, it's pulling some data from a JSON feed and trying to push it into a collection. When I start up Meteor I get the following exception:
app/packages/livedata/livedata_server.js:781
throw exception;
^
Error: Meteor code must always run within a Fiber
at [object Object].withValue (app/packages/meteor/dynamics_nodejs.js:22:15)
at [object Object].apply (app/packages/livedata/livedata_server.js:767:45)
at [object Object].insert (app/packages/mongo-livedata/collection.js:199:21)
at app/server/statusboard.js:15:16
at Array.forEach (native)
at Function.<anonymous> (app/packages/underscore/underscore.js:76:11)
at Request._callback (app/server/statusboard.js:9:7)
at Request.callback (/usr/local/meteor/lib/node_modules/request/main.js:108:22)
at Request.<anonymous> (/usr/local/meteor/lib/node_modules/request/main.js:468:18)
at Request.emit (events.js:67:17)
Exited with code: 1
I'm not too sure what that error means. Does anyone have any ideas, or can anyone suggest a different approach?
Just wrapping your function in a Fiber might not be enough and can lead to unexpected behavior.
The reason is that, along with the Fiber itself, Meteor requires a set of variables attached to the fiber. Meteor uses the data attached to a fiber as a dynamic scope, and the easiest way to use it with a third-party API is Meteor.bindEnvironment:
T.post('someurl', Meteor.bindEnvironment(function (err, res) {
    // do stuff
    // can access Meteor.userId
    // still have MongoDB write fence
}, function () { console.log('Failed to bind environment'); }));
Watch these videos on EventedMind if you want to know more:
https://www.eventedmind.com/posts/meteor-dynamic-scoping-with-environment-variables
https://www.eventedmind.com/posts/meteor-what-is-meteor-bindenvironment
As mentioned above, this is because you are executing code within a callback.
Any code you're running on the server side needs to be contained within a Fiber.
Try changing your getServices function to look like this:
function getServices(services) {
    Fiber(function () {
        services = [];
        request('http://some-server/vshell/index.php?type=services&mode=json', function (error, response, body) {
            var resJSON = JSON.parse(body);
            _.each(resJSON, function (data) {
                var host = data["host_name"];
                var service = data["service_description"];
                var hardState = data["last_hard_state"];
                var currState = data["current_state"];
                services.push({host: host, service: service, hardState: hardState, currState: currState});
                Services.insert({host: host, service: service, hardState: hardState, currState: currState});
            });
        });
    }).run();
}
I just ran into a similar problem and this worked for me. I have to say, though, that I am very new to this and I do not know if this is how it should be done.
You could probably get away with only wrapping your insert statement in the Fiber, but I am not positive. Based on my tests, you have to wrap the insert as in the example above.
For example, I did this and it still failed with the Fibers error:
function insertPost(args) {
    if (args) {
        Fiber(function () {
            post_text = args.text.slice(0, 140);
            T.post('statuses/update', { status: post_text },
                function (err, reply) {
                    if (reply) {
                        // TODO remove console output
                        console.log('reply: ' + JSON.stringify(reply, 0, 4));
                        console.log('incoming twitter string: ' + reply.id_str);
                        // TODO insert record
                        var ts = Date.now();
                        id = Posts.insert({
                            post: post_text,
                            twitter_id_str: reply.id_str,
                            created: ts
                        });
                    } else {
                        console.log('error: ' + JSON.stringify(err, 0, 4));
                        // TODO maybe store locally even though it failed on twitter
                        // and run service in background to push them later?
                    }
                }
            );
        }).run();
    }
}
I did this and it ran fine with no errors:
function insertPost(args) {
    if (args) {
        post_text = args.text.slice(0, 140);
        T.post('statuses/update', { status: post_text },
            function (err, reply) {
                if (reply) {
                    // TODO remove console output
                    console.log('reply: ' + JSON.stringify(reply, 0, 4));
                    console.log('incoming twitter string: ' + reply.id_str);
                    // TODO insert record
                    var ts = Date.now();
                    Fiber(function () {
                        id = Posts.insert({
                            post: post_text,
                            twitter_id_str: reply.id_str,
                            created: ts
                        });
                    }).run();
                } else {
                    console.log('error: ' + JSON.stringify(err, 0, 4));
                    // TODO maybe store locally even though it failed on twitter
                    // and run service in background to push them later?
                }
            }
        );
    }
}
I thought this might help others encountering this issue. I have not yet tested calling the async external service after internal code and wrapping that in a Fiber; that might be worth testing as well. In my case I needed to know the remote action had happened before doing my local action.
Hope this contributes to this question thread.
