I have the following code in index.js
var settingsFile = "config.json";
var settings = JSON.parse(require("fs").readFileSync(settingsFile));
const net = require('net');
const robot = require("robotjs");
const fs = require("fs");
var client;
var customKeys = {
    "scroll_up": 'robot.scrollMouse(50, "up");',
    "scroll_down": "robot.scrollMouse(50, 'down');"
};
function startCommunication(address, port) {
    client = net.connect({port: port, host: address}, () => {
        // 'connect' listener
        console.log('connected to server!');
        //client.write('world!\r\n');
    });
    client.on('data', (data) => {
        console.log(data.toString());
        var string = data.toString();
        console.log(settings.keys[string.substr(1)]);
        if (string.substr(0, 1) == "d") {
            robot.keyToggle(settings.keys[string.substr(1)], "down");
        } else {
            robot.keyToggle(settings.keys[string.substr(1)], "up");
        }
        //client.end();
    });
    client.on('end', () => {
        console.log('disconnected from server');
    });
}
startCommunication(settings.address, settings.port);
I also have this code in config.json, i.e. what is parsed into the settings variable.
{
    "port": 5555,
    "address": "192.168.1.118",
    "keys": {
        "KEY_A": "a",
        "KEY_B": "b",
        "KEY_X": "x",
        "KEY_Y": "y",
        "KEY_L": "y",
        "KEY_R": "t",
        "KEY_DUP": "up",
        "KEY_DDOWN": "down",
        "KEY_DLEFT": "left",
        "KEY_DRIGHT": "right",
        "KEY_START": "z",
        "KEY_SELECT": "q"
    }
}
The problem is that when I get down to either of the robot.keyToggle statements I get the error
Error: Invalid key code specified.
This means that, as the error clearly states, it is getting an invalid key code. I am guessing that it is some stupid mistake that I made. data in my testing is equal to "dKEY_DRIGHT". The variable string is equal to that, but I need to get rid of the d in order for it to work. When I use the live console I am able to get the data I need with the same code, but something goes wrong when it is run from the file. Anything helps :)
You may try using node-key-sender to send key presses to your operating system.
Install it with npm install --save-dev node-key-sender.
And send a key to the keyboard using:
var ks = require('node-key-sender');
ks.sendKey('up');
All the values in your config ('a', 'b', ...) are accepted by the lib. You can send them directly.
Check the documentation page for more information: https://www.npmjs.com/package/node-key-sender.
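For example, and only as an untested sketch, it could slot into the data handler from your question like this (reusing your settings.keys mapping and the d/u message prefix; note that node-key-sender sends a complete key press, so the separate down/up toggles collapse into a single press):
var ks = require('node-key-sender');

client.on('data', (data) => {
    var string = data.toString();
    var key = settings.keys[string.substr(1)];
    // Send a full key press only on the "d" (down) message,
    // so the key is not pressed twice per down/up pair.
    if (string.substr(0, 1) === "d" && key) {
        ks.sendKey(key);
    }
});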
I use Watson Assistant v1.
My problem is that every time I make a call to the code in Node.js, where I return the context so as to have a coordinated conversation, the context is only updated once and I get stuck in a node of the conversation.
This is my code:
client.on('message', message => {
    //general variables
    var carpetaIndividual = <../../../>
    var cuerpoMensaje = <....>
    var emisorMensaje = <....>
    //detect if context exists
    if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
        var watsonContexto = require(carpetaIndividual + '/contexto.json');
        var variableContexto = watsonContexto;
    } else {
        var variableContexto = {}
    }
    //connection with Watson Assistant
    assistant.message(
        {
            input: { text: cuerpoMensaje },
            workspaceId: '<>',
            context: variableContexto,
        })
        .then(response => {
            let messageWatson = response.result.output.text[0];
            let contextoWatson = response.result.context;
            console.log('Chatbot: ' + messageWatson);
            //Save and create JSON file for context
            fs.writeFile(carpetaIndividual + '/contexto.json', JSON.stringify(contextoWatson), 'utf8', function (err) {
                if (err) {
                    console.error(err);
                }
            });
            //Send messages to my application
            client.sendMessage(emisorMensaje, messageWatson)
        })
        .catch(err => {
            console.log(err);
        });
});
client.initialize();
The contexto.json file is updated, but when it is read, the code only picks up the first version of contexto.json and not the later updates.
This will be because you are using require to read the .json file. For all subsequent requires of an already-required file, the data is cached and reused.
You will need to use fs.readFileSync and JSON.parse instead:
// detect if context exists
if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
    var watsonContexto = fs.readFileSync(carpetaIndividual + '/contexto.json');
    // Converting to JSON
    var variableContexto = JSON.parse(watsonContexto);
} else {
    var variableContexto = {}
}
There is another subtle problem with your code: you are relying on your async call to fs.writeFile completing before you read the file. This will be the case most of the time, but since you don't wait for fs.writeFile to complete, there is a chance that you may try to read the file before it has been written.
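If blocking briefly is acceptable, a minimal sketch of the synchronous variant (same file path and context variable as above) would be:
// Written synchronously, so the file is on disk before the next
// message handler tries to read it.
fs.writeFileSync(carpetaIndividual + '/contexto.json', JSON.stringify(contextoWatson), 'utf8');
Alternatively, stay asynchronous and only read the file after the fs.writeFile callback has fired.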
I'm working on a home-automation (IoT) project where my websocket server is a subscriber to an MQTT broker. It gets temperature and light intensity data from a microcontroller. In a nutshell, the data is saved as JSON, and then I have to save a log of all the received data in a file. I used .writeFileSync(), but it only resulted in [object Object], and I have to edit data.json manually before I can run my program because it gives an error if I don't.
This is the script:
var config = require('./data.json');
function writeData() {
    fs.writeFileSync('data.json', config);
}
Then I tried changing it to:
var config = require('./data.json');
let data2 = JSON.stringify(config);
function writeData() {
    fs.writeFileSync('data-2.json', data2);
}
but I can't find the file named data-2.json.
Please help.
EDIT
I've called the writeData() function in this part:
s.on('dev:on', function (id) {
    if (id == 'lamp1') {
        config.lamp1 = true;
    } else if (id == 'fan1') {
        config.fan1 = true;
    }
    client.publish(id, "true");
    writeData();
    console.log('Device ON RECEIVED for ' + id);
});
And the rest of the code that follows is similar to this.
This should work:
const fs = require('fs');
let config = require('./data.json');

function writeData() {
    fs.writeFileSync('data-2.json', JSON.stringify(config));
}
writeData();
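If you also need to load the saved state back (for example on the next start-up), a small sketch along the same lines, assuming the data-2.json file name used above, could be:
const fs = require('fs');

// Read the saved state back into an object; fall back to an empty
// object if the file has not been written yet.
function readData() {
    if (!fs.existsSync('data-2.json')) {
        return {};
    }
    return JSON.parse(fs.readFileSync('data-2.json', 'utf8'));
}

let config = readData();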
If I execute a certain shell command in Node.js, the output goes to the console. Is there a way I can save it in a variable so it can be POSTed to an SQLite database?
const shell = require('shelljs');
shell.exec('arp -a');
In this scenario, I want to store the IP address of a specific MAC/Physical address into the database. How can this be done?
Any help would be much appreciated. Thank you
You need to get the output of the command you're passing to exec. To do that, just read the stdout property of the return value, like this:
const shell = require('shelljs');
const stdout = shell.exec('arp -a').stdout;
Then just parse that output to get your IP address:
const entries = stdout.split('\r\n');
// entries sample
[ '',
'Interface: 10.17.60.53 --- 0xd',
' Internet Address Physical Address Type',
' 10.11.10.52 6c-4b-90-1d-97-b8 dynamic ',
' 10.10.11.254 xx-yy-53-2e-98-44 dynamic ']
Then you can filter your wanted address with some more manipulation.
EDIT:
To get the ip address, you could do:
let ipAddr = null;
for (let i = 0; i < entries.length; i++) {
    if (entries[i].indexOf('6c-4b-90-1d-97-b8') > -1) {
        ipAddr = entries[i].match(/\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b/)[0];
        break;
    }
}
console.log(ipAddr); // '10.11.10.52'
I'm merely copy pasting from the docs. You should research more.
You need to add a listener to the child process's stdout:
var shell = require('shelljs');
var child = shell.exec('arp -a', {async: true});
child.stdout.on('data', function (data) {
    /* ... do something with data ... */
});
Or add the callback directly when calling exec:
shell.exec('some_long_running_process', function (code, stdout, stderr) {
    console.log('Exit code:', code);
    console.log('Program output:', stdout);
    console.log('Program stderr:', stderr);
});
You can access the result of the command run using shell.exec with the .output property. Try the code below.
var shell = require('shelljs');
var result = shell.exec('arp -a').output;
If you don't want the result in the console, you can specify the silent option.
var result = shell.exec('arp -a', {silent: true}).output;
Now, you can use regular expressions to extract the IP and MAC address from the result.
I am getting the result of the command like below:
? (xxx.xxx.xxx.xxx) at xx:xx:xx:xx:xx:xx [ether] on eth0
? (yyy.yyy.yyy.yyy) at yy:yy:yy:yy:yy:yy [ether] on eth0
You can use the following code to extract ip and mac.
var res = result.split("\n").map(function(item){
return item.match(/\((\d+\.\d+\.\d+\.\d+)\) at (..:..:..:..:..:..)/);
});
console.log(res[0][1]); //IP of first interface
console.log(res[0][2]); //MAC of first interface
console.log(res[1][1]); //IP of second interface
console.log(res[1][2]); //MAC of second interface
NOTE
I was not able to find the .output property in the documentation but trying the shell.exec function in the node console revealed it.
The .stdout property and the exec function mentioned in the other answers don't work for me; they give undefined errors.
I'm trying to use mock-cli to stub process.argv in Mocha tests for a CLI app. I want to test that a message is console.logged when an incorrect argument ("imit") is passed to process.argv (as defined by commands).
I'm trying to adapt the example from the documentation, but I don't think I have set everything up correctly.
It passes when I comment out "stdin: require('../mocks/fakeInputStream'), // Hook up a fake input stream", though I know it's not working correctly.
It fails with TypeError: sourceStream.on is not a function when run as described below.
Can someone see what I'm missing?
/index.js
var commands = ['init'];

function getGitHeadArgs() {
    return process.argv.slice(2, process.argv.length);
}

if (getGitHeadArgs().length) {
    if (!commands.includes(getGitHeadArgs()[0])) {
        console.log("Silly Githead! That's not a githead command");
    }
    eval(getGitHeadArgs()[0])();
} else {
    console.log("You didn't tell githead to do anything!");
}
/testIndex.js
var assert = require('assert');
var index = require('../index.js');
var mockCli = require("mock-cli");

describe("incorrect argument", function() {
    it("imit throws an error if an invalid command is raised", function() {
        var argv = ['node', '../index.js', 'imit']; // Fake argv
        var stdio = {
            stdin: require('../mocks/fakeInputStream'), // Hook up a fake input stream
            stdout: process.stdout, // Display the captured output in the main console
            stderr: process.stderr // Display the captured error output in the main console
        };
        var kill = mockCli(argv, stdio, function onProcessComplete(error, result) {
            var exitCode = result.code; // Process exit code
            var stdout = result.stdout; // UTF-8 string contents of process.stdout
            var stderr = result.stderr; // UTF-8 string contents of process.stderr
            assert.equal(exitCode, 0);
            assert.equal(stdout, "Silly Githead! That's not a githead command\n");
            assert.equal(stderr, '');
        });
        // Execute the CLI task
        require('../index.js');
        // Kill the task if still running after one second
        setTimeout(kill, 1000);
    });
});
Is ../mocks/fakeInputStream a valid path?
Is the object at ../mocks/fakeInputStream a valid instance of ReadableStream?
The source code is available at GitHub.
Make sure you meet the requirements for the captureStdin(sourceStream, callback) function.
The module uses that function to capture your fakeInputStream and pipe it into a captureStream.
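For reference, here is a minimal sketch of what ../mocks/fakeInputStream.js could export to satisfy that requirement; the contents are an assumption, not code from the question:
// mocks/fakeInputStream.js
// A readable stream that ends immediately, so mock-cli's
// captureStdin(sourceStream, callback) has a real stream to pipe from.
const { Readable } = require('stream');

const fakeInputStream = new Readable({
    read() {
        this.push(null); // no input, signal end-of-stream
    }
});

module.exports = fakeInputStream;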
I have the following code in server/statusboard.js:
var require = __meteor_bootstrap__.require,
    request = require("request");

function getServices(services) {
    services = [];
    request('http://some-server/vshell/index.php?type=services&mode=json', function (error, response, body) {
        var resJSON = JSON.parse(body);
        _.each(resJSON, function (data) {
            var host = data["host_name"];
            var service = data["service_description"];
            var hardState = data["last_hard_state"];
            var currState = data["current_state"];
            services += {host: host, service: service, hardState: hardState, currState: currState};
            Services.insert({host: host, service: service, hardState: hardState, currState: currState});
        });
    });
}

Meteor.startup(function () {
    var services = [];
    getServices(services);
    console.log(services);
});
Basically, it's pulling some data from a JSON feed and trying to push it into a collection.
When I start up Meteor I get the following exception:
app/packages/livedata/livedata_server.js:781
throw exception;
^
Error: Meteor code must always run within a Fiber
at [object Object].withValue (app/packages/meteor/dynamics_nodejs.js:22:15)
at [object Object].apply (app/packages/livedata/livedata_server.js:767:45)
at [object Object].insert (app/packages/mongo-livedata/collection.js:199:21)
at app/server/statusboard.js:15:16
at Array.forEach (native)
at Function.<anonymous> (app/packages/underscore/underscore.js:76:11)
at Request._callback (app/server/statusboard.js:9:7)
at Request.callback (/usr/local/meteor/lib/node_modules/request/main.js:108:22)
at Request.<anonymous> (/usr/local/meteor/lib/node_modules/request/main.js:468:18)
at Request.emit (events.js:67:17)
Exited with code: 1
I'm not too sure what that error means. Does anyone have any ideas, or can anyone suggest a different approach?
Just wrapping your function in a Fiber might not be enough and can lead to unexpected behavior.
The reason is that, along with the Fiber itself, Meteor requires a set of variables attached to the fiber. Meteor uses data attached to a fiber as a dynamic scope, and the easiest way to use it with a third-party API is Meteor.bindEnvironment.
T.post('someurl', Meteor.bindEnvironment(function (err, res) {
    // do stuff
    // can access Meteor.userId
    // still have MongoDB write fence
}, function () { console.log('Failed to bind environment'); }));
Watch these videos on evented mind if you want to know more:
https://www.eventedmind.com/posts/meteor-dynamic-scoping-with-environment-variables
https://www.eventedmind.com/posts/meteor-what-is-meteor-bindenvironment
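Applied to the request callback from the question, an untested sketch using the same getServices and Services names might look like:
function getServices(services) {
    services = [];
    request('http://some-server/vshell/index.php?type=services&mode=json',
        Meteor.bindEnvironment(function (error, response, body) {
            var resJSON = JSON.parse(body);
            _.each(resJSON, function (data) {
                // Services.insert now runs with Meteor's environment attached
                Services.insert({
                    host: data["host_name"],
                    service: data["service_description"],
                    hardState: data["last_hard_state"],
                    currState: data["current_state"]
                });
            });
        }, function () { console.log('Failed to bind environment'); })
    );
}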
As mentioned above, it is because you're executing code within a callback.
Any code you're running on the server-side needs to be contained within a Fiber.
Try changing your getServices function to look like this:
function getServices(services) {
    Fiber(function() {
        services = [];
        request('http://some-server/vshell/index.php?type=services&mode=json', function (error, response, body) {
            var resJSON = JSON.parse(body);
            _.each(resJSON, function (data) {
                var host = data["host_name"];
                var service = data["service_description"];
                var hardState = data["last_hard_state"];
                var currState = data["current_state"];
                services += {host: host, service: service, hardState: hardState, currState: currState};
                Services.insert({host: host, service: service, hardState: hardState, currState: currState});
            });
        });
    }).run();
}
I just ran into a similar problem and this worked for me. I should say, though, that I am very new to this and I do not know if this is how it should be done.
You probably could get away with only wrapping your insert statement in the Fiber, but I am not positive.
Based on my tests, you have to wrap the insert itself; the code I tested is similar to the example above.
For example, I did this and it still failed with the Fibers error.
function insertPost(args) {
    if (args) {
        Fiber(function() {
            post_text = args.text.slice(0, 140);
            T.post('statuses/update', { status: post_text },
                function (err, reply) {
                    if (reply) {
                        // TODO remove console output
                        console.log('reply: ' + JSON.stringify(reply, 0, 4));
                        console.log('incoming twitter string: ' + reply.id_str);
                        // TODO insert record
                        var ts = Date.now();
                        id = Posts.insert({
                            post: post_text,
                            twitter_id_str: reply.id_str,
                            created: ts
                        });
                    } else {
                        console.log('error: ' + JSON.stringify(err, 0, 4));
                        // TODO maybe store locally even though it failed on twitter
                        // and run service in background to push them later?
                    }
                }
            );
        }).run();
    }
}
I did this and it ran fine with no errors.
function insertPost(args) {
    if (args) {
        post_text = args.text.slice(0, 140);
        T.post('statuses/update', { status: post_text },
            function (err, reply) {
                if (reply) {
                    // TODO remove console output
                    console.log('reply: ' + JSON.stringify(reply, 0, 4));
                    console.log('incoming twitter string: ' + reply.id_str);
                    // TODO insert record
                    var ts = Date.now();
                    Fiber(function() {
                        id = Posts.insert({
                            post: post_text,
                            twitter_id_str: reply.id_str,
                            created: ts
                        });
                    }).run();
                } else {
                    console.log('error: ' + JSON.stringify(err, 0, 4));
                    // TODO maybe store locally even though it failed on twitter
                    // and run service in background to push them later?
                }
            }
        );
    }
}
I thought this might help others encountering this issue. I have not yet tested calling the async type of external service after internal code and wrapping that in a Fiber. That might be worth testing as well. In my case I needed to know the remote action had happened before doing my local action.
Hope this contributes to this question thread.