How do I call a Python function from Node.js?

I'm working on making a Homebridge plugin for a project. Homebridge is a Node.js server, which I have running on a Raspberry Pi, that emulates an Apple HomeKit Bridge.
Using this link, I was able to execute Python code from the following Node.js code:
var Service, Characteristic;
var spawn = require('child_process').spawn;
var py = spawn('python', ['/home/pi/Desktop/RFbulb/nRF24L01PLUS.py']);
var data = [10,10,10];
var dataString = '';
var RFstatus = true;

module.exports = function(homebridge) {
    Service = homebridge.hap.Service;
    Characteristic = homebridge.hap.Characteristic;
    homebridge.registerAccessory("homebridge-RFbulb", "RFbulb", RFbulbAccessory);
}

function RFbulbAccessory(log, config) {
    this.log = log;
    this.config = config;
    this.name = config["name"];
    this.address = config["address"];

    this.service = new Service.Lightbulb(this.name);
    this.service
        .getCharacteristic(Characteristic.On)
        .on('get', this.getOn.bind(this))
        .on('set', this.setOn.bind(this));
}

RFbulbAccessory.prototype.setOn = function(on, callback) { // This is the function throwing the error
    var state = on ? "on" : "off";
    if (state == "on") {
        data = [1, parseInt(this.address, 10), 100];
        dataString = '';
        py.stdout.on('data', function(data) {
            dataString += data.toString();
        });
        py.stdout.on('end', function() {
            console.log(dataString);
        });
        py.stdin.write(JSON.stringify(data));
        py.stdin.end();
        RFstatus = true;
    }
    callback(null);
}

RFbulbAccessory.prototype.getServices = function() {
    return [this.service];
}
Interestingly enough, when I activate the setOn function the first time (for example, to turn the device on) it works fine, but when I activate the setOn function a second time (to turn the device off) I get the following errors and the server exits:
events.js:141
throw er; // Unhandled 'error' event
^
Error: write after end
at writeAfterEnd (_stream_writable.js:166:12)
at Socket.Writable.write (_stream_writable.js:211:5)
at Socket.write (net.js:642:40)
at RFbulbAccessory.setOn (/usr/lib/node_modules/homebridge-RFbulb/index.js:47:12)
at emitThree (events.js:97:13)
at emit (events.js:175:7)
at Characteristic.setValue (/usr/lib/node_modules/homebridge/node_modules/hap-nodejs/lib/Characteristic.js:155:10)
at Bridge.<anonymous> (/usr/lib/node_modules/homebridge/node_modules/hap-nodejs/lib/Accessory.js:710:22)
at Array.forEach (native)
at Bridge.Accessory._handleSetCharacteristics (/usr/lib/node_modules/homebridge/node_modules/hap-nodejs/lib/Accessory.js:655:8)
What could be causing this error? Especially since the function appears to work fine for a single use.

You're getting that error because you're closing the input stream:
py.stdin.end();
After a stream has been closed, you can no longer write to it like you are here:
py.stdin.write(JSON.stringify(data));
If the Python program you're running accepts multiple commands over STDIN then simply remove the py.stdin.end() line.
However, it's likely that your Python program runs once then completes. If that's the case, you will need to respawn the process every time you want the program to run.
if (state === "on") {
py = spawn('python', ['/home/pi/Desktop/RFbulb/nRF24L01PLUS.py']);
...
}
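For reference, here is a minimal sketch of what setOn could look like with the respawn approach, assuming the Python script handles exactly one JSON message per run (the script path and data format are taken from the question):

RFbulbAccessory.prototype.setOn = function(on, callback) {
    var self = this;
    if (on) {
        // Spawn a fresh process for each command, since the previous one
        // exited after its stdin was closed
        var py = spawn('python', ['/home/pi/Desktop/RFbulb/nRF24L01PLUS.py']);
        var output = '';
        py.stdout.on('data', function(chunk) {
            output += chunk.toString();
        });
        py.stdout.on('end', function() {
            console.log(output);
        });
        py.stdin.write(JSON.stringify([1, parseInt(self.address, 10), 100]));
        py.stdin.end(); // safe to end now: this process is not reused
    }
    callback(null);
};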


Javascript Await Changes Local Variables? [closed]

Anyone able to explain what I'm doing wrong with my use of asynchronous functions in Javascript?
Basically, I must use an asynchronous function call in my Node.js code to grab an open port for me to use. There is a local variable, set outside of the asynchronous call, that I can access/use just fine until I await the asynchronous function's result. After that, the local variable is undefined.
(async () => {
    console.log("CHECK AFTER ASYNC1: " + csvFilePath);

    // First, grab a valid open port
    var port;
    while (!port || portsInProcess.indexOf(port) >= 0) {
        console.log("CHECK AFTER ASYNC2: " + csvFilePath);
        port = await getPort();
        console.log(port);
    }
    console.log("CHECK AFTER ASYNC3: " + csvFilePath);

    portsInProcess.push(port);

    // ... more code below...
Checks #1 and 2 are fine for the csvFilePath variable, but check #3 shows that it's undefined. The port number, however, is fine. This leads me to believe that there's some weirdness with asynchronous function calls in Javascript that ONLY affects local variables; the global variables I use further down are just fine. Unfortunately here, I cannot make the csvFilePath variable global since that will introduce race conditions on that variable too (which I'm preventing elsewhere; the while loop is to help prevent race conditions on the port number, which is basically unused in my simple tests on localhost).
Just in case it's helpful, here's the output I'm getting:
CHECK AFTER ASYNC1: data/text/crescent_topics.csv
CHECK AFTER ASYNC2: data/text/crescent_topics.csv
58562
CHECK AFTER ASYNC3: null
It might also be worth mentioning it's really only those first few lines of code to dynamically grab an open port that are the lines of code I added. The code that I had before which used a fixed port number worked just fine (including this csvFilePath variable remaining stable).
My understanding of the await functionality was that it makes the asynchronous function act more or less synchronously, which is what seems to be happening here; the code I have farther down that uses the port number is not running until after the port number is set. (But even if that wasn't the case, why is the csvFilePath variable being unset since I'm not altering it or using it in any way here?)
EDIT: Here's some more code to provide additional context
var spawn = require('child_process').spawn;
var fs = require("fs");
var async = require('async');
var zmq = require('zmq');
var readline = require('readline');
const getPort = require('get-port');
/* Export the Nebula class */
module.exports = Nebula;
/* Location of the data for the Crescent dataset */
var textDataPath = "data/text/";
var crescentRawDataPath = textDataPath + "crescent_raw";
var crescentTFIDF = textDataPath + "crescent tfidf.csv";
var crescentTopicModel = textDataPath + "crescent_topics.csv";
/* Location of the data for the UK Health dataset */
var ukHealthRawDataPath = textDataPath + "uk_health_raw";
var ukHealthTFIDF = textDataPath + "uk_health.csv";
/* Map CSV files for text data to raw text location */
var textRawDataMappings = {};
textRawDataMappings[crescentTFIDF] = crescentRawDataPath;
textRawDataMappings[crescentTopicModel] = crescentRawDataPath;
textRawDataMappings[ukHealthTFIDF] = ukHealthRawDataPath;
textRawDataMappings[textDataPath + "uk_health_sm.csv"] = ukHealthRawDataPath;
/* The pipelines available to use */
var flatTextUIs = ["cosmos", "composite", "sirius", "centaurus"];
var pipelines = {
andromeda: {
file: "pipelines/andromeda.py",
defaultData: "data/highD/Animal_Data_study.csv"
},
cosmos: {
file: "pipelines/cosmos.py",
defaultData: textDataPath + "crescent tfidf.csv"
},
sirius: {
file: "pipelines/sirius.py",
defaultData: "data/highD/Animal_Data_paper.csv"
},
centaurus: {
file: "pipelines/centaurus.py",
defaultData: "data/highD/Animal_Data_paper.csv"
},
twitter: {
file: "pipelines/twitter.py",
},
composite: {
file: "pipelines/composite.py",
defaultData: textDataPath + "crescent tfidf.csv"
},
elasticsearch: {
file: "pipelines/espipeline.py",
args: []
}
};
/* The locations of the different types of datasets on the server */
var textDataFolder = "data/text/";
var highDDataFolder = "data/highD/";
var customCSVFolder = "data/customCSV/";
var sirius_prototype = 2;
// An array to track the ports being processed to eliminate race conditions
// as much as possible
var portsInProcess = [];
var nextSessionNumber = 0;
var usedSessionNumbers = [];
/* Nebula class constructor */
function Nebula(io, pipelineAddr) {
/* This allows you to use "Nebula(obj)" as well as "new Nebula(obj)" */
if (!(this instanceof Nebula)) {
return new Nebula(io);
}
/* The group of rooms currently active, each with a string identifier
* Each room represents an instance of a visualization that can be shared
* among clients.
*/
this.rooms = {};
this.io = io;
/* For proper use in callback functions */
var self = this;
/* Accept new WebSocket clients */
io.on('connection', function(socket) {
// Skipped some irrelevant Socket.io callbacks
**// Use the csvFilePath to store the name of a user-defined CSV file
var csvFilePath = null;**
/* Helper function to tell the client that the CSV file is now ready for them
* to use. They are also sent a copy of the data
*/
var csvFileReady = function(csvFilePath) {
// Let the client know that the CSV file is now ready to be used on
// the server
socket.emit("csvDataReady");
// Prepare to parse the CSV file
var csvData = [];
const rl = readline.createInterface({
input: fs.createReadStream(csvFilePath),
crlfDelay: Infinity
});
// Print any error messages we encounter
rl.on('error', function (err) {
console.log("Error while parsing CSV file: " + csvFilePath);
console.log(err);
});
// Read each line of the CSV file one at a time and parse it
var columnHeaders = [];
var firstColumnName;
rl.on('line', function (data) {
var dataColumns = data.split(",");
// If we haven't saved any column names yet, do so first
if (columnHeaders.length == 0) {
columnHeaders = dataColumns;
firstColumnName = columnHeaders[0];
}
// Process each individual line of data in the CSV file
else {
var dataObj = {};
var i;
for (i = 0; i < dataColumns.length; i++) {
var key = columnHeaders[i];
var value = dataColumns[i];
dataObj[key] = value
}
csvData.push(dataObj);
}
});
// All lines are read, file is closed now.
rl.on('close', function () {
// On certain OSs, like Windows, an extra, blank line may be read
// Check for this and remove it if it exists
var lastObservation = csvData[csvData.length-1];
var lastObservationKeys = Object.keys(lastObservation);
if (lastObservationKeys.length == 1 && lastObservation[lastObservationKeys[0]] == "") {
csvData.pop();
}
// Provide the CSV data to the client
socket.emit("csvDataReadComplete", csvData, firstColumnName);
});
};
**/* Allows the client to specify a CSV file already on the server to use */
socket.on("setCSV", function(csvName) {
console.log("setCSV CALLED");
csvFilePath = "data/" + csvName;
csvFileReady(csvFilePath);
console.log("CSV FILE SET: " + csvFilePath);
});**
// Skipped some more irrelevant callbacks
/* Allows a client to join a room. If the room doesn't exist yet,
* initiate it and send the new room to the client. Otherwise, send
* the client the current state of the room.
*/
socket.on('join', function(roomName, user, pipeline, args) {
console.log("Join called for " + pipeline + " pipeline; room " + roomName);
socket.roomName = roomName;
socket.user = user;
socket.join(roomName);
console.log("CSV FILE PATH: " + csvFilePath);
var pipelineArgsCopy = [];
if (!self.rooms[roomName]) {
var room = {};
room.name = roomName;
room.count = 1;
room.points = new Map();
room.similarity_weights = new Map();
if (pipeline == "sirius" || pipeline == "centaurus") {
room.attribute_points = new Map();
room.attribute_similarity_weights = new Map();
room.observation_data = [];
room.attribute_data = [];
}
/* Create a pipeline client for this room */
console.log("CHECK BEFORE ASYNC: " + csvFilePath);
**// Here's the code snippet I provided above**
**(async () => {
console.log("CHECK AFTER ASYNC1: " + csvFilePath);
// First, grab a valid open port
var port;
while (!port || portsInProcess.indexOf(port) >= 0) {
console.log("CHECK AFTER ASYNC2: " + csvFilePath);
port = await getPort();
console.log(port);
}
console.log("CHECK AFTER ASYNC3: " + csvFilePath);**
portsInProcess.push(port);
console.log("CHECK AFTER ASYNC4: " + csvFilePath);
if (!pipelineAddr) {
var pythonArgs = ["-u"];
if (pipeline in pipelines) {
// A CSV file path should have already been set. This
// file path should be used to indicate where to find
// the desired file
console.log("LAST CHECK: " + csvFilePath);
if (!csvFilePath) {
csvFilePath = pipelines[pipeline].defaultData;
}
console.log("FINAL CSV FILE: " + csvFilePath);
pipelineArgsCopy.push(csvFilePath);
// If the UI supports reading flat text files, tell the
// pipeline where to find the files
if (flatTextUIs.indexOf(pipeline) >= 0) {
pipelineArgsCopy.push(textRawDataMappings[csvFilePath]);
}
// Set the remaining pipeline args
pythonArgs.push(pipelines[pipeline].file);
pythonArgs.push(port.toString());
if (pipeline != "twitter" && pipeline != "elasticsearch") {
pythonArgs = pythonArgs.concat(pipelineArgsCopy);
}
}
else {
pythonArgs.push(pipelines.cosmos.file);
pythonArgs.push(port.toString());
pythonArgs.push(pipelines.cosmos.defaultData);
pythonArgs.push(crescentRawDataPath);
}
// used in case of CosmosRadar
for (var key in args) {
if (args.hasOwnProperty(key)) {
pythonArgs.push("--" + key);
pythonArgs.push(args[key]);
}
}
// Dynamically determine which distance function should be
// used
if (pythonArgs.indexOf("--dist_func") < 0) {
if (pipeline === "twitter" || pipeline === "elasticsearch" ||
csvFilePath.startsWith(textDataPath)) {
pythonArgs.push("--dist_func", "cosine");
}
else {
pythonArgs.push("--dist_func", "euclidean");
}
}
console.log(pythonArgs);
console.log("");
var pipelineInstance = spawn("python2.7", pythonArgs, {stdout: "inherit"});
pipelineInstance.on("error", function(err) {
console.log("python2.7.exe not found. Trying python.exe");
pipelineInstance = spawn("python", pythonArgs,{stdout: "inherit"});
pipelineInstance.stdout.on("data", function(data) {
console.log("Pipeline: " + data.toString());
});
pipelineInstance.stderr.on("data", function(data) {
console.log("Pipeline error: " + data.toString());
});
});
/* Data received by node app from python process,
* output this data to the output stream (on 'data'),
* we want to convert that received data into a string and
* append it to the overall data String
*/
pipelineInstance.stdout.on("data", function(data) {
console.log("Pipeline STDOUT: " + data.toString());
});
pipelineInstance.stderr.on("data", function(data) {
console.log("Pipeline error: " + data.toString());
});
room.pipelineInstance = pipelineInstance;
}
/* Connect to the pipeline */
pipelineAddr = pipelineAddr || "tcp://127.0.0.1:" + port.toString();
room.pipelineSocket = zmq.socket('pair');
room.pipelineSocket.connect(pipelineAddr);
pipelineAddr = null;
portsInProcess.splice(portsInProcess.indexOf(port), 1);
/* Listens for messages from the pipeline */
room.pipelineSocket.on('message', function (msg) {
self.handleMessage(room, msg);
});
self.rooms[roomName] = socket.room = room;
invoke(room.pipelineSocket, "reset");
})();
}
else {
socket.room = self.rooms[roomName];
socket.room.count += 1;
if (pipeline == "sirius" || pipeline == "centaurus") {
socket.emit('update', sendRoom(socket.room, true), true);
socket.emit('update', sendRoom(socket.room, false), false);
}
else {
socket.emit('update', sendRoom(socket.room));
}
}
// Reset the csvFilePath to null for future UIs...
// I don't think this is actually necessary since
// csvFilePath is local to the "connections" message,
// which is called for every individual room
csvFilePath = null;
});
// Skipped the rest of the code; it's irrelevant
});
}
Full printouts:
setCSV CALLED
CSV FILE SET: data/text/crescent_topics.csv
Join called for sirius pipeline; room sirius0
CSV FILE PATH: data/text/crescent_topics.csv
CHECK BEFORE ASYNC: data/text/crescent_topics.csv
CHECK AFTER ASYNC1: data/text/crescent_topics.csv
CHECK AFTER ASYNC2: data/text/crescent_topics.csv
58562
CHECK AFTER ASYNC3: null
CHECK AFTER ASYNC4: null
LAST CHECK: null
FINAL CSV FILE: data/highD/Animal_Data_paper.csv
[ '-u',
'pipelines/sirius.py',
'58562',
'data/highD/Animal_Data_paper.csv',
undefined,
'--dist_func',
'euclidean' ]
Since bolding of code doesn't work, just search for the "**" to find the relevant pieces I've marked.
TL;DR There's a lot of communication happening between the client and server to establish an individualized communication that is directly linked to a specific dataset. The user has the ability to upload a custom CSV file to the system, but the code I'm working with right now is just trying to select an existing CSV file on the server, so I omitted the callbacks for the custom CSV file. Once the file has been selected, the client asks to "join" a room/session. The case I'm working with right now assumes that this is a new room/session as opposed to trying to do some shared room/session with another client. (Yes, I know, the code is messy for sharing rooms/sessions, but it works for the most part for now and is not my main concern.) Again, all this code worked just fine before the asynchronous code was added (and using a static port variable), so I don't know what changed so much by adding it.
Since you've now included the whole code context, we can see that the code after your async IIFE is what is causing the problem.
An async function returns a promise as soon as it hits an await. And, while that await is waiting for its asynchronous operation, the code following the call to the async function runs. In your case, you're essentially doing this:
var csvFilePath = someGoodValue;

(async () => {
    port = await getPort();
    console.log(csvFilePath); // this will be null
})();

csvFilePath = null; // this runs as soon as the above code hits the await
So, as soon as you hit your first await, the async function returns a promise and the code following it continues to run, hitting the line of code that resets your csvFilePath.
There are probably cleaner ways to restructure your code, but a simple thing you could do is this:
var csvFilePath = someGoodValue;

(async () => {
    port = await getPort();
    console.log(csvFilePath); // still has its original value here
})().finally(() => {
    csvFilePath = null;
});
Note: .finally() is supported in node v10+. If you're using an older version, you can reset the path in both .then() and .catch().
Or, as your comment says, maybe you can just remove the resetting of the csvFilePath entirely.
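Another option, just as a sketch (it assumes nothing else in the 'join' handler depends on finishing synchronously), is to make the socket handler itself async and await the port lookup inline, so the reset at the bottom only runs once the asynchronous work is done:

socket.on('join', async function(roomName, user, pipeline, args) {
    // ... existing setup code ...
    if (!self.rooms[roomName]) {
        // Await the port lookup directly instead of wrapping it in an IIFE
        var port;
        while (!port || portsInProcess.indexOf(port) >= 0) {
            port = await getPort();
        }
        portsInProcess.push(port);
        // ... rest of the room/pipeline setup ...
    }
    // This now runs only after the awaited code above has completed
    csvFilePath = null;
});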
I realized after some silly tests I tried that I'm resetting csvFilePath to null outside the asynchronous call, which is what is causing the error... Oops!

Configuring mock-cli

I'm trying to use mock-cli to stub process.argv in mocha tests for a CLI app. I want to test that a message is console.logged when an incorrect argument ("imit") is passed to process.argv (as defined by commands).
I'm trying to adapt the example from the documentation, but I don't think I have set everything up correctly.
It passes when I comment out "stdin: require('../mocks/fakeInputStream'), // Hook up a fake input stream", though I know it's not working correctly.
It fails with TypeError: sourceStream.on is not a function when run as described below.
Can someone see what I'm missing?
/index.js
var commands = ['init'];

function getGitHeadArgs() {
    return process.argv.slice(2, process.argv.length);
}

if (getGitHeadArgs().length) {
    if (!commands.includes(getGitHeadArgs()[0])) {
        console.log("Silly Githead! That's not a githead command");
    }
    eval(getGitHeadArgs()[0])();
} else {
    console.log("You didn't tell githead to do anything!");
}
/testIndex.js
var assert = require('assert');
var index = require('../index.js');
var mockCli = require("mock-cli");

describe("incorrect argument", function() {
    it("imit throws an error if an invalid command is raised", function() {
        var argv = ['node', '../index.js', 'imit']; // Fake argv
        var stdio = {
            stdin: require('../mocks/fakeInputStream'), // Hook up a fake input stream
            stdout: process.stdout, // Display the captured output in the main console
            stderr: process.stderr  // Display the captured error output in the main console
        };
        var kill = mockCli(argv, stdio, function onProcessComplete(error, result) {
            var exitCode = result.code;  // Process exit code
            var stdout = result.stdout;  // UTF-8 string contents of process.stdout
            var stderr = result.stderr;  // UTF-8 string contents of process.stderr
            assert.equal(exitCode, 0);
            assert.equal(stdout, "Silly Githead! That's not a githead command\n");
            assert.equal(stderr, '');
        });
        // Execute the CLI task
        require('../index.js');
        // Kill the task if still running after one second
        setTimeout(kill, 1000);
    });
});
Is ../mocks/fakeInputStream a valid path?
Is the object at ../mocks/fakeInputStream a valid instance of ReadableStream?
The source code is available on GitHub.
Make sure you meet the requirements for the captureStdin(sourceStream, callback) function.
The module uses that function to capture your fakeInputStream and pipe it into a captureStream.
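If ../mocks/fakeInputStream doesn't export a readable stream, that would explain the sourceStream.on is not a function error. A minimal, hypothetical version of that mock (the real file isn't shown in the question) could be built with Node's stream.Readable:

// mocks/fakeInputStream.js (hypothetical sketch)
var Readable = require('stream').Readable;

var fakeInputStream = new Readable({
    read: function() {
        // This test doesn't need any interactive input, so end the stream immediately
        this.push(null);
    }
});

module.exports = fakeInputStream;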

Code works locally but not on AWS lambda

The following Lambda code works perfectly fine when tested locally using alexa-app-server, but when published and tested on AWS Lambda, it gets into the else statement and prints console.log('out publish'). However, it doesn't publish to 'lambda/channelNumber', nor does it send the correct response back to me or print 'in publish'.
Any ideas why it's just completing the bottom half of the else statement and not touching the publish callback?
Code Snippet where I believe the problem lies
function (request, response) {
    var channelNumber = request.slot('CHANNELNUMBER');
    if (_.isEmpty(channelNumber)) {
        var prompt = 'I didn\'t hear a channel code. Tell me a channel code.';
        response.say(prompt).shouldEndSession(true);
        return true;
    } else {
        // Doesn't publish any of this?????
        thingShadows.publish('lambda/channelNumber', channelNumber, function () {
            var prompt1 = 'Okay.';
            response.say(prompt1).shouldEndSession(true);
            console.log('in publish');
        });
        //// But prints this??
        console.log('out publish');
        return true;
    }
}
Full Code
'use strict';

module.change_code = 1;

var Alexa = require('alexa-app');
var skill = new Alexa.app('smartmote');
var awsIot = require('aws-iot-device-sdk');
var deviceName = "tv";
var _ = require('lodash');
var path = require('path');

var host = "XXXXXXXXXXXXXXXXXXXX.iot.us-east-1.amazonaws.com";
//App id is the skill being used.
var app_id = "amzn1.ask.skill.YYYYYYYYYYYYYYYYYYYYY";

var thingShadows = awsIot.thingShadow({
    keyPath: path.join(__dirname, '/Raspi.private.key'),
    certPath: path.join(__dirname, '/Raspi.cert.pem'),
    caPath: path.join(__dirname, '/root-CA.crt'),
    clientId: deviceName,
    region: "us-east-1",
});

var reprompt = 'I didn\'t hear a channel, tell me a channel number or name to change to that channel';

skill.launch(function (request, response) {
    var prompt = 'To change channel, tell me a channel number.';
    response.say(prompt).reprompt(reprompt).shouldEndSession(true);
});

skill.intent('ChannelNumberIntent', {
        'slots': {
            'CHANNELNUMBER': 'CHANNELID'
        },
        'utterances': ['{|Change|put} {|the|on} {|channel} {|to} {-|CHANNELNUMBER}']
    },
    function (request, response) {
        var channelNumber = request.slot('CHANNELNUMBER');
        if (_.isEmpty(channelNumber)) {
            var prompt = 'I didn\'t hear a channel code. Tell me a channel code.';
            response.say(prompt).shouldEndSession(true);
            return true;
        } else {
            thingShadows.publish('lambda/channelNumber', channelNumber, function () {
                console.log('in pub');
                var prompt1 = 'Okay.';
                response.say(prompt1).shouldEndSession(true);
                callback();
            });
            console.log('out pub');
            return true;
        }
    }
);

module.exports = skill;
This is most likely because of the asynchronous nature of your code.
You haven't told us what thingShadows.publish() does, but it appears to take a callback function as its second argument. Presumably this function will be called when publish() has finished doing whatever it does.
When running locally I would imagine that the output you see is (in this order):
out publish
in publish
Notice that out publish gets called before in publish. This is because the publish method is asynchronous, so execution will continue as soon as it is called. In your case, you are calling return immediately after calling publish, which probably means your lambda job is ending before it has a chance to log in publish.
You haven't provided enough information about the rest of your lambda code/setup to provide a full answer, but you need to make sure that you are waiting for your publish method to have finished before continuing. One way to achieve this is to use the callback object that is passed to your lambda handler:
exports.myHandler = function(event, context, callback) {
    // Other code

    thingShadows.publish('lambda/channelNumber', channelNumber, function () {
        var prompt1 = 'Okay.';
        response.say(prompt1).shouldEndSession(true);
        console.log('in publish');
        // When the publish method is complete, we can call `callback`
        // to tell lambda we are done
        callback();
    });
}
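If you'd rather keep the control flow explicit, here is a sketch of the same idea with the publish wrapped in a Promise. It assumes the publish callback follows the usual Node-style (err) convention; check the aws-iot-device-sdk docs for your version.

// Hypothetical helper that wraps the callback-style publish in a Promise
function publishChannel(channelNumber) {
    return new Promise(function (resolve, reject) {
        thingShadows.publish('lambda/channelNumber', channelNumber, function (err) {
            if (err) return reject(err);
            resolve();
        });
    });
}

exports.myHandler = function(event, context, callback) {
    // Other code
    publishChannel(channelNumber)
        .then(function () {
            console.log('in publish');
            callback(null, 'published'); // tell Lambda we are done
        })
        .catch(callback);
};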

Robotjs error: Invalid key code specified

I have the following code in index.js
var settingsFile = "config.json";
var settings = JSON.parse(require("fs").readFileSync(settingsFile));

const net = require('net');
const robot = require("robotjs");
const fs = require("fs");

var client;
var customKeys = {
    "scroll_up": 'robot.scrollMouse(50, "up");',
    "scroll_down": "robot.scrollMouse(50,'down');"
}

function startCommunication(address, port) {
    client = net.connect({port: port, host: address}, () => {
        // 'connect' listener
        console.log('connected to server!');
        //client.write('world!\r\n');
    });
    client.on('data', (data) => {
        console.log(data.toString());
        var string = data.toString();
        console.log(settings.keys[string.substr(1)]);
        if (string.substr(0,1) == "d") {
            robot.keyToggle(settings.keys[string.substr(1)], "down");
        } else {
            robot.keyToggle(settings.keys[string.substr(1)], "up");
        }
        //client.end();
    });
    client.on('end', () => {
        console.log('disconnected from server');
    });
}
startCommunication(settings.address,settings.port);
I also have this code in config.json, i.e. what is parsed into the settings variable.
{
    "port": 5555,
    "address": "192.168.1.118",
    "keys": {
        "KEY_A": "a",
        "KEY_B": "b",
        "KEY_X": "x",
        "KEY_Y": "y",
        "KEY_L": "y",
        "KEY_R": "t",
        "KEY_DUP": "up",
        "KEY_DDOWN": "down",
        "KEY_DLEFT": "left",
        "KEY_DRIGHT": "right",
        "KEY_START": "z",
        "KEY_SELECT": "q"
    }
}
The problem is that when I get down to either robot.keyToggle statement, I get the error
Error: Invalid key code specified.
This means that, as the error clearly states, it is getting an invalid key code. I am guessing it is some simple mistake that I made. data in my testing is equal to "dKEY_DRIGHT". The variable string is equal to that, but I need to get rid of the d in order for it to work. When I use the live console I am able to get the data I need with the same code, but something goes wrong when it is run from the file. Anything helps :)
You may try using node-key-sender to send key presses to your operating system.
Install it with npm install --save-dev node-key-sender.
And send a key to the keyboard using:
var ks = require('node-key-sender');
ks.sendKey('up');
All the values of your config ('a', 'b', ...) are accepted by the lib. You can send them directly.
Check the documentation page for more information: https://www.npmjs.com/package/node-key-sender.
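As a hypothetical adaptation of the question's data handler (it keeps the "d"/"u" prefix protocol from the question and only reacts to key-down events, since sendKey simulates a complete key press):

var ks = require('node-key-sender');

client.on('data', (data) => {
    // Trim in case the sender appends a newline to the payload
    var string = data.toString().trim();
    var key = settings.keys[string.substr(1)];
    if (string.substr(0, 1) == "d" && key) {
        ks.sendKey(key); // e.g. "right" for "dKEY_DRIGHT"
    }
});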

Node.js - Organising code and closures - SFTP/Inotify

I was hoping I could get some advice on why my nodejs program is behaving in the way it is.
I am using two modules, node-sftp and node-inotify. I have setup node-inotify to watch a directory and call a function when something is written there, the function being an sftp upload.
Now the problem I have is that processing one file at a time is fine, but when I drop 4 files there in one go, the function is called four times yet only one sftp upload goes through.
Do I need to order my code in a particular way to ensure that the sftp upload occurs x times? Is this something to do with closures, perhaps?
This is a basic version of my code...
"event_handler" is called when something happens on a "watched" directory
"check_event" figures out if this type of event is one we want, in this case it's a "write"
"ftp_to_server" prepare connection details
"do_ftp" basically uses the node-sftp module to perform the sftp upload
event_handler = function(event){
    var supplier;
    check_event(event, supplier, type, ftp_to_server);
};

=================

function check_event(event, handler)
{
    if (event.type === 'xxxxxx') {
        var file_to_process_name = 'abc';
        var file_to_process_dir = 'abc';
        var remote_dir = 'abc';
        handler(file_to_process_name, file_to_process_dir, remote_dir);
    }
}

function ftp_to_server(file_to_process_name, file_to_process_dir, remote_dir) {
    var connection_details = conf.ftp.connections
    do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir);
}

function do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir) {
    var credentials = {
        // FTP settings here
    };
    var local_file = file_to_process_dir + file_to_process_name;
    var remote_file = remote_dir + file_to_process_name;

    connection = new sftp(credentials, function(err) {
        if (err){
            throw err;
        }
        connection.writeFile(remote_file, fs.readFileSync(local_file, "utf8"), null, function(err) {
            if (err) {
                throw err;
            }
            console.info('FTP PUT DONE');
        });
    });
};
Your "connection = new sftp(credentials, function(err) {"
should be
var connection = new sftp(credentials, function(err) {
The way you currently have it coded, "connection" is an implicit global, so each new upload overwrites the previous one's connection before it has finished.
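In context, a minimal sketch of do_ftp with the connection scoped locally (the same code as the question, with only the declaration changed):

function do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir) {
    var credentials = {
        // FTP settings here
    };
    var local_file = file_to_process_dir + file_to_process_name;
    var remote_file = remote_dir + file_to_process_name;

    // Declaring connection with var gives each upload its own connection
    var connection = new sftp(credentials, function(err) {
        if (err) {
            throw err;
        }
        connection.writeFile(remote_file, fs.readFileSync(local_file, "utf8"), null, function(err) {
            if (err) {
                throw err;
            }
            console.info('FTP PUT DONE');
        });
    });
}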
