Javascript Await Changes Local Variables? [closed]

Closed. This question was caused by a typo or a problem that can no longer be reproduced, so it is not accepting answers. While similar questions may be on-topic here, this one was resolved in a way less likely to help future readers.
Closed 3 years ago.
Anyone able to explain what I'm doing wrong with my use of asynchronous functions in Javascript?
Basically, I must use an asynchronous function in my Node.js code to grab an open port for me to use. There is a local variable being set outside of the asynchronous call that I can access/use just fine until I await the asynchronous function's return. After that, the local variable is null.
(async () => {
    console.log("CHECK AFTER ASYNC1: " + csvFilePath);

    // First, grab a valid open port
    var port;
    while (!port || portsInProcess.indexOf(port) >= 0) {
        console.log("CHECK AFTER ASYNC2: " + csvFilePath);
        port = await getPort();
        console.log(port);
    }

    console.log("CHECK AFTER ASYNC3: " + csvFilePath);
    portsInProcess.push(port);

    // ... more code below...
Checks #1 and #2 are fine for the csvFilePath variable, but check #3 shows that it's null. The port number, however, is fine. This leads me to believe that there's some weirdness with asynchronous function calls in Javascript that ONLY affects local variables; the global variables I use further down are just fine. Unfortunately, I cannot make the csvFilePath variable global here, since that would introduce race conditions on that variable too (which I'm preventing elsewhere; the while loop is there to help prevent race conditions on the port number, which is basically unused in my simple tests on localhost).
Just in case it's helpful, here's the output I'm getting:
CHECK AFTER ASYNC1: data/text/crescent_topics.csv
CHECK AFTER ASYNC2: data/text/crescent_topics.csv
58562
CHECK AFTER ASYNC3: null
It might also be worth mentioning that the first few lines of code that dynamically grab an open port are really the only lines I added. The code I had before, which used a fixed port number, worked just fine (including the csvFilePath variable remaining stable).
My understanding of the await functionality was that it makes the asynchronous function act more or less synchronously, which is what seems to be happening here; the code I have farther down that uses the port number is not running until after the port number is set. (But even if that wasn't the case, why is the csvFilePath variable being unset since I'm not altering it or using it in any way here?)
EDIT: Here's some more code to provide additional context
var spawn = require('child_process').spawn;
var fs = require("fs");
var async = require('async');
var zmq = require('zmq');
var readline = require('readline');
const getPort = require('get-port');
/* Export the Nebula class */
module.exports = Nebula;
/* Location of the data for the Crescent dataset */
var textDataPath = "data/text/";
var crescentRawDataPath = textDataPath + "crescent_raw";
var crescentTFIDF = textDataPath + "crescent tfidf.csv";
var crescentTopicModel = textDataPath + "crescent_topics.csv";
/* Location of the data for the UK Health dataset */
var ukHealthRawDataPath = textDataPath + "uk_health_raw";
var ukHealthTFIDF = textDataPath + "uk_health.csv";
/* Map CSV files for text data to raw text location */
var textRawDataMappings = {};
textRawDataMappings[crescentTFIDF] = crescentRawDataPath;
textRawDataMappings[crescentTopicModel] = crescentRawDataPath;
textRawDataMappings[ukHealthTFIDF] = ukHealthRawDataPath;
textRawDataMappings[textDataPath + "uk_health_sm.csv"] = ukHealthRawDataPath;
/* The pipelines available to use */
var flatTextUIs = ["cosmos", "composite", "sirius", "centaurus"];
var pipelines = {
    andromeda: {
        file: "pipelines/andromeda.py",
        defaultData: "data/highD/Animal_Data_study.csv"
    },
    cosmos: {
        file: "pipelines/cosmos.py",
        defaultData: textDataPath + "crescent tfidf.csv"
    },
    sirius: {
        file: "pipelines/sirius.py",
        defaultData: "data/highD/Animal_Data_paper.csv"
    },
    centaurus: {
        file: "pipelines/centaurus.py",
        defaultData: "data/highD/Animal_Data_paper.csv"
    },
    twitter: {
        file: "pipelines/twitter.py",
    },
    composite: {
        file: "pipelines/composite.py",
        defaultData: textDataPath + "crescent tfidf.csv"
    },
    elasticsearch: {
        file: "pipelines/espipeline.py",
        args: []
    }
};
/* The locations of the different types of datasets on the server */
var textDataFolder = "data/text/";
var highDDataFolder = "data/highD/";
var customCSVFolder = "data/customCSV/";
var sirius_prototype = 2;
// An array to track the ports being processed to eliminate race conditions
// as much as possible
var portsInProcess = [];
var nextSessionNumber = 0;
var usedSessionNumbers = [];
/* Nebula class constructor */
function Nebula(io, pipelineAddr) {
    /* This allows you to use "Nebula(obj)" as well as "new Nebula(obj)" */
    if (!(this instanceof Nebula)) {
        return new Nebula(io);
    }

    /* The group of rooms currently active, each with a string identifier.
     * Each room represents an instance of a visualization that can be shared
     * among clients.
     */
    this.rooms = {};
    this.io = io;

    /* For proper use in callback functions */
    var self = this;

    /* Accept new WebSocket clients */
    io.on('connection', function(socket) {
        // Skipped some irrelevant Socket.io callbacks

        **// Use the csvFilePath to store the name of a user-defined CSV file
        var csvFilePath = null;**
        /* Helper function to tell the client that the CSV file is now ready for them
         * to use. They are also sent a copy of the data.
         */
        var csvFileReady = function(csvFilePath) {
            // Let the client know that the CSV file is now ready to be used on
            // the server
            socket.emit("csvDataReady");

            // Prepare to parse the CSV file
            var csvData = [];
            const rl = readline.createInterface({
                input: fs.createReadStream(csvFilePath),
                crlfDelay: Infinity
            });

            // Print any error messages we encounter
            rl.on('error', function (err) {
                console.log("Error while parsing CSV file: " + csvFilePath);
                console.log(err);
            });

            // Read each line of the CSV file one at a time and parse it
            var columnHeaders = [];
            var firstColumnName;
            rl.on('line', function (data) {
                var dataColumns = data.split(",");

                // If we haven't saved any column names yet, do so first
                if (columnHeaders.length == 0) {
                    columnHeaders = dataColumns;
                    firstColumnName = columnHeaders[0];
                }
                // Process each individual line of data in the CSV file
                else {
                    var dataObj = {};
                    var i;
                    for (i = 0; i < dataColumns.length; i++) {
                        var key = columnHeaders[i];
                        var value = dataColumns[i];
                        dataObj[key] = value;
                    }
                    csvData.push(dataObj);
                }
            });

            // All lines are read, file is closed now.
            rl.on('close', function () {
                // On certain OSs, like Windows, an extra, blank line may be read.
                // Check for this and remove it if it exists.
                var lastObservation = csvData[csvData.length - 1];
                var lastObservationKeys = Object.keys(lastObservation);
                if (lastObservationKeys.length == 1 && lastObservation[lastObservationKeys[0]] == "") {
                    csvData.pop();
                }

                // Provide the CSV data to the client
                socket.emit("csvDataReadComplete", csvData, firstColumnName);
            });
        };
        **/* Allows the client to specify a CSV file already on the server to use */
        socket.on("setCSV", function(csvName) {
            console.log("setCSV CALLED");
            csvFilePath = "data/" + csvName;
            csvFileReady(csvFilePath);
            console.log("CSV FILE SET: " + csvFilePath);
        });**

        // Skipped some more irrelevant callbacks

        /* Allows a client to join a room. If the room doesn't exist yet,
         * initiate it and send the new room to the client. Otherwise, send
         * the client the current state of the room.
         */
        socket.on('join', function(roomName, user, pipeline, args) {
            console.log("Join called for " + pipeline + " pipeline; room " + roomName);
            socket.roomName = roomName;
            socket.user = user;
            socket.join(roomName);
            console.log("CSV FILE PATH: " + csvFilePath);

            var pipelineArgsCopy = [];

            if (!self.rooms[roomName]) {
                var room = {};
                room.name = roomName;
                room.count = 1;
                room.points = new Map();
                room.similarity_weights = new Map();

                if (pipeline == "sirius" || pipeline == "centaurus") {
                    room.attribute_points = new Map();
                    room.attribute_similarity_weights = new Map();
                    room.observation_data = [];
                    room.attribute_data = [];
                }

                /* Create a pipeline client for this room */
                console.log("CHECK BEFORE ASYNC: " + csvFilePath);

                **// Here's the code snippet I provided above**
                **(async () => {
                    console.log("CHECK AFTER ASYNC1: " + csvFilePath);

                    // First, grab a valid open port
                    var port;
                    while (!port || portsInProcess.indexOf(port) >= 0) {
                        console.log("CHECK AFTER ASYNC2: " + csvFilePath);
                        port = await getPort();
                        console.log(port);
                    }

                    console.log("CHECK AFTER ASYNC3: " + csvFilePath);**
                    portsInProcess.push(port);
                    console.log("CHECK AFTER ASYNC4: " + csvFilePath);

                    if (!pipelineAddr) {
                        var pythonArgs = ["-u"];
                        if (pipeline in pipelines) {
                            // A CSV file path should have already been set. This
                            // file path should be used to indicate where to find
                            // the desired file
                            console.log("LAST CHECK: " + csvFilePath);
                            if (!csvFilePath) {
                                csvFilePath = pipelines[pipeline].defaultData;
                            }
                            console.log("FINAL CSV FILE: " + csvFilePath);
                            pipelineArgsCopy.push(csvFilePath);

                            // If the UI supports reading flat text files, tell the
                            // pipeline where to find the files
                            if (flatTextUIs.indexOf(pipeline) >= 0) {
                                pipelineArgsCopy.push(textRawDataMappings[csvFilePath]);
                            }

                            // Set the remaining pipeline args
                            pythonArgs.push(pipelines[pipeline].file);
                            pythonArgs.push(port.toString());
                            if (pipeline != "twitter" && pipeline != "elasticsearch") {
                                pythonArgs = pythonArgs.concat(pipelineArgsCopy);
                            }
                        }
                        else {
                            pythonArgs.push(pipelines.cosmos.file);
                            pythonArgs.push(port.toString());
                            pythonArgs.push(pipelines.cosmos.defaultData);
                            pythonArgs.push(crescentRawDataPath);
                        }

                        // used in case of CosmosRadar
                        for (var key in args) {
                            if (args.hasOwnProperty(key)) {
                                pythonArgs.push("--" + key);
                                pythonArgs.push(args[key]);
                            }
                        }

                        // Dynamically determine which distance function should be
                        // used
                        if (pythonArgs.indexOf("--dist_func") < 0) {
                            if (pipeline === "twitter" || pipeline === "elasticsearch" ||
                                    csvFilePath.startsWith(textDataPath)) {
                                pythonArgs.push("--dist_func", "cosine");
                            }
                            else {
                                pythonArgs.push("--dist_func", "euclidean");
                            }
                        }

                        console.log(pythonArgs);
                        console.log("");

                        var pipelineInstance = spawn("python2.7", pythonArgs, {stdout: "inherit"});

                        pipelineInstance.on("error", function(err) {
                            console.log("python2.7.exe not found. Trying python.exe");
                            pipelineInstance = spawn("python", pythonArgs, {stdout: "inherit"});

                            pipelineInstance.stdout.on("data", function(data) {
                                console.log("Pipeline: " + data.toString());
                            });
                            pipelineInstance.stderr.on("data", function(data) {
                                console.log("Pipeline error: " + data.toString());
                            });
                        });

                        /* Data received by the Node app from the Python process;
                         * output this data to the output stream (on 'data'). We
                         * want to convert the received data into a string and
                         * append it to the overall data string.
                         */
                        pipelineInstance.stdout.on("data", function(data) {
                            console.log("Pipeline STDOUT: " + data.toString());
                        });
                        pipelineInstance.stderr.on("data", function(data) {
                            console.log("Pipeline error: " + data.toString());
                        });

                        room.pipelineInstance = pipelineInstance;
                    }

                    /* Connect to the pipeline */
                    pipelineAddr = pipelineAddr || "tcp://127.0.0.1:" + port.toString();
                    room.pipelineSocket = zmq.socket('pair');
                    room.pipelineSocket.connect(pipelineAddr);
                    pipelineAddr = null;
                    portsInProcess.splice(portsInProcess.indexOf(port), 1);

                    /* Listens for messages from the pipeline */
                    room.pipelineSocket.on('message', function (msg) {
                        self.handleMessage(room, msg);
                    });

                    self.rooms[roomName] = socket.room = room;
                    invoke(room.pipelineSocket, "reset");
                })();
            }
            else {
                socket.room = self.rooms[roomName];
                socket.room.count += 1;
                if (pipeline == "sirius" || pipeline == "centaurus") {
                    socket.emit('update', sendRoom(socket.room, true), true);
                    socket.emit('update', sendRoom(socket.room, false), false);
                }
                else {
                    socket.emit('update', sendRoom(socket.room));
                }
            }

            // Reset the csvFilePath to null for future UIs...
            // I don't think this is actually necessary since
            // csvFilePath is local to the "connections" message,
            // which is called for every individual room
            csvFilePath = null;
        });

        // Skipped the rest of the code; it's irrelevant
    });
}
Full printouts:
setCSV CALLED
CSV FILE SET: data/text/crescent_topics.csv
Join called for sirius pipeline; room sirius0
CSV FILE PATH: data/text/crescent_topics.csv
CHECK BEFORE ASYNC: data/text/crescent_topics.csv
CHECK AFTER ASYNC1: data/text/crescent_topics.csv
CHECK AFTER ASYNC2: data/text/crescent_topics.csv
58562
CHECK AFTER ASYNC3: null
CHECK AFTER ASYNC4: null
LAST CHECK: null
FINAL CSV FILE: data/highD/Animal_Data_paper.csv
[ '-u',
'pipelines/sirius.py',
'58562',
'data/highD/Animal_Data_paper.csv',
undefined,
'--dist_func',
'euclidean' ]
Since bolding of code doesn't work, just search for the "**" to find the relevant pieces I've marked.
TL;DR There's a lot of communication happening between the client and server to establish an individualized session that is directly linked to a specific dataset. The user has the ability to upload a custom CSV file to the system, but the code I'm working with right now is just trying to select an existing CSV file on the server, so I omitted the callbacks for the custom CSV file. Once the file has been selected, the client asks to "join" a room/session. The case I'm working with right now assumes that this is a new room/session, as opposed to a shared room/session with another client. (Yes, I know, the code is messy for sharing rooms/sessions, but it works for the most part for now and is not my main concern.) Again, all this code worked just fine before the asynchronous code was added (when it used a static port variable), so I don't know what changed so much by adding it.

Since you now included the whole code context, we can see that the problem is caused by the code that runs after your async IIFE.
An async function returns a promise as soon as it hits an await. And, while that await is waiting for its asynchronous operation, the code following the call to the async function runs. In your case, you're essentially doing this:
var csvFilePath = someGoodValue;

(async () => {
    port = await getPort();
    console.log(csvFilePath); // this will be null
})();

csvFilePath = null; // this runs as soon as the above code hits the await
So, as soon as you hit your first await, the async function returns a promise and the code following it continues to run, hitting the line of code that resets your csvFilePath.
There are probably cleaner ways to restructure your code, but a simple thing you could do is this:
var csvFilePath = someGoodValue;

(async () => {
    port = await getPort();
    console.log(csvFilePath); // csvFilePath is still set here
})().finally(() => {
    csvFilePath = null; // reset only after the async work is done
});
Note: .finally() is supported in node v10+. If you're using an older version, you can reset the path in both .then() and .catch().
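For older Node versions, here is a sketch of the same restructuring using .then() and .catch() in place of .finally():

var csvFilePath = someGoodValue;

(async () => {
    port = await getPort();
    console.log(csvFilePath); // still set here
})().then(() => {
    csvFilePath = null;
}).catch((err) => {
    console.log(err);
    csvFilePath = null; // reset even if getPort() rejects
});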
Or, as your comment says, maybe you can just remove the resetting of the csvFilePath entirely.

I realized after some silly tests that I'm resetting csvFilePath to null outside the asynchronous call, which is what was causing the error... Oops!

Related

I have a problem reading some data using the serial.read() command

I am periodically reading the temperature and humidity values from a DHT22 sensor in a greenhouse tunnel.
The sensor is attached to an Arduino Pro Mini. The Pro Mini also has an nRF24L01 transceiver attached to it, and the readings are transmitted to another nRF24L01/Arduino Pro Mini in my office.
The Arduino is connected to a desktop PC via a USB serial cable.
The intention is to write the received temperature and humidity readings to a file in CSV format.
I am receiving all the data over the radio link, which in turn is fed to my PC via my USB port. I am running Node with a file called index2.js.
Below is the code from the Arduino connected to the PC. It is the receiver side of the radio link.
/*
See documentation at https://nRF24.github.io/RF24
See License information at root directory of this library
Author: Brendan Doherty (2bndy5)
*/
/**
   A simple example of sending data from 1 nRF24L01 transceiver to another.
   This example was written to be used on 2 devices acting as "nodes".
   Use the Serial Monitor to change each node's behavior.
*/
#include <SPI.h>
#include <printf.h>
#include <nRF24L01.h>
#include <RF24.h>
struct dataStruct {
  float HumH;
  float TempC;
} myData;

bool newData = false;

RF24 radio(9, 10); // using pin 9 for the CE pin and pin 10 for the CSN pin

uint8_t address[][6] = {"1Node", "2Node"};
bool radioNumber = 1; // 0 uses address[0] to transmit, 1 uses address[1] to transmit
bool role = false;    // true = TX role, false = RX role
void setup() {
  Serial.begin(115200);

  if (!radio.begin()) {
    Serial.println(F("radio hardware is not responding!!"));
    while (1) {} // hold in infinite loop
  }

  radio.setPALevel(RF24_PA_HIGH);                  // RF24_PA_MAX is default.
  radio.setPayloadSize(sizeof(dataStruct));        // float datatype occupies 4 bytes
  radio.openWritingPipe(address[radioNumber]);     // always uses pipe 0
  radio.openReadingPipe(1, address[!radioNumber]); // using pipe 1
  radio.startListening();                          // put radio in RX mode

  // For debugging info
  printf_begin();             // needed only once for printing details
  radio.printDetails();       // (smaller) function that prints raw register values
  radio.printPrettyDetails(); // (larger) function that prints human readable data
} // end of setup
void getData() {
  if (radio.available()) {
    //Serial.println("Radio is available******");
    radio.read(&myData, sizeof(dataStruct));
    newData = true;
  }
  //Serial.println("Radio is NOT available******");
}

void showData() {
  if (newData == true) {
    String message = "";
    message = message + "{\"humidity\": ";
    message = message + myData.HumH;
    message = message + ", \"temperature\": ";
    message = message + myData.TempC;
    message = message + "}";
    Serial.println(message);
    newData = false;
  }
}

void loop() {
  getData();
  showData();
}
Below is a screen shot of the serial output of the Arduino Pro Mini connected to my PC shown what is being received from the green house and what is being sent to the PC.
Arduino Serial port screen shot
The index2.js code is listed below
const SerialPort = require('serialport');
//const Readline = new SerialPort.parsers.Readline('\n');
const port = new SerialPort('/dev/ttyUSB0', {
    baudRate: 115200
});
const fs = require('fs');
const { endianness } = require('os');
const { exit } = require('process');
const { Console } = require('console');
//const logIntervalMinutes = 1;
let lastMoment = new Date();
function tryParseJson(str) {
    try {
        JSON.parse(str);
    } catch (e) {
        console.log("JSON error")
        return false;
    }
    return JSON.parse(str);
}
console.log('Initialising...');
port.on('open', function () {
    console.log('Opened port...');
    port.on('data', function (data) {
        const sensorData = tryParseJson(data);
        console.log('Data: ' + data);
        const moment = new Date();
        fs.appendFile('log.txt', `\n${sensorData.temperature} , ${sensorData.humidity} , ${moment}`, function (err) {
            if (err) {
                console.log('Your Jason has failed to get a complete string...');
            } else {
                console.log('Logged data: ', moment);
            };
        });
    });
});
When I run node index2.js and look at the log.txt file, I see that sometimes the temp/humidity values are listed as undefined, as shown in the screen shot below.
log.txt
After a bit of debugging I saw the following in the console.log() output, as shown in the screen shot below.
Console.log() screen shot with program running.
So my problem is that every now and again, the fs.appendFile call can't determine the value of sensorData.temperature and sensorData.humidity. It still appends a record to the log.txt file, but the first two fields have undefined in them.
fs.appendFile('log.txt', `\n${sensorData.temperature} , ${sensorData.humidity} , ${moment}`, function (err) {
    if (err) {
        console.log('Your Jason has failed to get a complete string...');
    } else {
        console.log('Logged data: ', moment);
    };
});
It appears that the function tryParseJson(str) sometimes only gets some of the data and not the full JSON object. See the code below:
function tryParseJson(str) {
    try {
        JSON.parse(str);
    } catch (e) {
        console.log("JSON error")
        return false;
    }
    return JSON.parse(str);
}
I see that catch (e) gets called and my console.log("JSON error") gets printed.
I need some help to work out how to resolve this.
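One way to guarantee that tryParseJson() always receives a complete line is to put a delimiter parser between the port and the handler. A minimal sketch, assuming the Readline parser that ships with the serialport package (the commented-out require at the top of index2.js hints that this was the original intent):

const SerialPort = require('serialport');

const port = new SerialPort('/dev/ttyUSB0', { baudRate: 115200 });

// Buffer raw chunks and emit exactly one complete line per 'data' event
const parser = port.pipe(new SerialPort.parsers.Readline({ delimiter: '\n' }));

parser.on('data', function (line) {
    // line is now one full "{...}" string as sent by Serial.println()
    const sensorData = tryParseJson(line);
    if (sensorData) {
        console.log('Data: ' + line);
    }
});

With the parser in place, the 'data' callback never sees half a JSON object, so the catch (e) branch should only fire on genuinely corrupt lines.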
I made some changes to my code to check the contents of const sensorData = tryParseJson(data), as shown below,
const sensorData = tryParseJson(data);
if (sensorData.temperature == undefined || sensorData.humidity == undefined) {
    //console.log('Temperature or Humidity is undefined');
} else {
and then used an if statement to append or not append to the log.txt file.
Everything is now working, however there is still one minor issue.
I noticed something in the log.txt file. If the value of the temp or humidity is, say, 25.00, then the file will have 25, with no trailing zeros. This does not happen if the temp is, say, 25.4.
If I check the values of sensorData at this line of code, const sensorData = tryParseJson(data);, the values are correct. They only seem to change with fs.appendFile.
Any idea why?
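A hedged guess at the trailing-zero behaviour: JSON.parse converts "25.00" to the JavaScript number 25, and a template literal renders a number in its shortest form, so the formatting is lost at parse time rather than inside fs.appendFile. A quick sketch, with toFixed(2) as one way to get fixed-point output back:

const sensorData = JSON.parse('{"humidity": 25.00, "temperature": 25.40}');

console.log(`${sensorData.humidity}`);       // "25"    (shortest form)
console.log(`${sensorData.temperature}`);    // "25.4"
console.log(sensorData.humidity.toFixed(2)); // "25.00" (two fixed decimals)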

Watson Assistant context is not updated

I use Watson Assistant v1.
My problem is that every time I make a call to my Node.js code, where I return the context in order to keep the conversation coordinated, the context is only updated once and I get stuck in a single node of the conversation.
This is my code:
client.on('message', message => {
    //general variables
    var carpetaIndividual = <../../../>
    var cuerpoMensaje = <....>
    var emisorMensaje = <....>

    //detect if context exists
    if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
        var watsonContexto = require(carpetaIndividual + '/contexto.json');
        var variableContexto = watsonContexto;
    } else {
        var variableContexto = {}
    }

    //connection with Watson Assistant
    assistant.message(
        {
            input: { text: cuerpoMensaje },
            workspaceId: '<>',
            context: variableContexto,
        })
        .then(response => {
            let messageWatson = response.result.output.text[0];
            let contextoWatson = response.result.context;
            console.log('Chatbot: ' + messageWatson);

            //Save and create JSON file for context
            fs.writeFile(carpetaIndividual + '/contexto.json', JSON.stringify(contextoWatson), 'utf8', function (err) {
                if (err) {
                    console.error(err);
                }
            });

            //Send messages to my application
            client.sendMessage(emisorMensaje, messageWatson)
        })
        .catch(err => {
            console.log(err);
        });
});

client.initialize();
The contexto.json file is updated, but when it is read, the code only reads the first version of the file and not the later updates.
This will be because you are using require to read the .json file. For all subsequent requires of an already-required file, the data is cached and reused.
You will need to use fs.readFile (or fs.readFileSync) and JSON.parse:
// detect if context exists
if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
    var watsonContexto = fs.readFileSync(carpetaIndividual + '/contexto.json');
    // Converting to JSON
    var variableContexto = JSON.parse(watsonContexto);
} else {
    var variableContexto = {}
}
There is another subtle problem with your code: you are relying on your async call to fs.writeFile completing before you read the file. This will be the case most of the time, but since you don't wait for the fs.writeFile to complete, there is a chance that you may try to read the file before it is written.
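As a sketch of one way to close that gap, assuming Node 10+ so that fs.promises is available (variable names are taken from the question's code): resolve only once the write has finished, and send the reply after that, so the next read can never see a stale or partial file.

const fs = require('fs');

// Persist the Watson context; the returned promise resolves only when
// the file is fully on disk
function saveContext(carpetaIndividual, contextoWatson) {
    return fs.promises.writeFile(
        carpetaIndividual + '/contexto.json',
        JSON.stringify(contextoWatson),
        'utf8'
    );
}

// Inside the .then(response => { ... }) handler:
// saveContext(carpetaIndividual, response.result.context)
//     .then(() => client.sendMessage(emisorMensaje, messageWatson));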

Create File in Google Cloud Bucket from Firebase Cloud Function

I have a function that monitors a node in a Realtime Database, and once a new child is written to the node, the function simply needs to create an HTML document in a Google Cloud bucket. The HTML document will have a unique name and will contain some data from the node. It's all fairly straightforward; however, I can't actually create and write to the document. I've tried 3 methods so far (outlined in the code below), and none of them work.
const {Storage} = require('@google-cloud/storage');
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const fs = require('fs');
const {StringStream} = require('@rauschma/stringio');

const instanceId = 'my-project-12345';
const bucketName = 'my-bucket';
exports.processCertification = functions.database.instance(instanceId).ref('/t/{userId}/{testId}')
    .onCreate((snapshot, context) => {
        const dataJ = snapshot.toJSON();
        var testResult = "Invalid";
        if (dataJ.r == 1) { testResult = "Positive"; }
        else if (dataJ.r == 2) { testResult = "Negative"; }
        console.log('Processing certificate:', context.params.testId, testResult);

        var storage = new Storage({projectId: instanceId});
        const fileName = context.params.testId + '.html';
        const fileContents = "<html><head></head><body>Result: " + testResult + "</body></html>"
        const options = {resumable: false, metadata: {contentType: 'text/html'}};
        const bucket = storage.bucket(bucketName);
        const file = bucket.file(fileName);
        console.log('Saving to:' + bucketName + '/' + fileName);

        if (false) {
            // Test 1. the file.save method
            // Errors with:
            // (node:2) MetadataLookupWarning: received unexpected error = URL is not defined code = UNKNOWN
            file.save(fileContents, options, function(err) {
                if (!err) { console.log("Save created object at " + bucketName + "/" + fileName); }
                else { console.log("Save Failed " + err); }
            });
        } else if (true) {
            // Test 2. the readStream.pipe method
            // No errors, doesn't output error message, doesn't output finish message, no file created
            fs.createReadStream(fileContents)
                .pipe(file.createWriteStream(options))
                .on('error', function(err) { console.log('WriteStream Error'); })
                .on('finish', function() { console.log('WriteStream Written'); });
        } else {
            // Test 3. the StringStream with readStream.pipe method
            // Errors with:
            // (node:2) MetadataLookupWarning: received unexpected error = URL is not defined code = UNKNOWN
            const writeStream = storage.bucket(bucketName).file(fileName).createWriteStream(options);
            writeStream.on('finish', function() { console.log('WriteStream Written'); })
                .on('error', function(err) { console.log('WriteStream Error'); });
            const readStream = new StringStream(fileContents);
            readStream.pipe(writeStream);
        }

        console.log('Function Finished');
        return 0;
    });
In all cases the "Processing certificate" and "Saving to" outputs appear, and I also get the "Function Finished" message every time. The errors (or, in one case, the lack of any response) are noted against each of the tests in the code.
My next step will be to create the file locally and then use the upload() method; however, each of these methods seems like it should work, and since the only error message I have mentions URL errors, I suspect the upload() method would run into the same problems as well.
I'm using Node.js v8.17.0 and the following packages:
"dependencies": {
"#google-cloud/storage": "^5.0.0",
"#rauschma/stringio": "^1.4.0",
"firebase-admin": "^8.10.0",
"firebase-functions": "^3.6.1"
}
Any advice is most welcome
In each case, you are not working with promises correctly. For database triggers (and all other background triggers), you must return a promise that resolves when all of the asynchronous work is complete in a function. Right now, you're not doing anything at all with promises, while each of the APIs you're calling are all asynchronous. Your function is just returning 0 immediately without waiting for the upload to complete, and Cloud Functions is simply terminating and cleaning up before anything can happen.
I suggest choosing one of the methods that returns a promise when the upload is complete (probably file.save()), then returning that promise from the function.
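A minimal sketch of that change, assuming file.save() returns a promise when called without a callback (as it does in the @google-cloud/storage version listed in the question's dependencies), and reusing the question's instanceId, bucketName, and storage setup:

exports.processCertification = functions.database.instance(instanceId).ref('/t/{userId}/{testId}')
    .onCreate((snapshot, context) => {
        // ... build fileName, fileContents, and options exactly as above ...
        const file = storage.bucket(bucketName).file(fileName);

        // Returning the promise keeps the function alive until the upload finishes
        return file.save(fileContents, options)
            .then(() => console.log('Saved ' + bucketName + '/' + fileName));
    });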

JavaScript issue with import/export modules and Globally defined variables

(note, I'm using Node.js to create a telnet server and handle user input)
I'm having issues using variables/arrays in javascript inside functions in other scripts.
Let me set up a simpler example:
var connections = []
is an array in my main.js
broadcast.js holds a function I put in a separate module, and I attempt to use the broadcast() function in my main.js.
The error I get states that connections is undefined. How do I get broadcast.js to see the connections array in main.js?
~~
For example in my main.js I set an array to handle clients connecting to a server
//point to Telnet library
const Telnet = require('ranvier-telnet');
const logger = require('./logger.js');

var outspeak = [];
var connections = [];
var clients = [];

let server = new Telnet.TelnetServer(rawSocket => {
    let telnetSocket = new Telnet.TelnetSocket();
    //attaching socket
    telnetSocket.attach(rawSocket);
    //setting telnet options
    telnetSocket.telnetCommand(Telnet.Sequences.WILL, Telnet.Options.OPT_EOR);
    //giving clients a name
    telnetSocket.name = rawSocket.remoteAddress + ":" + rawSocket.remotePort;
    //pushing client names to array
    clients.push(telnetSocket.name);
    //pushing client connections to an array
    connections.push(rawSocket);
    console.log(`${telnetSocket.name} has connected`);
    logger(`${telnetSocket.name} has connected`);
    broadcast(telnetSocket.name + " connected.");

    telnetSocket.on('data', function (data) {
        //broadcast (telnetSocket.name + ">" + data, telnetSocket);
    });
});

function broadcast(message, sender) {
    connections.forEach(function (connection) {
        //don't want to send it to sender
        if (connection === sender) return;
        connection.write(`${message} \n`);
    });
}
Now inside my main script, I can push to that array, read from it, and use it freely, as long as the function is typed out inside the main.js file itself.
And it can easily use the broadcast function.
Now I want to make it more advanced and reduce the number of lines in my main.js, but once I separate the broadcast function into its own module:
'use strict'

//broadcast function
function broadcast(message, sender) {
    connections.forEach(function (connection) {
        //don't want to send it to sender
        if (connection === sender) return;
        connection.write(`${message} \n`);
    });
}

module.exports = broadcast;
I get a "connections is not defined" error any time I try to invoke that broadcast function. It's like my global variable/array can't be seen by the broadcast.js function.
This is how I'm invoking it:
// handle input
telnetSocket.on('data', function (data) {
    broadcast(telnetSocket.name + ">" + data, telnetSocket);
});
And yes, const broadcast = require('./broadcast.js'); has been added at the top of the file.
Here's the broken code complete:
'use strict'

//point to Telnet library
const Telnet = require('ranvier-telnet');
const logger = require('./logger.js');
const broadcast = require('./broadcast.js');

var connections = [];
var clients = [];

//had to call message as global variable
//Asan's timestamp function
//telnet stuff
console.log("Starting...");

let server = new Telnet.TelnetServer(rawSocket => {
    let telnetSocket = new Telnet.TelnetSocket();
    //attaching socket
    telnetSocket.attach(rawSocket);
    //setting telnet options
    telnetSocket.telnetCommand(Telnet.Sequences.WILL, Telnet.Options.OPT_EOR);
    //giving clients a name
    telnetSocket.name = rawSocket.remoteAddress + ":" + rawSocket.remotePort;
    //pushing client names to array
    clients.push(telnetSocket.name);
    //pushing client connections to an array
    connections.push(rawSocket);
    console.log(`${telnetSocket.name} has connected`);
    logger(`${telnetSocket.name} has connected`);
    broadcast(telnetSocket.name + " connected.");

    // handle input
    telnetSocket.on('data', function (data) {
        broadcast(telnetSocket.name + ">" + data, telnetSocket);
    });

    //removing client/connection from array
    rawSocket.on('end', function () {
        clients.splice(clients.indexOf(telnetSocket), 1);
        connections.splice(connections.indexOf(rawSocket), 1);
        broadcast(telnetSocket.name + " has left.\n");
        logger(telnetSocket.name + " has left.");
        console.log(telnetSocket.name + " has left.");
    });
}).netServer;

server.listen(4000);
console.log('ServerRunning...');
logger('>Server started.');
What am I missing here? Also, I apologize in advance: this is my first question ever asked, and I've done as much as I could today just to figure out how to ask it. Maybe I'm not using the correct lingo/terms? Any help is appreciated.
refactor\broadcast.js:5
connections.forEach(function (connection) {
^
ReferenceError: connections is not defined
In nodejs, when you declare a variable not inside any function definitions, it is scoped to the file only. (This is different from browser javascript.) If you want something to be accessible from outside, you need to export it:
module.exports.connections = connections;
Then import it into the other file:
const { connections } = require(myFile);
This will work as long as you don't try to set the value of the variable in either file, but if you do that they'll end up pointing to separate objects. But mutating it, calling methods on it, etc should work fine.
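An alternative that sidesteps shared module state entirely (a sketch, not the only option): pass the connections array into broadcast() as a parameter, so broadcast.js has no hidden dependency on main.js:

// broadcast.js
'use strict';

// connections is passed in explicitly instead of being shared globally
function broadcast(connections, message, sender) {
    connections.forEach(function (connection) {
        // don't want to send it to the sender
        if (connection === sender) return;
        connection.write(`${message} \n`);
    });
}

module.exports = broadcast;

The call sites in main.js then become broadcast(connections, telnetSocket.name + " connected.") and so on.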

Copy file from addon to profile folder

I'm trying to copy a sqlite database from the data folder in my extension directory to the profile folder, in order to use it.
So for now, I'm trying this:
const {Cc, Ci, Cu} = require("chrome");
const {NetUtil} = Cu.import("resource://gre/modules/NetUtil.jsm");
const data = require('sdk/self').data;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/FileUtils.jsm");

var file = Cc["@mozilla.org/file/directory_service;1"].
           getService(Ci.nsIProperties).
           get("TmpD", Ci.nsIFile);
file.append("searchEngines.sqlite");
file.createUnique(Ci.nsIFile.NORMAL_FILE_TYPE, 0666);

// Then, we need an output stream to our output file.
var ostream = Cc["@mozilla.org/network/file-output-stream;1"].createInstance(Ci.nsIFileOutputStream);
ostream.init(file, -1, -1, 0);

// Finally, we need an input stream to take data from.
var iStreamData = NetUtil.ioService.newChannel(data.url("searchEngines.sqlite"), null, null).open();
let istream = Cc["@mozilla.org/io/string-input-stream;1"].createInstance(Ci.nsIStringInputStream);
istream.setData(iStreamData, iStreamData.length);

NetUtil.asyncCopy(istream, ostream, function(aResult) {
    console.log(aResult); // returns 0
});

console.log(FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).exists()); // returns false

let dbConn = Services.storage.openDatabase(file);
The file doesn't seem to exist, since the console.log(file.exists()) returns FALSE, and it is not populated (the console.log(aResult) returns 0).
Where is my mistake, and is there a better way to do that?
Besides that it uses sync I/O (opening the channel with .open instead of .asyncOpen), the NetUtil.asyncCopy operation is still async, meaning the code
NetUtil.asyncCopy(istream, ostream, function(aResult) {
    console.log(aResult); // returns 0
});
console.log(FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).exists()); // returns false
let dbConn = Services.storage.openDatabase(file);
will try to open the file before the copy likely finishes!
However, file.exists() will likely be true, because you already opened the file for writing. It's just that the file is still blank because the data copy isn't done (or even started) yet. (Actually, it returns false because you're checking searchEngines.sqlite in ProfD and not TmpD; if you correct that, the previous statement applies.)
You can only use the file when/after your callback to .asyncCopy is done, e.g.
NetUtil.asyncCopy(istream, ostream, function(aResult) {
    console.log(aResult);
    console.log(FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).exists()); // still false: the file is in TmpD, not ProfD
    let dbConn = Services.storage.openDatabase(file);
    // ...
});
PS: You might want to .asyncOpen the channel, then use NetUtil.asyncFetch and pass the resulting stream to .asyncCopy to be truly async for smallish files, since this caches the contents in memory first.
For large files you could create a variant of the NetUtil.asyncFetch implementation that feeds the .outputStream end directly to NetUtils.asyncCopy. That is a bit more complicated, so I won't be writing this up in detail until somebody is truly interested in this and ask the corresponding question.
Edit, so here is how I'd write it:
const data = require('sdk/self').data;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/NetUtil.jsm");

function copyDataURLToFile(url, file, callback) {
    NetUtil.asyncFetch(url, function(istream) {
        var ostream = Cc["@mozilla.org/network/file-output-stream;1"].
                      createInstance(Ci.nsIFileOutputStream);
        ostream.init(file, -1, -1, Ci.nsIFileOutputStream.DEFER_OPEN);
        NetUtil.asyncCopy(istream, ostream, function(result) {
            callback && callback(file, result);
        });
    });
}

var file = Services.dirsvc.get("TmpD", Ci.nsIFile);
file.append("searchEngines.sqlite");
copyDataURLToFile(data.url("searchEngines.sqlite"), file, function(file, result) {
    console.log(result);
    console.log(file.exists());
    console.log(file.fileSize);
});
Try using OS.File; it's much more straightforward.
Cu.import("resource://gre/modules/FileUtils.jsm");
Cu.import("resource://gre/modules/osfile.jsm");

var fromPath = FileUtils.getFile("ProfD", ["searchEngines.sqlite"]).path;
var toPath = FileUtils.getFile("TmpD", ["searchEngines.sqlite"]).path;

var promise = OS.File.copy(fromPath, toPath);

var dbConn;
promise.then(
    function(aStat) {
        alert('success will now open connection');
        dbConn = Services.storage.openDatabase(toPath);
    },
    function(aReason) {
        console.log('promise rejected', aReason);
        alert('copy failed, see console for details');
    }
);
