I'm quite stuck on a problem with Google Apps Script and JavaScript.
I can't get the data range values sent from my Google Script to be processed by my JavaScript function.
Here is my code.
code.gs extract:
function getSheetData(ss, sh) {
  // Open the spreadsheet by ID and get the requested sheet by name
  var ass = SpreadsheetApp.openById(ss);
  SpreadsheetApp.setActiveSpreadsheet(ass);
  var ash = ass.getSheetByName(sh);
  var result = ash.getDataRange().getValues();
  Logger.log("getSheetData(ss,sh) result : " + result);
  // return result after JSON.stringify
  return JSON.stringify(result);
}
JavaScript.html extract :
function readTb(fn) {
var result = google.script.run.getSheetData(bdData, tbData);
console.dir('readTb result : ' + result);
fn(result);
}
function buildSelect(range) {
console.log('Range = ');
console.dir(range);
if (range.length > 0) {
buildOption('', 'Choose an order');
for (i = 0; i < range.length; i++) {
var row = range[i];
buildOption(row[0], row[0] + " ~ " + row[2] + " ~ " + row[5]
+ " ~ " + row[7]);
}
} else {
buildOption('', 'No order to display');
}
}
function buildOption(data) {
console.log(data);
}
/**
 * Retrieve orders and format them to fill the select numCommande input
 */
function listCommandesPesee() {
console.log('Call for orders seeking');
// Read DB and retrieve data
// feed function variables with CONST
bdData = BDDATA_OLD;
tbData = TBCOMMANDES;
// Call readTb to retrieve data, then buildSelect to format result
var promise1 = new Promise(readTb);
promise1.then(buildSelect);
console.dir(promise1);
}
At this point, Logger.log in the GS IDE shows the right data:
[16-07-27 08:01:22:040 PDT] getSheetData(ss,sh) result : Ligne,ID,Produit,Date,Fournisseur,Numéro Camion,Silo,Cellule,Ilot,Poids coop,Poids net livré,N° analyse qualité,Cellule destination,Transport
...
On page load, listCommandesPesee is called, which calls readTb().
But even with JSON.stringify, I still get an undefined value in var result (in the readTb function), so range in buildSelect doesn't have any length property.
JSON.stringify was working when I was testing, before I added the promise and chain (I think).
I'm stuck on this, so I'll be grateful for any help.
Thanks
The return value from the server cannot be received by the same function that is calling google.script.run.
Currently:
function readTb(fn) {
var result = google.script.run.getSheetData(bdData, tbData);
console.dir('readTb result : ' + result);
fn(result);
}
Should Be:
function readTb() {
  google.script.run
    .withSuccessHandler(mySuccessFnc)
    .getSheetData(bdData, tbData);
}

function mySuccessFnc(resultReturned) {
  console.dir('readTb result : ' + resultReturned);
}
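If you want to keep the Promise chain in listCommandesPesee, another option (a minimal sketch, assuming the same bdData/tbData globals and that the server still returns a JSON string) is to wrap google.script.run in a Promise yourself:

function readTb() {
  // Resolve with the server result, reject on a server-side error
  return new Promise(function (resolve, reject) {
    google.script.run
      .withSuccessHandler(resolve)
      .withFailureHandler(reject)
      .getSheetData(bdData, tbData);
  });
}

function listCommandesPesee() {
  bdData = BDDATA_OLD;
  tbData = TBCOMMANDES;
  readTb()
    .then(function (json) { return JSON.parse(json); }) // the server sent a JSON string
    .then(buildSelect);
}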
Related
I need to get a value from an Excel file when the status column value is "Y", and I want to return the value from the Name column to the calling function. The Excel sheet contains the following data:
Name     Number   status
YYYY     1234     N
XXXXX    3456     Y
The function I have written is like this:
var Excel = require('exceljs');
var workbook = new Excel.Workbook();
var selectStatus = '';
module.exports = function() {
return actor({
trimSelectName: function() {
workbook.xlsx.readFile("E:/testData.xlsx")
.then(function(sheetName) {
// use workbook
i = 1;
try {
var workSheet = workbook.getWorksheet("trim");
workSheet.eachRow({
includeEmpty: false
}, function(row, rowNumber) {
if (i == 1) {
i = 0;
} else {
currRow = workSheet.getRow(rowNumber);
console.log("Name :" + currRow.getCell(1).value + ", Number :" + currRow.getCell(2).value +
"Select Status :" + currRow.getCell(3).value);
selectStatus = currRow.getCell(3).value;
if (selectStatus == "Y") {
return selectStatus;
}
}
});
} catch (Error) {
console.log(Error);
}
});
},
});
};
But when I try to print the value from the calling function, I always get undefined.
Calling function:
const selected = trimDataSelection.trimSelectName();
Could you please let me know where the issue could be?
As I see it, your function returns an actor object, so I assume you are using the steps_file generated by CodeceptJS, which is used to extend the "I" object in order to add your custom functions. So if you want to invoke your custom function from a scenario, you should call it like this: const selected = I.trimSelectName()
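For example, from a scenario it could look like this (a rough sketch; the scenario title is made up, and in CodeceptJS 3.x the callback receives ({ I }) instead of (I)). Note that for selected to be anything other than undefined, trimSelectName itself also has to return (or resolve with) the value it finds:

Scenario('pick trim name by status', async (I) => {
  const selected = await I.trimSelectName();
  console.log('selected: ' + selected);
});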
I am having an issue getting the JavaScript for the NiFi ExecuteScript processor to work and would appreciate help with this. The goal is to pass in a flowfile that contains a JSON object. I need to parse this JSON without knowing its content/fields beforehand and write the result out to the flowfile that is passed to the next processor, which is MergeContent and counts the number of flowfiles.
I tried testing the script and got the following error:
nifi.script.ExecuteScript - ExecuteScript[id=bd6842e9-e3a4-4d88-a59d-
7da1d74d109b] ExecuteScript[id=bd6842e9-e3a4-4d88-a59d-7da1d74d109b]
failed to process due to
org.apache.nifi.processor.exception.ProcessException:
javax.script.ScriptException: <eval>:21:17 Expected : but found value
let value = json[key];
^ in <eval> at line number 21 at column number 17; rolling
back session: org.apache.nifi.processor.exception.ProcessException:
javax.script.ScriptException: <eval>:21:17 Expected : but found value
I am not very familiar with JavaScript, so I would appreciate the help.
flowFile = session.get();
if (flowFile != null) {
var StreamCallback =
Java.type("org.apache.nifi.processor.io.StreamCallback");
var IOUtils = Java.type("org.apache.commons.io.IOUtils");
var StandardCharsets = Java.type("java.nio.charset.StandardCharsets");
var transformed_message = {};
var error = false;
var line = "ops_testQueue";
flowFile = session.write(flowFile, new StreamCallback(function
(inputStream, outputStream) {
var content = IOUtils.toString(inputStream,
StandardCharsets.UTF_8); // message or content
var message_content = {};
try {
message_content = JSON.parse(content);
if(Array.isArray(message_content)){
}
Object.keys(message_content).forEach((key) => {
var value = json[key];
result.push(key + '=' + value);
var jkey = "," + "\"" + key + "\"" + '=' + value
});
line = line + jkey +
" value=" + "1"
+ " " + Date.now() * 1000000;
// Write output content
if (transformed_message) {
outputStream.write(line.getBytes(StandardCharsets.UTF_8));
}
} catch (e) {
error = true;
outputStream.write(content.getBytes(StandardCharsets.UTF_8));
}
}));
if (transformed_message.post_state) {
flowFile = session.putAttribute(flowFile, "type",
transformed_message.type);
}
if (error) {
session.transfer(flowFile, REL_FAILURE)
} else {
session.transfer(flowFile, REL_SUCCESS)
}
}
EDIT:
input to executeScript:
{"pID":"1029409411108724738",
"contentType":"text",
"published":"2018-08-14 16:48:23Z",
"crawled":"2018-08-14 12:48:33-04:00",
"ID":"765"}
output from executeScript:
ops_testQueue,"ID"=765 value=1 1534265314969999870
Am I missing something?
I saw a couple of things here:
- I don't know if Nashorn (Java's JS engine) supports the full lambda syntax; I was able to get it to work by making the lambda a regular function (see the script below).
- You refer to a json variable to get the value for a key, but I think you want message_content.
- result is not defined, so you get an error when you push to it.
Here's an edited version of your script that I got to work the way I think you want it (but please correct me if I'm wrong):
flowFile = session.get();
if (flowFile != null) {
var StreamCallback =
Java.type("org.apache.nifi.processor.io.StreamCallback");
var IOUtils = Java.type("org.apache.commons.io.IOUtils");
var StandardCharsets = Java.type("java.nio.charset.StandardCharsets");
var transformed_message = {};
var error = false;
var line = "ops_testQueue";
flowFile = session.write(flowFile, new StreamCallback(function
(inputStream, outputStream) {
var content = IOUtils.toString(inputStream,
StandardCharsets.UTF_8); // message or content
var message_content = {};
try {
message_content = JSON.parse(content);
if(Array.isArray(message_content)){
}
var jkey = "";
Object.keys(message_content).forEach(function(key) {
var value = message_content[key];
//result.push(key + '=' + value);
jkey = "," + "\"" + key + "\"" + '=' + value
});
line = line + jkey +
" value=" + "1"
+ " " + Date.now() * 1000000;
// Write output content
if (transformed_message) {
outputStream.write(line.getBytes(StandardCharsets.UTF_8));
}
} catch (e) {
error = true;
log.error(e);
outputStream.write(content.getBytes(StandardCharsets.UTF_8));
}
}));
if (transformed_message.post_state) {
flowFile = session.putAttribute(flowFile, "type",
transformed_message.type);
}
if (error) {
session.transfer(flowFile, REL_FAILURE)
} else {
session.transfer(flowFile, REL_SUCCESS)
}
}
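One more thing worth noting (and possibly why only "ID" shows up in the EDIT output above): jkey is overwritten on every pass through forEach, so only the last key survives into line. If you want every key/value pair in the output, you could accumulate instead, something like:

var jkey = "";
Object.keys(message_content).forEach(function (key) {
    var value = message_content[key];
    jkey = jkey + ",\"" + key + "\"=" + value; // append instead of overwrite
});
line = line + jkey + " value=" + "1" + " " + Date.now() * 1000000;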
So we have a large amount of data in JSON format.
We want to save it to a class (table) in our Parse app.
I wrote a JS script which can read the file and go through the JSON data.
But when I do the saving it all gets messed up: it loops on the first item forever. I understand that there is something called a promise, but I don't understand how to use it. Can anyone help? My code is given below.
function processJson(result) {
object = JSON.parse(result);
verbose.textContent = "Read " + object.results.length + " objects";
var count = object.results.length;
var countAc = 0;
logger("To save: " + count);
i = 0;
while (i < count) {
if (object.results[i].areaType == 'ac') {
save(i).then(function (object) {
i = i + 1;
logger("Success: " + object.id);
});
} else {
logger("ac not found");
i = i + 1;
}
}
}
function save(i) {
logger("ac found");
var constituency = new Constituency();
constituency.set("points", object.results[i].points);
constituency.set("areaType", object.results[i].areaType);
constituency.set("name", object.results[i].name);
constituency.set("state", object.results[i].state);
constituency.set("index", object.results[i].index);
constituency.set("pc", object.results[i].pc);
constituency.set("center", object.results[i].center);
constituency.set("oldObjectId", object.results[i].objectId);
return constituency.save();
/*constituency.save().then(function(obj) {
// the object was saved successfully.
i = i + 1;
logger("Success: " + obj.id);
}, function(error) {
// the save failed.
logger(error.message);
i = i + 1;
});*/
}
I would do something like this:
function processJson(result) {
var object = JSON.parse(result);
for (var i = 0; i < object.results.length; i++){
var parseObject = createParseObjectFromJSONObject(object.results[i]);
parseObject.save(null).then(function(object){
console.log("object saved: " + object.id);
},function(error){
console.log("error: " + error);
});
}
}
function createParseObjectFromJSONObject(jsonObject){
var constituency = new Constituency();
constituency.set("points", jsonObject.points);
constituency.set("areaType", jsonObject.areaType);
constituency.set("name", jsonObject.name);
constituency.set("state", jsonObject.state);
constituency.set("index", jsonObject.index);
constituency.set("pc", jsonObject.pc);
constituency.set("center", jsonObject.center);
constituency.set("oldObjectId", jsonObject.objectId);
return constituency;
}
You can do it even better.
You can first push all the Parse objects into an array and then call saveAll to save all of them in one request. This solution is good for < 1000 records; if you have more than 1000, you can do paging (saveAll the first 1000, then the next 1000, and so on; see the sketch after the code below).
In this version your code will look like this:
function processJson(result) {
var object = JSON.parse(result);
var allObjects = [];
for (var i = 0; i < object.results.length; i++){
var parseObject = createParseObjectFromJSONObject(object.results[i]);
allObjects.push(parseObject);
}
// outside the loop we are ready to save all the objects in
// allObjects array in one service call!
if (allObjects.length > 0){
Parse.Object.saveAll(allObjects).then(function(){
console.log("all objects were saved!");
// all object ids are now available under the allObjects array..
},function(error){
console.log("error: " + error);
});
}
}
function createParseObjectFromJSONObject(jsonObject){
var constituency = new Constituency();
constituency.set("points", jsonObject.points);
constituency.set("areaType", jsonObject.areaType);
constituency.set("name", jsonObject.name);
constituency.set("state", jsonObject.state);
constituency.set("index", jsonObject.index);
constituency.set("pc", jsonObject.pc);
constituency.set("center", jsonObject.center);
constituency.set("oldObjectId", jsonObject.objectId);
return constituency;
}
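The paging mentioned above could look roughly like this (only a sketch: the chunk size of 1000 and the use of Parse.Promise.as are assumptions; with a newer SDK, Promise.resolve() plays the same role):

function saveAllInChunks(allObjects, chunkSize) {
    // split the array into chunks of at most chunkSize objects
    var chunks = [];
    for (var i = 0; i < allObjects.length; i += chunkSize) {
        chunks.push(allObjects.slice(i, i + chunkSize));
    }
    // save one chunk after the other by chaining the promises
    return chunks.reduce(function (promise, chunk) {
        return promise.then(function () {
            return Parse.Object.saveAll(chunk);
        });
    }, Parse.Promise.as());
}

// usage:
// saveAllInChunks(allObjects, 1000).then(function () { console.log("all chunks saved!"); });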
Good Luck :)
I am trying to open serial port 2 on my BeagleBone, using the following code:
var b = require('bonescript');
var x = '/dev/ttyO2';
var SerialPort = require("serialport").SerialPort;
var serialPort = new SerialPort('/dev/ttyO2', {
baudrate: 115200,
parser: b.serialParsers.readline("\n")
});
The complete code:
var b = require('bonescript');
var x = '/dev/ttyO2';
var SerialPort = require("serialport").SerialPort;
var serialPort = new SerialPort('/dev/ttyO2', {
baudrate: 115200,
parser: b.serialParsers.readline("\n")
});
b.pinMode("P9_17", b.OUTPUT);
var countTry =2;
var i = 0; // to loop over the array
var waiting_interval = 3000; // waiting for every slave to reply
var deli;
var slaves = ["S1", "S2" , "S3", "S4", "S5", "S6"];
var counter=[0 , 0 , 0 , 0 ,0 ,0];
var slave_exists = false;
serialPort.on('open',function onSerialOpen(){
console.log("opened");
serialPort.on('data', function listenToSlaves(data){
i--;
if(data.search("END" + slaves[i]) ==0){
console.log("ENDED");
slave_exists = true;
counter[i]=0;
}
else{
// if(data!="END" + slaves[i]){
if(data.search("END" + slaves[i])!==0){
deli = data.indexOf(":");
var parameter = data.substring(0, deli);
var value = data.substring(deli +1);
console.log("parameter is: " + parameter + " - Value is: " + value);
}
}
if(slave_exists){
counter[i] =0;
}
i++;
});
writeToSlaves();
});
function writeToSlaves(){
//If the previous slave (the slave before the one I am sending to
//in the next step) doesn't exist, increment the counter or consider
//it as not existing
if(!slave_exists){
counter[i-1]+=1;
if(counter[i-1]>=countTry){
console.log("--------counter[" + i + "]: " + counter[i]);
// in case that the slave returned no data after trying
//to send him several times
console.log(slaves[i-1] + " doesn't exist");
}
}
//sending to the following slave
b.digitalWrite("P9_17", b.HIGH);
serialPort.write(slaves[i], function(){ slave_exists = false;});
b.digitalWrite("P9_17", b.LOW);
console.log("I wrote to slave: " + i);
if(i<slaves.length - 1) i++;
else i=0;
setTimeout(writeToSlaves, waiting_interval);
}
but I am always facing this error:

events.js:72
        throw er; // Unhandled 'error' event
              ^
Error: Cannot open /dev/ttyO2
I run another file first (the code below), then I try to rerun the previous code, and it runs perfectly. I need to do that whenever I want to run the first code!
The code that runs fine the first time is here
(I tried the following code alone; it writes to the serial port but doesn't receive, no event on reception):
var b = require('bonescript');
var rxport = '/dev/ttyO2';
var txport = '/dev/ttyO2';
var options = { baudrate: 115200, parity: 'even', parser: b.serialParsers.readline('\n') };
var teststring = "This is the string I'm sending out as a test";
b.serialOpen(rxport, options, onRxSerial);
function onRxSerial(x) {
console.log('rx.eventrx= ' + x.event);
if(x.err) throw('***FAIL*** ' + JSON.stringify(x));
if(x.event == 'open') {
//readReapeatedly();
b.serialOpen(txport, options, onTxSerial);
}
if(x.event == 'data') {
console.log("I am receiving on rxport");
console.log('rx (' + x.data.length +
') = ' + x.data.toString('ascii'));
}
}
function onTxSerial(x) {
console.log('tx.event = ' + x.event);
if(x.err) throw('***FAIL*** ' + JSON.stringify(x));
if(x.event == 'open') {
writeRepeatedly();
}
if(x.event == 'data') {
// console.log('tx (' + x.data.length +
// ') = ' + x.data.toString('ascii'));
console.log(x.data);
}
}
function printJSON(x) {
console.log(JSON.stringify(x));
}
function writeRepeatedly() {
console.log("write to serial");
b.serialWrite(txport, teststring, onSerialWrite);
console.log("I have sent data");
}
function onSerialWrite(x) {
console.log("Iam in the onSerialWrite function");
if(x.err) console.log('onSerialWrite err = ' + x.err);
if(x.event == 'callback') {setTimeout(writeRepeatedly, 5000);
console.log("HERE");
}
}
The problem was solved.
In /boot/uboot/uEnv.txt, update the line "#cape_enable=capemgr.enable_partno=" to be:
"cape_enable=capemgr.enable_partno=BB-UART1,BB-UART2,BB-UART4,BB-UART5"
or add that line to the mentioned file. In some cases, you need to try this line instead of the one above:
"optargs=capemgr.enable_partno=BB-UART1,BB-UART2,BB-UART4,BB-UART5" (this was my case, but it disabled the HDMI interface of my BBB).
You can specify the UARTs you want to enable.
A helpful webpage is here.
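Separately from the cape settings, the crash itself ("Unhandled 'error' event") can be avoided by attaching an error handler to the serial port, so a failed open is logged instead of killing the process. A minimal sketch against the older serialport API the question uses (the handler message is just an example):

var SerialPort = require("serialport").SerialPort;
var serialPort = new SerialPort('/dev/ttyO2', {
  baudrate: 115200
});

// without this handler a failed open crashes node with "Unhandled 'error' event"
serialPort.on('error', function (err) {
  console.log('serial port error: ' + err);
});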
I'm writing a simple Windows 8 Metro UI app with JavaScript. Because there is no native method from Microsoft to use SQLite with JavaScript in Metro UI, I use the 'doo' wrapper:
dooWrapper SQLite (github)
I created a method:
function addSomething(name) {
var dbPath = Windows.Storage.ApplicationData.current.localFolder.path + '\\a_db.sqlite';
SQLite3JS.openAsync(dbPath).then(function (db) {
return db.runAsync("INSERT INTO STH (nazwa) VALUES (:name)", { name: name }).
done(function () {
console.log('Add sth : ' + name);
db.close();
}, function (error) {
if (db) {
db.close();
}
console.log('ERROR Adding sth' + error.message);
})
});
}
I get the error 'database is locked'. I read about this error in the documentation. But I have one question: is there another solution to add more rows without creating an 'insert' function that takes a collection argument, something like insert(array)? I just want to use that function n times without this error. Is that possible?
Yes, it's possible. I also got this error before. For that you just need to establish the database connection once; I have used this in my app and it's working fine.
If there is no need to close your db, then just open the database once, like this.
Add this code to the default.js file:
var myDatabase; //Global Variable
var dbPath = Windows.Storage.ApplicationData.current.localFolder.path + '\\db.sqlite';
//Create Table
SQLite3JS.openAsync(dbPath).then(function(db) {
myDatabase=db;
return db.runAsync('CREATE TABLE Item (name TEXT, price REAL, id INT PRIMARY KEY)');
});
Then you just need to use the code below.
// For Insert
return myDatabase.runAsync('INSERT INTO Item (name, price, id) VALUES ("' + array[i].name + '", "48484", 1)');
For an array:
var dbPromises = [];
var testArray = [];
//only for test purpose
//You can pass your array here directly
for (var a = 0; a < 300; a++) {
var obj = {
name: "Mango"+a,
price: 100+a,
id: a
};
testArray.push(obj);
}
for (var i = 0; i < testArray.length; i++) {
var query = 'INSERT OR REPLACE INTO Item (name, price, id) VALUES ("' + testArray[i].name + '",' + testArray[i].price + ',' + testArray[i].id + ')';
dbPromises.push(myDatabase.allAsync(query));
}
WinJS.Promise.join(dbPromises).then(function () {
debugger;
}, function(err) {
debugger;
});
The above code should only be used for a small array, because it takes too much time for the insertion.
For faster execution you should use the code below instead:
// initialise the statement prefix before the loop
var query = 'INSERT OR REPLACE INTO Item (name, price, id) VALUES ';
for (var i = 0; i < testArray.length; i++) {
    var val = '("' + testArray[i].name + '",' + testArray[i].price + ',' + testArray[i].id + '),';
    query = query + val;
    // flush a batch of 300 rows per INSERT statement
    if ((i + 1) % 300 == 0 || (i + 1) == testArray.length) {
        query = query.replace(/,$/, "");
        dbPromises.push(myDatabase.allAsync(query));
        query = 'INSERT OR REPLACE INTO Item (name, price, id) VALUES ';
    }
}
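As before, you would then wait for the batched statements to finish with WinJS.Promise.join, the same pattern as above:

WinJS.Promise.join(dbPromises).then(function () {
    console.log('all batched inserts finished');
}, function (err) {
    console.log('batched insert failed: ' + err);
});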