findSuccess method itemsList is always undefined - javascript

I am new to Tizen and working on a small application, and I am not able to figure out what the problem is.
I am using these lines; it was working fine before, but not anymore:
var audioOnly = new tizen.AttributeFilter('type', 'EXACTLY', 'AUDIO');
tizen.content.find(findSuccess, findError, null, audioOnly);
Here is the code for findSuccess, which writes lines to the log:
function findSuccess(itemsList) {
    console.log('total items: ' + itemsList);
    console.log(itemsList.name + ' etc...');
}
In the findSuccess method, itemsList is always undefined; no objects are fetched even though there are files on the device. All settings are in place: the read and write permissions are set in the config.xml file.
This is Tizen Web API code.

I tried with the code below.
function findSuccess(items) {
    for (var i in items) {
        console.log('Item title: ' + items[i].title);
        console.log('Item URI: ' + items[i].contentURI);
        console.log('Item type: ' + items[i].type);
    }
}

function onError(error) {
    console.log('Error: ' + error);
}

// Function to get a list of certain media files
function getSelectedMediaList() {
    var mediasource = tizen.content;
    var type = 'AUDIO';
    var filter = new tizen.AttributeFilter("type", "EXACTLY", type);
    try {
        mediasource.find(findSuccess, onError, null, filter);
    } catch (exc) {
        console.log("findItems exception: " + exc.message);
    }
}

getSelectedMediaList();
Don't forget to add privileges in config.xml
<tizen:privilege name="http://tizen.org/privilege/content.write"/>
<tizen:privilege name="http://tizen.org/privilege/content.read"/>
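To confirm that the success callback actually receives results, it can help to log how many items came back before touching their properties. A minimal sketch (assuming the privileges above are already declared in config.xml):

function findSuccess(items) {
    // items is the array of content items passed by tizen.content.find()
    console.log('total items: ' + items.length);
    if (items.length === 0) {
        console.log('No AUDIO content found on the device.');
        return;
    }
    // Only read item properties once we know the array is not empty
    console.log('First item title: ' + items[0].title);
}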


'RequestDeviceOptions': The provided value cannot be converted to a sequence

I am using a modified version of the sample code for Bluetooth device information using the Web Bluetooth API.
The Bluetooth device was tested independently using the NRF Connect app on my iPhone, and everything works as intended. I used the UART service UUID as shown in the NRF Connect app and used it to set optionalServices. See the following code snippet.
let UARTService = "6e400001-b5a3-f393-e0a9-e50e24dcca9e";
let UARTCharRX = "6e400002-b5a3-f393-e0a9-e50e24dcca9e";
let UARTCharTX = "6e400003-b5a3-f393-e0a9-e50e24dcca9e";

let options = {};
//options.services = UARTService;
if (document.querySelector('#allDevices').checked) {
    options.acceptAllDevices = true;
    options.optionalServices = UARTService;
} else {
    options.filters = filters;
}

console.log('Requesting Bluetooth Device.........');
console.log('with ' + JSON.stringify(options));
navigator.bluetooth.requestDevice(options)
    .then(device => {
        console.log('> Name: ' + device.name);
        console.log('> Id: ' + device.id);
        console.log('> Connected: ' + device.gatt.connected);
        return device;
    })
    .then(device => {
        return device.gatt.connect();
    })
    .then(function (server) {
        console.log("Connected ? " + server.connected);
        return server.getPrimaryService(UARTService);
    })
    .then(function (result) {
        console.log("Device information ", result);
    })
    .catch(error => {
        console.log('Argh! ' + error);
    });
I get the following error:
Browser is WebBluetoothEnabled
device-info.js:37 Requesting Bluetooth Device.........
device-info.js:38 with {"acceptAllDevices":true,"optionalServices":"6e400001-b5a3-f393-e0a9-e50e24dcca9e"}
device-info.js:57 Argh! TypeError: Failed to execute 'requestDevice' on 'Bluetooth': Failed to read the 'optionalServices' property from 'RequestDeviceOptions': The provided value cannot be converted to a sequence.
Searching for "'RequestDeviceOptions': The provided value cannot be converted to a sequence." on Stack Overflow and in GitHub issues turned up no answers.
I suspected a wrong service UUID, but checking it twice showed it was correct.
optionalServices needs to be a list:
options.optionalServices = [ UARTService ];
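For context, here is roughly how the request could look with the fix applied (a sketch based on the snippet above, not the exact page code):

let UARTService = "6e400001-b5a3-f393-e0a9-e50e24dcca9e";

// optionalServices must be a sequence (array) of UUIDs; passing a bare
// string is what triggers the "cannot be converted to a sequence" TypeError.
let options = {
    acceptAllDevices: true,
    optionalServices: [UARTService]
};

navigator.bluetooth.requestDevice(options)
    .then(device => device.gatt.connect())
    .then(server => server.getPrimaryService(UARTService))
    .then(service => console.log('Got UART service:', service.uuid))
    .catch(error => console.log('Argh! ' + error));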

nodeJS + MongoDB - TypeError: Cannot read property 'collection' of null

I started to work with Node.js and MongoDB, and right at the start, after I connect to the DB, I get this error.
I have read every article on the internet and it still doesn't work...
I open one connection to MongoDB and try to insert data. The code:
var insertUserMovieRating = function(successCallback, errCallback, movieName, rating) {
    var url = config.db.databaseType + '://' + config.user + ':' + config.password + '#' + config.db.server + ':' + config.db.port + '/' + config.db.databaseName;
    MongoClient.connect(url, function(err, db) {
        if (err) {
            errCallback(err);
        }
        if (db != null) {
            console.log("insert");
            var collection = db.collection(config.db.databaseName);
            collection.update({
                title: movieName
            }, {
                '$push': {
                    'rating': rating
                }
            }, {
                upsert: true
            }, function(err, res) {
                if (err) errCallback(err);
                successCallback("ok");
                // db.close();
            });
        }
    });
};
How do I solve this problem? Seriously, I have been sitting on this for 5 hours and still haven't solved it. I really appreciate any help I can get here, thanks!
SOLVED: the solution was to write the connection string directly into the url variable rather than building it from other variables.
Thanks to all people who helped me!
I think you have an issue with db.collection(config.db.databaseName). Your database is not the same as the collections it holds. It looks like you aren't actually getting a handle on the proper collection.
Maybe try something like
var collection = db.collection('movies'); // or whatever the name of your collection is
For issues like these I'd always suggest opening up the MongoDB shell and figuring out the syntax and getting it to work in there. Once you have the solution you can just paste it into your Node.js code.
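As a minimal sketch of that idea with the Node.js driver (reusing movieName, rating, and the callbacks from the question, and assuming the collection is called movies):

// Look the collection up by name; the db handle does not expose
// collections as properties in the Node.js driver.
var collection = db.collection('movies');
collection.update(
    { title: movieName },
    { $push: { rating: rating } },
    { upsert: true },
    function (err, res) {
        if (err) return errCallback(err);
        successCallback('ok');
    }
);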
I think you have missed passing your collection name, and you need error handling in case the collection is not defined before updating records in it.
var insertUserMovieRating = function(successCallback, errCallback, movieName, rating) {
    // Note the '@' between the credentials and the host.
    var url = config.db.databaseType + '://' + config.user + ':' + config.password + '@' + config.db.server + ':' + config.db.port + '/' + config.db.databaseName;
    // url = 'mongodb://admin:password@localhost:27017/db'
    // make sure your url looks like the line above
    MongoClient.connect(url, function(err, db) {
        if (err) {
            errCallback(err);
        }
        if (db) {
            console.log("insert");
            var collectionName = 'movies'; // or whatever your collection is called; it is similar to a table name in SQL
            var collection = db.collection(collectionName);
            if (!collection) {
                errCallback('Collection is not defined in database');
            }
            collection.update({
                title: movieName
            }, {
                '$push': {
                    'rating': rating
                }
            }, {
                upsert: true
            }, function(err, res) {
                if (err) errCallback(err);
                successCallback("ok");
                // db.close();
            });
        }
    });
};
Please check the documentation posts here and here.
Hope this will help you !!

Error: CALL_AND_RETRY_LAST Allocation failed - Javascript heap out of memory

I'm currently learning Node.js and JavaScript. I'm trying to develop an application to read and download mangas.
First I want to build up a database. Here is where I encounter the problem.
When I run my program on my server, which has 4 GB of RAM (to fill my DB), I get the fatal error "JavaScript heap out of memory".
When I run the same program on my local computer with 8 GB of RAM, everything works as it's supposed to.
Here is the code where I fill up my DB with manga chapters:
function insertChapters(callback) {
    sql_selectAll("Mangas", function (selectError, selectResult) {
        if (!selectError) {
            selectResult.forEach(function (mangaItem, mangaIndex) {
                gin.mangafox.chapters(mangaItem.Title)
                    .then(chapters => {
                        chapters.forEach(function (chapterItem) {
                            var Chapter = {
                                Title: chapterItem.name,
                                NR: chapterItem.chap_number,
                                URL: chapterItem.src,
                                MangaID: mangaItem.MangaID,
                                MangaName: mangaItem.Title,
                                VolumeNR: chapterItem.volume
                            };
                            sql_insertInto("Chapters", Chapter, function (insertError, insertResult) {
                                if (!insertError) {
                                    var insertedChapter =
                                        "------------------------------------------------------------------------\n" +
                                        " Added new Chapter: " + Chapter.NR + " For: " + mangaItem.Title + "\n" +
                                        "------------------------------------------------------------------------\n";
                                    callback(null, insertedChapter, insertResult);
                                } else {
                                    if (insertError.code === "ER_DUP_ENTRY") {
                                        var dupEntry =
                                            "------------------------------------------------------------------------\n" +
                                            " Duplicate Entry: Chapter: " + Chapter.NR + " For: " + mangaItem.Title + "\n" +
                                            "------------------------------------------------------------------------\n";
                                        callback(null, dupEntry, null);
                                    } else {
                                        callback(insertError, null, null);
                                    }
                                }
                            });
                        });
                    })
                    .catch(fetchChapterError => {
                        callback(fetchChapterError, null, null);
                    });
            });
        } else {
            callback(selectError, null, null);
        }
    });
}
I don't really know how to solve this problem, because I'm not sure what the problem is:
Is the problem simply that I don't have enough RAM in my server?
Do I have a problem with my code? Am I leaking memory somewhere?
Is it possible that my code really needs that much memory?
Thank you so much in advance, I appreciate every bit of help I can get.
EDIT:
function sql_selectAll(tableName, callback) {
    var sql = 'SELECT * FROM `' + tableName + '`';
    connection.query(sql, function (err, selectAllResult) {
        callback(err, selectAllResult);
    });
}

function sql_insertInto(tableName, insertionObject, callback) {
    var sql = 'insert into ' + tableName + ' set ?';
    connection.query(sql, insertionObject, function (err, insertResult) {
        callback(err, insertResult);
    });
}
You are calling the mangafox endpoint for every SQL result simultaneously, rather than one at a time or in chunks. You can try using async/await for this. I'm not familiar with the sql API you are using, but assuming that the methods sql_selectAll and sql_insertInto return promises if they aren't given a callback, you can rewrite your function to something like this:
async function insertChapters(callback) {
    try {
        const mangas = await sql_selectAll("Mangas");
        for (const mangaItem of mangas) {
            const chapters = await gin.mangafox.chapters(mangaItem.Title);
            for (const chapterItem of chapters) {
                const Chapter = {
                    Title: chapterItem.name,
                    NR: chapterItem.chap_number,
                    URL: chapterItem.src,
                    MangaID: mangaItem.MangaID,
                    MangaName: mangaItem.Title,
                    VolumeNR: chapterItem.volume
                };
                try {
                    const insertResult = await sql_insertInto("Chapters", Chapter);
                    const insertedChapter =
                        "------------------------------------------------------------------------\n" +
                        " Added new Chapter: " + Chapter.NR + " For: " + mangaItem.Title + "\n" +
                        "------------------------------------------------------------------------\n";
                    callback(null, insertedChapter, insertResult);
                } catch (error) {
                    if (error.code === "ER_DUP_ENTRY") {
                        const dupEntry =
                            "------------------------------------------------------------------------\n" +
                            " Duplicate Entry: Chapter: " + Chapter.NR + " For: " + mangaItem.Title + "\n" +
                            "------------------------------------------------------------------------\n";
                        callback(null, dupEntry, null);
                    } else {
                        throw error;
                    }
                }
            }
        }
    } catch (error) {
        callback(error, null, null);
    }
}
Notice the await keywords - these are allowed in async functions and essentially tell the JS engine to pause execution of the async function and do something else until the awaited promise resolves.
Also notice that you can use regular old fashioned try/catch blocks when handling promises using the await keyword! I didn't do anything special to handle the fetchChapterError because it will be handled by the outermost catch block! Also, for the insertion errors, if it's not a duplicate entry, we can just rethrow the error and let that get caught by the outermost catch block as well.
If your SQL functions do not return promises, then on Node version 8 (latest) you can use util.promisify:
const util = require('util');
sql_selectAll = util.promisify(sql_selectAll);
If you are not using Node 8, then you can use another promisify implementation (for example, check out bluebird), or you can write your own pretty easily (read the MDN article on promises).
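As a rough sketch of what such a hand-rolled wrapper could look like (assuming the (err, result) callback signature used by the helpers above):

// Wrap a callback-style helper in a Promise by hand.
function sql_selectAllAsync(tableName) {
    return new Promise(function (resolve, reject) {
        sql_selectAll(tableName, function (err, result) {
            if (err) {
                reject(err);
            } else {
                resolve(result);
            }
        });
    });
}

// Usage inside an async function:
// const mangas = await sql_selectAllAsync("Mangas");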

Rx.js, using Subject to multicast from Observable

Are there any Rx.js experts out there? I'm trying to multicast an observable by using a subject, as per the instructions on any number of websites, including the Rx.js docs.
var mainDataSource = Rx.Observable.from(summaries[0].added)
    // add a timestamp to each live entry as it passes through
    .timestamp()
    .map(function(scriptElement) {
        var array = [scriptElement, scriptElement.timestamp];
        return array;
    })
    // check contents of the array
    .do(function(array) { console.log(array); });

var multicaster = new Rx.Subject();
var subSource = mainDataSource.subscribe(multicaster);

// attach the inline observer to the multicaster subject
multicaster.subscribe(
    function (x) { console.log('Value published to inlineScriptObserver: ' + x); },
    function (e) { console.log('onError: ' + e.message); },
    function () { console.log('inlineScriptObserver onCompleted'); }
);

// attach the external observer to the multicaster subject
multicaster.subscribe(
    function (x) { console.log('Value published to externalScriptObserver: ' + x); },
    function (e) { console.log('onError: ' + e.message); },
    function () { console.log('externalScriptObserver onCompleted'); }
);
And the output I'm getting is as follows:
[Object, 1493425651491]
inlineScriptObserver onCompleted
externalScriptObserver onCompleted
So the Subject and the Observable are clearly connected, as the onCompleted event is being transmitted from one to the other. However, no data is travelling alongside it. The data, in the correct format, is there at the end of the Observable, but it is not printed to the console from the Subject's observers.
What am I missing? It's eluding me.
OK, it may be bad form to answer your own question, but in case anyone else comes along with the same problem...
The docs I read must have been outdated, relating to Rx.js 4 rather than 5, or something. As of today, and according to this page,
https://github.com/ReactiveX/rxjs/blob/master/doc/subject.md
the correct syntax for the above example is as follows:
var multicaster = new Rx.Subject();
var mainDataSource = Rx.Observable.from(summaries[0].added)
    // add a timestamp to each live entry as it passes through
    .timestamp()
    // log the timestamp for testing purposes
    .do(function(scriptElement) { console.log("mainDataSource Timestamp: " + scriptElement.timestamp); })
    // include the timestamp in the array and deliver that array to subscribers
    .map(function(scriptElement) { var array = [scriptElement, scriptElement.timestamp]; return array; })
    // check contents of the array
    .do(function(array) { console.log(array); });

var multicastedDataSource = mainDataSource.multicast(multicaster);

// attach the inline observer to the multicaster subject
multicastedDataSource.subscribe(val => console.log(val), null, () => console.log('inlineScriptObserver complete'));

// attach the external observer to the multicaster subject
multicastedDataSource.subscribe(val => console.log(val), null, () => console.log('externalScriptObserver complete'));

multicastedDataSource.connect();
The key differences are the use of multicast() rather than subscribe() on the observable, and the requirement to call connect() on the multicasted observable (which pipes the source through the subject) once the observers have subscribed.
No wonder my older Rx.js book was so cheap on Amazon...
Either subscribe before firing any events or use a ReplaySubject. See the working fiddle:
var mainDataSource = Rx.Observable.from([1, 2, 3])
    // add a timestamp to each live entry as it passes through
    .timestamp()
    .map(function(scriptElement) {
        var array = [scriptElement, scriptElement.timestamp];
        return array;
    })
    // check contents of the array
    .do(function(array) { console.log(array); });

var multicaster = new Rx.ReplaySubject();
var subSource = mainDataSource.subscribe(multicaster);

// attach the inline observer to the multicaster subject
multicaster.subscribe(
    function (x) { console.log('Value published to inlineScriptObserver: ' + x); },
    function (e) { console.log('onError: ' + e.message); },
    function () { console.log('inlineScriptObserver onCompleted'); }
);

// attach the external observer to the multicaster subject
multicaster.subscribe(
    function (x) { console.log('Value published to externalScriptObserver: ' + x); },
    function (e) { console.log('onError: ' + e.message); },
    function () { console.log('externalScriptObserver onCompleted'); }
);
The output is:
[Object, 1493467831996]
[Object, 1493467831999]
[Object, 1493467832000]
Value published to inlineScriptObserver: [object Object],1493467831996
Value published to inlineScriptObserver: [object Object],1493467831999
Value published to inlineScriptObserver: [object Object],1493467832000
inlineScriptObserver onCompleted
Value published to externalScriptObserver: [object Object],1493467831996
Value published to externalScriptObserver: [object Object],1493467831999
Value published to externalScriptObserver: [object Object],1493467832000
externalScriptObserver onCompleted

[stompit STOMP client] Failover not working properly with STOMP producer

I am using the stompit STOMP client (GitHub - https://github.com/gdaws/node-stomp).
I am using the ConnectFailover API for reconnect management. I have the code below:
var stompit = require('stompit');

var reconnectOptions = {
    'maxReconnects': 100,
    'randomize': false
};

var connManager = new stompit.ConnectFailover("failover:(stomp://mqbroker.nyc:61613,stomp://failovermqbroker.nyc:61613)", reconnectOptions);

connManager.on('error', function(error) {
    var connectArgs = error.connectArgs;
    var address = connectArgs.host + ':' + connectArgs.port;
    console.error('Could not connect to ' + address + ' : ' + error.message);
});

connManager.on('connecting', function(connector) {
    var address = connector.serverProperties.remoteAddress.transportPath;
    console.log('Connecting to ' + address);
});

var totalMsgs = 50;
var count = 0;
var delayMs = 10000;

connManager.connect(function(error, client, reconnect) {
    if (error) {
        console.log("terminal error, given up reconnecting: " + error);
        return;
    }
    client.on('error', function(error) {
        // destroy the current client
        client.destroy(error);
        // calling reconnect is optional and you may not want to reconnect if the
        // same error will be repeated.
        reconnect();
    });
    var sendParams = {
        'destination': '/queue/myqueue',
        'persistent': 'true'
    };
    function sendMsg() {
        setTimeout(function () {
            console.log('sending message ' + (count));
            client.send(sendParams).end('Hello number ' + (count));
            if (count++ < totalMsgs) {
                sendMsg(count);
            } else {
                client.send(sendParams).end('DISCONNECT');
                client.disconnect();
                console.log("Done.");
            }
        }, delayMs);
    }
    sendMsg();
});
The problem is that when the client gets disconnected from the message broker, the producer keeps executing the sendMsg code, and this causes a loss of 2-3 messages in between. I want the client to stop sending while in the disconnected state and resume once it is connected to the failover instance.
Am I using the API incorrectly? What would be the correct way to achieve this?
I have hacked at it for some time, but this API has very little documentation on how to use its features. I appreciate all the help.
Thanks,
xabhi
There is no problem with the API, but with the setTimeout code. I should clear the timeout when the client sees a connection failure.
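For illustration, a minimal sketch of that idea, keeping a handle on the pending timer and clearing it in the client's error handler (pendingSend is introduced here just for the example; the other names come from the question's code):

var pendingSend = null;

function sendMsg() {
    pendingSend = setTimeout(function () {
        console.log('sending message ' + count);
        client.send(sendParams).end('Hello number ' + count);
        if (count++ < totalMsgs) {
            sendMsg();
        } else {
            client.disconnect();
        }
    }, delayMs);
}

client.on('error', function (error) {
    // Stop the send loop so nothing is "sent" while disconnected;
    // it can be restarted from the connect callback after reconnect().
    clearTimeout(pendingSend);
    client.destroy(error);
    reconnect();
});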
