I'm trying to build my own little neural network library.
Every time I run my code, it builds a new network with random weights.
Then I train it like 20,000 times:
const trainingData = [
  { data: [0, 1], target: [1] },
  { data: [1, 0], target: [1] },
  { data: [1, 1], target: [0] },
  { data: [0, 0], target: [0] }
]
const nn = new FNN(2,6,12,6,1)
for (let i = 0; i < 20000; i++) {
  const index = Math.floor(Math.random() * 4)
  nn.train(trainingData[index].data, trainingData[index].target)
}
console.log(nn.query([0,1]))
console.log(nn.query([1,0]))
console.log(nn.query([0,0]))
console.log(nn.query([1,1]))
As you can see, I'm trying to solve the XOR problem with a 4/5-layer network (I know it's a bit overkill).
But when I run my code a couple of times, the outputs are sometimes wrong:
$ deno run sketch.ts
[ 0.9808040222512294 ]
[ 0.9808219014520584 ]
[ 0.009098634709446591 ]
[ 0.009259505045600065 ]
$ deno run sketch.ts
[ 0.984698425823075 ]
[ 0.9844728486048201 ]
[ 0.010107497773167363 ]
[ 0.010367109588917735 ]
$ deno run sketch.ts
[ 0.9856540170897103 ]
[ 0.5163204342937323 ] <-- this should be 1
[ 0.02873017555538064 ]
[ 0.516320908619891 ] <-- this should be 0
What could be the problem here?
Is it because of the random weights?
Interestingly, the wrong outputs are always really close to each other.
I'm using the sigmoid function and a learning rate of 0.2.
The random weights are between -1 and 1 (Math.random() * 2 - 1).
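For reference, here is the setup I just described as a minimal sketch (the helper names are only for this sketch; FNN is my own class):
const sigmoid = x => 1 / (1 + Math.exp(-x)) // activation used in every layer
const dsigmoid = y => y * (1 - y) // sigmoid derivative, given y = sigmoid(x)
const randomWeight = () => Math.random() * 2 - 1 // uniform in [-1, 1)
const learningRate = 0.2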
I'm trying to migrate some DataWeave code to JavaScript and I'm running into issues with my calculations.
I also have no documentation to work from except this code:
var commonDenominator = ((myClassMap map (sub) -> sub reduce ((item, denominator = 0) -> item * perClassMultiplier[$$])) reduce ($$+$)) as Number
From what I understand, it reads myClassMap and tries to find the common denominator of everything.
I'm trying to convert it, but my code computes a greatest common denominator instead, and my calculations are off by a factor of about 100, I believe. Maybe I'm wrong...
Entry:
myClassMap -> [ [ 1, 150 ], [ 1, 0 ], [ 1.5, 8 ], [ 1.5, 0 ] ]
perClassMultiplier -> [ 1, 1, 1.5, 1.5 ]
Code
// Euclid's algorithm; defined before use so the loop below can call it
const greaterCommonDenominator = function(a, b) {
  if (!b) {
    return a;
  }
  return greaterCommonDenominator(b, a % b);
};

const data = { commonDenominator: null };
myClassMap.forEach(entry => {
  const commonDenominator = greaterCommonDenominator(entry[0], entry[1]);
  // logical ||, so the null check short-circuits
  if (data.commonDenominator == null || commonDenominator > data.commonDenominator) {
    data.commonDenominator = commonDenominator;
  }
});
Any help? I want to make sure my code performs the same calculation.
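For comparison, here is one literal JavaScript reading of the DataWeave expression, under two assumptions I'm not sure about: that $$ inside the inner lambda refers to the outer map index, and that the inner reduce, since it never uses its denominator accumulator, effectively returns the last item times the multiplier:
// Hypothetical translation of the DataWeave line; its semantics are an assumption.
// Note: JS reduce callbacks take (accumulator, item), the reverse of DataWeave.
const perClass = myClassMap.map((sub, classIndex) =>
  // The inner reduce ignores its accumulator, so only the last item survives.
  sub.reduce((denominator, item) => item * perClassMultiplier[classIndex], 0)
);
const commonDenominator = perClass.reduce((sum, value) => sum + value, 0);
console.log(commonDenominator); // with the sample input above: 150 + 0 + 12 + 0 = 162 under this reading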
I am trying to calculate and create an array with average prices for different stocks.
For every stock, I have the data in this format:
{
  prices: [
    [1634304009628, 0.7118076876774715],
    [1634307586874, 0.7063647246346818],
    [1634311139365, 0.7049706990925925],
    [1634313858611, 0.7085543691926037],
    [1634318343009, 0.7057442983161784]
  ]
}
Every stock API call returns data in the format above: each entry has two values, a timestamp and a price. Now let's say I want the average trend for 5 stocks: I will get the data in 5 different arrays, and I want to merge them into one averaged array to find the trend.
For the final result, I want the array to be in the same format, just with the average calculated across all of them (the goal is to identify the trend direction).
What would be the best way to do that? I am using React.
First create an array of prices only, and then using reduce you can just do this:
let myObj = {
  prices: [
    [1634304009628, 0.7118076876774715],
    [1634307586874, 0.7063647246346818],
    [1634311139365, 0.7049706990925925],
    [1634313858611, 0.7085543691926037],
    [1634318343009, 0.7057442983161784]
  ]
};
// Average of a flat array of numbers
const average = arr => arr.reduce((p, c) => p + c, 0) / arr.length;
// Keep only the price (second element) of each [timestamp, price] pair
const pricesArr = myObj.prices.map(value => value[1]);
const result = average(pricesArr);
console.log(result);
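If you need the final result in the same [timestamp, price] format, averaged across several stocks, here is a minimal sketch assuming all the price arrays have the same length and are aligned by index (which real API data may not guarantee):
// stocks is assumed to be an array of objects shaped like myObj above
const averageAcrossStocks = stocks =>
  stocks[0].prices.map(([timestamp], i) => [
    timestamp, // keep the first stock's timestamp (assumes aligned timestamps)
    stocks.reduce((sum, stock) => sum + stock.prices[i][1], 0) / stocks.length
  ]);
// Usage: averageAcrossStocks([stockA, stockB, stockC, stockD, stockE])
// returns [[timestamp, averagePrice], ...], the same shape as the input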
I'm trying to get an array filled with the info back from some requests made to different REST APIs.
I thought about using Promise.all to do that, but for some reason it yields an array with a bunch of undefined values inside.
[ undefined, undefined, undefined, undefined ]
Here's my code:
var _ = require("lodash");//Used exclusively to check if the result from the request is an object
var ccxt = require("ccxt");//External library used to make the requests
let pairs = ["ETH/EUR", "BTC/EUR", "LTC/EUR", "BCH/EUR"]; //Array on which the Promise.all is based
function test(p) {
for (var i = 0; i < ccxt.exchanges.length; i++) { //Looping through all the rest APIs
let exchange = new ccxt[ccxt.exchanges[i]](); //Defining each API to make the requests
if (exchange.hasFetchOrderBook) {
exchange //Beginning of the request
.fetchOrderBook(p)
.then(order => {
if (_.isObject(order) && order.bids[0][1]) {
let now = Math.floor(new Date());
order.mkt = exchange.name;
order.pair = p;
order.ping = now - order.timestamp;
return order; //Return the result of the request
}
})
.catch(e => {});
}
}
}
Promise.all(pairs.map(test)) //Making the requests based on the Pairs Array
.then(res => {
console.log(res); //Logging the results ==> [undefined, undefined, undefined, undefined] for some reason...
})
.catch(e => {
console.log(e);
});
I know the requests are being made correctly, since if I console.log the order within the loop I get the correct results -- Example of the result when logging:
{ bids:
[ [ 12009.52, 0.0468 ],
[ 12008.5, 0.0227 ],
[ 12007.48, 30.9321 ],
[ 12006.46, 0.0537 ],
[ 12005.45, 0.0157 ],
[ 12004.43, 7.1659 ],
[ 12003.41, 0.0164 ],
[ 12002.39, 23.4159 ],
[ 12001.38, 0.0284 ],
[ 12000.36, 0.0132 ],
[ 11999.34, 0.0194 ],
[ 11998.33, 0.0034 ],
[ 11997.31, 7.526 ],
[ 2445.72, 34.075 ],
[ 2445.17, 25.4842 ],
[ 2444.96, 0.1118 ],
[ 2444.75, 23.288 ],
[ 2444, 0.0247 ],
[ 2443.8, 0.192 ],
[ 765.51, 0.0828 ] ],
asks:
[ [ 12048.74, 2.523 ],
[ 12049.77, 0.0159 ],
[ 12050.79, 0.029 ],
[ 12051.82, 0.0061 ],
[ 12052.84, 0.0181 ],
[ 12053.87, 0.0164 ],
[ 12054.89, 0.0355 ],
[ 12055.92, 0.0042 ],
[ 13419.62, 0.0063 ],
[ 13420.64, 0.0174 ],
[ 13421.78, 0.0143 ],
[ 13422.92, 0.026 ],
[ 13424.06, 0.0055 ],
[ 13425.2, 14.4552 ],
[ 13426.23, 0.0065 ],
[ 13427.25, 0.0057 ],
[ 13428.39, 0.0147 ],
[ 13429.53, 4.0375 ],
[ 13430.56, 23.9541 ],
[ 13431.58, 0.0137 ] ],
timestamp: 1512845715447,
datetime: '2017-12-09T18:55:15.447Z',
mkt: 'LakeBTC',
pair: 'BTC/EUR',
ping: 0 }
So I guess the problem I'm dealing with has to do with the asynchronous character of the function... but I'm not sure how I can make it synchronous.
Again, just to clarify my question: the objective is to get an array of 4 arrays of objects (one per pair) so that I can operate on each.
Just to make it clearer, here's an illustration of what I'm trying to achieve:
[
[
Object1,
Object2,
Object3,
etc...
],
[
Object1,
Object2,
Object3,
etc...
],
[
Object1,
Object2,
Object3,
etc...
],
[
Object1,
Object2,
Object3,
etc...
]
]
Why is Promise.all returning the array without waiting on the requests'results?
I hope that was clear enough! If not, let me know! :P
Thanks in advance for your help!
Your test function returns undefined. You need to return a promise for the result:
function test(p) {
return Promise.all(ccxt.exchanges.map(api => { //Looping through all the rest APIs
//^^^^^^^^^^^^^^^^^^
let exchange = new ccxt[api](); //Defining each API to make the requests
if (exchange.hasFetchOrderBook) {
return exchange //Beginning of the request
.fetchOrderBook(p)
.then(order => {
if (_.isObject(order) && order.bids[0][1]) {
let now = Math.floor(new Date());
order.mkt = exchange.name;
order.pair = p;
order.ping = now - order.timestamp;
return order; //Return the result of the request
}
// else undefined
})
.catch(e => {}); // undefined
}
// else undefined
}));
}
Of course your promises still fulfill with undefined when the if conditions do not apply or an error happens.
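If those undefined entries are unwanted, one possible follow-up, sketched here rather than taken from the original fix, is to filter each inner batch once it settles:
// Hypothetical wrapper around the test() above: keep only orders that resolved to an object
function testFiltered(p) {
  return test(p).then(orders => orders.filter(order => order !== undefined));
}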
To show jobs sent by users to a cluster I have the following code (simplified):
var split = require('split');
var Client = require('ssh2').Client;
var conn = new Client();
var globalRes;
var table = [["Head_1","Head_2"]];
module.exports = {
renderTable: function(req, res) {
conn.connect({host: 'xx.xx.xx.xx', port: 22, username: 'xxxxx', password: 'xxxxx'});
globalRes = res;
table = [["Head_1","Head_2"]];
conn.on('ready', function() { conn.shell(doSomething);} );
}
}
function doSomething(err, stream) {
stream.on('close', function() {
conn.end();
globalRes.render('index', { HTMLtable: table });
console.log(table);
} );
stream.pipe(split()).on('data', buildTable);
stream.write('qstat -s z\n');
stream.end('exit\n');
}
function buildTable(line) {
var newLine = [1, 2];
if(line.substring(0,6) == "job-ID") {
table.push(newLine);
}
return;
}
It opens an SSH connection and, after receiving the data, renders the page.
Problem: it only executes correctly the first time; in our example console.log shows:
[ [ 'Head_1', 'Head_2' ], [ 1, 2 ] ] <<< which is fine.
But from then on, every time the page is loaded the code piles more rows onto the 'table' data structure. In other words, if we reload the page a second time (or load the same page for the first time in another browser), console.log shows:
[ [ 'Head_1', 'Head_2' ], [ 1, 2 ], [ 1, 2 ] ]
[ [ 'Head_1', 'Head_2' ], [ 1, 2 ], [ 1, 2 ] ]
So not only is 'table' getting an additional [1, 2] row (it shouldn't), but console.log is executed twice.
If we load the page a third time, now we get:
[ [ 'Head_1', 'Head_2' ], [ 1, 2 ], [ 1, 2 ], [ 1, 2 ] ]
[ [ 'Head_1', 'Head_2' ], [ 1, 2 ], [ 1, 2 ], [ 1, 2 ] ]
[ [ 'Head_1', 'Head_2' ], [ 1, 2 ], [ 1, 2 ], [ 1, 2 ] ]
'table' grows in step with the number of times console.log gets executed.
Thanks in advance for any help you can provide.
OK, problem solved. I'm posting this in case anyone else has a similar problem and it may shed some light.
It was due to my bad JavaScript coding practices. It works if you include everything inside the renderTable function, that is, ZERO (0) global variables.
All functions and all variables (IMPORTANT: including the 'require()'s) go inside:
module.exports = {
renderTable: function(req, res) {
<<<< Put everything here.
}
}
In fact, this way the variable 'globalRes' is unnecessary; just use 'res' as it should be.
Hope this helps. I'm not going to mark my own answer as correct, so if anyone wants to get technical and explain the specific reason why it failed, I'll mark that answer as correct.
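A minimal sketch of that restructuring, with the same logic as the original code just scoped per request (the connection options remain placeholders):
module.exports = {
  renderTable: function(req, res) {
    var split = require('split');
    var Client = require('ssh2').Client;
    var conn = new Client();
    // Fresh per request, so rows no longer pile up across page loads
    var table = [["Head_1", "Head_2"]];
    conn.on('ready', function() {
      conn.shell(function(err, stream) {
        stream.on('close', function() {
          conn.end();
          res.render('index', { HTMLtable: table }); // 'res' used directly, no globalRes
        });
        stream.pipe(split()).on('data', function(line) {
          if (line.substring(0, 6) == "job-ID") {
            table.push([1, 2]);
          }
        });
        stream.write('qstat -s z\n');
        stream.end('exit\n');
      });
    });
    conn.connect({ host: 'xx.xx.xx.xx', port: 22, username: 'xxxxx', password: 'xxxxx' });
  }
};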
Is there any way we can query and get location data using a MongoDB geospatial query that matches the following criteria?
Getting all locations that are part of the intersection of two boxes, or in general two polygons.
For example, can we get in the query output only those locations that are within the yellow area, which is the common area of the purple and red geometric objects [polygons]?
My study of the MongoDB documentation so far:
http://docs.mongodb.org/manual/reference/operator/query/geoWithin/
This provides results that are within one or more polygons [I am looking for the intersection of these individual polygon results as output].
Use case
db.places.find( {
loc: { $geoWithin: { $box: [ [ 0, 0 ], [ 100, 100 ] ] } }
} )
The above query provides results within a rectangular geometric area [I am looking for locations that are common to two such individual queries]:
db.places.find( {
loc: { $geoWithin: { $box: [ [ 0, 0 ], [ 100, 100 ] ] } }
} )
db.places.find( {
loc: { $geoWithin: { $box: [ [ 50, 50 ], [ 90, 120 ] ] } }
} )
So looking at this with a fresh mind the answer is staring me in the face. The key thing that you have already stated is that you want to find the "intersection" of two queries in a single response.
Another way to look at this is you want all of the points bound by the first query to then be "input" for the second query, and so on as required. That is essentially what an intersection does, but the logic is actually literal.
So just use the aggregation framework to chain the matching queries. For a simple example, consider the following documents:
{ "loc" : { "type" : "Point", "coordinates" : [ 4, 4 ] } }
{ "loc" : { "type" : "Point", "coordinates" : [ 8, 8 ] } }
{ "loc" : { "type" : "Point", "coordinates" : [ 12, 12 ] } }
And the chained aggregation pipeline, just two queries:
db.geotest.aggregate([
{ "$match": {
"loc": {
"$geoWithin": {
"$box": [ [0,0], [10,10] ]
}
}
}},
{ "$match": {
"loc": {
"$geoWithin": {
"$box": [ [5,5], [20,20] ]
}
}
}}
])
So if you consider that logically, the first query will find the points that fall within the bounds of the initial box, i.e. the first two points. Those results are then acted on by the second query, and since the new box bounds start at [5,5], that excludes the first point. The third point was already excluded, but if the box restrictions were reversed the result would be the same: the middle document only.
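Run against the three sample points above, the pipeline should therefore return only the middle document (the exact output wrapping depends on your shell or driver version):
{ "loc" : { "type" : "Point", "coordinates" : [ 8, 8 ] } }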
How this works is quite unique to the $geoWithin query operator, as compared to various other geo functions:
$geoWithin does not require a geospatial index. However, a geospatial index will improve query performance. Both 2dsphere and 2d geospatial indexes support $geoWithin.
So the results are both good and bad. Good in that you can do this type of operation without an index in place, but bad because once the aggregation pipeline has altered the collection results after the first query operation, no further index can be used. So any performance benefit of an index is lost when merging the "set" results from anything after the initial Polygon/MultiPolygon as supported.
For this reason I would still recommend that you calculate the intersection bounds "outside" of the query issued to MongoDB. Even though the aggregation framework can do this due to the "chained" nature of the pipeline, and even though resulting intersections will get smaller and smaller, your best performance is a single query with the correct bounds that can use all of the index benefits.
There are various methods for doing that, but for reference here is an implementation using the JSTS library, which is a JavaScript port of the popular JTS library for Java. There may be other ports to other languages, but this one has simple GeoJSON parsing and built-in methods for things such as getting the intersection bounds:
var async = require('async'),
    util = require('util'),
    jsts = require('jsts'),
    mongo = require('mongodb'),
    MongoClient = mongo.MongoClient;
var parser = new jsts.io.GeoJSONParser();
var polys= [
{
type: 'Polygon',
coordinates: [[
[ 0, 0 ], [ 0, 10 ], [ 10, 10 ], [ 10, 0 ], [ 0, 0 ]
]]
},
{
type: 'Polygon',
coordinates: [[
[ 5, 5 ], [ 5, 20 ], [ 20, 20 ], [ 20, 5 ], [ 5, 5 ]
]]
}
];
var points = [
{ type: 'Point', coordinates: [ 4, 4 ] },
{ type: 'Point', coordinates: [ 8, 8 ] },
{ type: 'Point', coordinates: [ 12, 12 ] }
];
MongoClient.connect('mongodb://localhost/test',function(err,db) {
db.collection('geotest',function(err,geo) {
if (err) throw err;
async.series(
[
// Insert some data
function(callback) {
var bulk = geo.initializeOrderedBulkOp();
bulk.find({}).remove();
async.each(points,function(point,callback) {
bulk.insert({ "loc": point });
callback();
},function(err) {
bulk.execute(callback);
});
},
// Run each version of the query
function(callback) {
async.parallel(
[
// Aggregation
function(callback) {
var pipeline = [];
polys.forEach(function(poly) {
pipeline.push({
"$match": {
"loc": {
"$geoWithin": {
"$geometry": poly
}
}
}
});
});
geo.aggregate(pipeline,callback);
},
// Using external set resolution
function(callback) {
var geos = polys.map(function(poly) {
return parser.read( poly );
});
var bounds = geos[0];
for ( var x=1; x<geos.length; x++ ) {
bounds = bounds.intersection( geos[x] );
}
var coords = parser.write( bounds );
geo.find({
"loc": {
"$geoWithin": {
"$geometry": coords
}
}
}).toArray(callback);
}
],
callback
);
}
],
function(err,results) {
if (err) throw err;
console.log(
util.inspect( results.slice(-1), false, 12, true ) );
db.close();
}
);
});
});
This uses the full GeoJSON "Polygon" representations, as that translates to what JTS can understand and work with. Chances are any input you receive for a real application will be in this format as well, rather than using conveniences such as $box.
So it can be done with the aggregation framework, or even parallel queries merging the "set" of results. But while the aggregation framework may do it better than merging sets of results externally, the best results will always come from computing the bounds first.
In case anyone else looks at this: as of MongoDB version 2.4, you can use $geoIntersects to find the intersection of GeoJSON objects, which supports intersections of two polygons, among other types.
{
  <location field>: {
    $geoIntersects: {
      $geometry: {
        type: "<GeoJSON object type>",
        coordinates: [ <coordinates> ]
      }
    }
  }
}
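For instance, a minimal usage sketch against the places collection from the question (the polygon coordinates here are made up for illustration):
db.places.find({
  loc: {
    $geoIntersects: {
      $geometry: {
        type: "Polygon",
        coordinates: [[ [0, 0], [0, 10], [10, 10], [10, 0], [0, 0] ]]
      }
    }
  }
})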
There is a nice write-up on this blog.