Feed forward Neural Network not working in JS - javascript

I am working on a project in JavaScript where, after the user has specified the size of the neural network and the weights, my program creates a feed-forward NN that can predict the outcome of an input set based on the already defined weights. My program does not train the network through backpropagation or anything else; it just takes predefined weights and feeds the input through the network.
I have trained a neural network in Simbrain (Java-based NN software) through backprop and I have exported the weights that resulted from training the network in Simbrain to a JS file that supplies those weights when creating the NN.
My problem is that even though my neural network behaves the way it's supposed to on most networks (even those with more than 3 layers, see picture 1), on this one it keeps producing outputs that are very different from what it should be getting according to the NN in Simbrain (see picture 2). My question therefore is: what am I doing wrong?
I have already made sure that both Simbrain and my own program use the same sigmoidal function (f(x) = 1 / (1 + 1 / e^x)), so the problem isn't that.
Any help, including help that isn't directly related to my problem but rather to neural networks in general, is greatly appreciated. Thank you for reading this far.
The constructors for the NN
function NeuralNetwork(layers /*2d array*/, weight3 /*3d array*/) {
/*
for the second test case, layers and weight3 are respectively:
[
[0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0],
[0,0,0]
]; , and
[
[
[0.8026458866414493,-4.88941104413936,-4.226087540974577,-4.530573464438122,-1.4556229469839264,-4.31689275950382,-2.598025750964335,-1.4024486094322206,-4.2746136202736595,-1.114811712353249],
[-0.7018557602796472,-1.5899008962198744,-0.16868647158179298,-1.5305564633774085,-1.4933903630534928,-1.0700257428899125,-4.302200807112217,-1.6005131049772037,0.1368813977388942,-1.5986983805280093],
[-0.29876796358044566,-2.4380450712541997,-1.5397608205098134,-2.3059188256916676,-1.5036183940283618,-1.7713981036865771,-1.2577757481948733,-1.7750243327951924,-1.2961124127198986,-1.6383194273199955],
[-0.6170049336232494,-3.3128244088884964,-3.331978879084264,-3.1456639968607307,-1.2964187225461807,-2.9267790861009417,-1.9560748559032006,-1.3973642251104967,-3.4449373490550164,-1.2039858563847703],
[0.28760582772383936,-0.029647036107864125,2.401661305865335,0.15614131284929608,0.9967571587559184,1.3920493637059834,2.053160398236636,1.560406973436222,2.5003616706837324,1.1406280698546443],
[-1.431355486584377,-0.5254956135639378,0.585966810464151,-0.47056989178558917,-0.34719568935262807,3.0873904709552016,2.7466680699367627,-0.3183084147224785,0.4307418811280014,-0.4347019809871141],
[4.730305578112657,2.794618188382498,2.7725054494795303,2.7971993062957767,3.0121313133902494,4.52697776884291,0.1861088251573733,3.3324377979102677,3.4776335904379945,3.220162438476049],
[1.3365670221393215,4.151102261635236,4.448937517824129,3.818527635050038,1.1622076847092544,5.056756438356793,-4.811867833736578,1.4279903810573407,5.067869165923729,1.2084930462786259],
[-7.653884362627585,3.4481239221814506,-1.3517036269721663,2.9744225300717084,-3.4121450698649567,-6.262463291313358,10.0,-5.000134578924327,-1.701089610696345,-4.510549176789293]
], [
[0.738937123642736,1.0027735638897857,-0.4895642945264585,-0.4966409487023605,-3.411429095495459,-0.645660237498346,0.4795890293073677,-1.1530391624279934,-0.5844011923153196,-0.18971906794059312,0.24259889837466253],
[-1.3289019929201453,-1.3846783936643814,-3.1027185772051857,-3.051664033849648,0.8551718079643078,-1.0150243513426276,-1.1322190191703165,-0.8017220694962384,-3.3343695886290257,-1.5355207800814192,-1.1708434001066501],
[-1.2376066287024123,-1.3769667430967405,-2.301168906630486,-2.325621305132306,1.9450338961713791,-0.6571756279826005,-1.1591625309611004,-0.40914317621655133,-2.489673013800612,-1.3075292847341096,-0.9491990659165409],
[-1.2735133823287095,-1.2998934938770366,-2.7684415870018477,-2.795194685738332,0.6119467566598199,-0.9188836490585277,-1.1200651160455346,-0.6609228479081031,-2.9594533679041617,-1.4427187863617232,-1.0109174567197428],
[-1.3424373302583281,-1.5343976853123078,-0.9412451891990541,-0.9181715664124199,0.39356360921646094,-1.0424607517601023,-1.502583319884496,-1.0152736611391637,-0.9513379926677091,-0.8563445028619694,-1.2613129065351194],
[1.479041505318804,1.810007067391299,-2.8023717750684107,-2.7812453328904354,-2.3035371159432065,0.03115788970802594,1.9657984684801135,-0.06598157999814092,-3.127064931997624,-0.823555626227005,1.264759264824068],
[-2.888035414770285,-2.994031589353307,3.6767014622022303,3.6660677492715528,-9.181646436584106,-3.263884016795336,-3.88349373084228,-3.7712237821378527,4.041549967683737,-0.687691881794572,-3.4265218341885384],
[-1.778832083041489,-2.12273644922548,-1.0963638066243724,-1.030260217185849,1.1187575804787673,-1.2409528854652865,-2.1738688894809637,-1.2743917544089247,-1.107812865029798,-1.0629428830636494,-1.7751739289407722],
[-1.004719873122501,-1.0443553807939716,-2.577499515628139,-2.5776692043229663,2.9709867841159,-0.5274719498845833,-0.843890283520379,-0.20671418403893513,-2.7179118826886026,-1.306839176041484,-0.7570735658813299],
[-1.6272666988539848,-1.827183409776364,-0.9438773753269729,-0.9435987081211876,0.7660203923230526,-1.2259095997120846,-2.031459716170598,-1.3231868095404185,-0.9964657871022223,-0.9165692090752512,-1.58444425796673]
], [
[3.725973720696792,-1.6646641113226457,-2.690673963411094],
[3.800277429998268,-2.085738176798833,-2.63291827658551],
[3.9016422516515155,2.0672177107134138,-3.855168383990615],
[3.8880037099581446,2.1381928663964778,-3.8429910059416983],
[-1.162825551537172,2.35558424769462,-2.2926681297161076],
[2.9764021713068542,0.3197129292281053,-2.8308532112377027],
[3.820130175045188,-1.7806950056811943,-3.106860347494969],
[2.967938499849539,0.8691409343754017,-3.130904551603304],
[4.0701841361555715,2.228471656300074,-4.053369245616366],
[3.2231153003534962,0.9718378272580407,-2.658010203549737],
[3.4876764892680967,-1.2622479268130227,-2.870582831864283]
]
];
I copied these directly from the Simbrain weight matrices.
*/
this.layers = [];
for(var i = 0; i < layers.length; i++) { // the creation of the layers
this.layers.push(new Layer(layers[i]));
}
for(var f = 0; f < layers.length - 1; f++) { // the creation of the connections between neurons of different layers, uses predefined weights
this.layers[f].connectLayers(layers[f + 1], weight3[f]);
}
/*this.getChances = function(neuron, layer) { // this function isn't important
var total = 0;
for(var g = 0; g < layer.neurons.length; g++) {
total += layer.neurons[g].value;
}
var prob = layer.neurons[neuron].value / total * 100;
var decOdds = 100 / prob;
return [(prob).toFixed(2), decOdds.toFixed(2)];
} */
this.reset = function(values) { // reset the neuron values
for (var i = 0; i < this.layers.length; i++) {
for (var t = 0; t < this.layers[i].neurons.length; t++) {
this.layers[i].neurons[t].value = values[i][t];
}
}
}
this.run = function() { // the main function that will make the NN predict
for (var t = 0; t < this.layers.length; t++) { // for each layer
if(t !== 0) { // if not the first layer (input shouldn't be activated)
this.activate(t);
}
if(t !== this.layers.length - 1) { // if not the last layer (the output cannot preActivate a layer that comes after it)
this.preActivate(t); // this affects the layer that comes after it
}
}
}
this.preActivate = function(t) { // multiply the weights by the value of the neurons and add the result to the next layer (pre activation)
for (var p = 0; p < this.layers[t].neurons.length; p++) { // for the neurons in the current layer
for (var v = 0; v < this.layers[t].neurons[p].weights.length; v++) { // for the weights of the current neuron (the amount of weights is equal to the amount of neurons in the next layer)
this.layers[t + 1].neurons[v].value += this.layers[t].neurons[p].weights[v].value * this.layers[t].neurons[p].value; // increment the neurons in the next layer
}
}
}
this.activate = function(t) { // take the sigmoid for each neuron in the current layer (activation)
for (var hp = 0; hp < this.layers[t].neurons.length; hp++) {
this.layers[t].neurons[hp].value = this.sigmoid(this.layers[t].neurons[hp].value);
}
}
this.sigmoid = function(x) { // the sigmoidal function
return 1 / (1 + 1 / Math.pow(Math.E, x)); // Math.pow (or Math.exp) is needed; a bare pow() is undefined
}
}
function Layer(neurons /*1d array*/, weight2 /*2d array*/) { // create a new layer
this.neurons = [];
for(var i = 0; i < neurons.length; i++) {
this.neurons.push(new Neuron(neurons[i]));
}
this.connectLayers = function(targetLayer /*1d array*/, weight2 /*2d array*/) { // create the connections
for(var f = 0; f < this.neurons.length; f++) { // for each neuron in its own layer
for(var t = 0; t < targetLayer.length; t++) { // for each neuron in the next layer
this.neurons[f].weights.push(new Weight(weight2[f][t])); // get the weight from the predefined set and make it the value of that weights
}
}
}
}
function Neuron(value /*float*/) {
this.value = value;
this.weights = [];
this.x = null; // for drawing on a canvas
this.y = null; // for drawing on a canvas
}
function Weight(value /*float*/) {
this.value = value;
this.x = null; // for drawing on a canvas
this.y = null; // for drawing on a canvas
}
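A minimal usage sketch of the constructors above, assuming the layers and weight3 arrays from the comment are in scope; reading the prediction from the last layer's neuron values follows from how run() updates them, and newValues is a hypothetical second input set with the same shape as layers:
// Sketch: build the network from the predefined arrays, run one forward pass,
// and read the output layer's neuron values.
var nn = new NeuralNetwork(layers, weight3);
nn.run();
var outputLayer = nn.layers[nn.layers.length - 1];
var outputs = outputLayer.neurons.map(function(neuron) { return neuron.value; });
console.log(outputs); // one predicted value per output neuron

// For a different input set, reset every neuron value first and run again;
// newValues must be a 2d array shaped like `layers` (inputs in the first row,
// zeros elsewhere), otherwise the previous activations would accumulate.
nn.reset(newValues);
nn.run();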

Related

Optimizing process determining license usage in time

I am trying to find an efficient way to go through a large amount of data to determine how many units are being processed at once.
The data that I am receiving are just simple pairs:
{timestamp: *, solvetime: *}
What I need is to see how many things are being processed in each second.
To help you visualize what I mean, here is an example of the data that I receive:
[{timestamp: 5, solvetime: 3}, {timestamp: 7, solvetime: 5}, {timestamp: 8, solvetime: 2}, {timestamp: 12, solvetime: 10}, {timestamp: 14, solvetime: 7}]
The chart below should help you understand how it looks in time:
https://i.stack.imgur.com/LEIhW.png
This is a simple case where the final calculation contains every second, but if the timeframe is much wider I only show 205 different times within it. E.g. if the time between the first and the last timestamp is 20500 seconds, I would still calculate the usage for every second, but divide the time into 205 parts of 100 seconds each and show only the second with the highest usage from each part.
What I am doing right now is iterating through all the pairs of input data and creating a map of all the seconds; once I have it, I go through this map again to find the highest usage in each time period (of the 205 time periods I divide the whole timeframe into) and append it to the map of 205 timestamps.
It works correctly, but it's very, very slow, and I feel like there is a better way to do it. A table (array) might be faster than a map, but it still doesn't seem very efficient, does it?
Here is the actual code that does it:
// results contain all the timestamps and solvetimes
// Timeframe of the chart
var start = Math.min.apply(Math, msgDetailsData.results.map((o) => { return o.timestamp; }))
var end = Math.max.apply(Math, msgDetailsData.results.map((o) => { return o.timestamp; }))
// map of all seconds in the desired range (keys); the values count the processes running in a given second
let mapOfSecondsInRange = new Map();
for (let i = start; i <= end; i++) {
mapOfSecondsInRange.set(i, 0);
}
// we go through every proces and add +1 to the value of each second in which the task was active
for (let element of msgDetailsData.results) {
var singleTaskStart = element.timestamp - Math.ceil(element.solveTime);
if (singleTaskStart < start) {
for (let i = singleTaskStart; i < start; i++) {
mapOfSecondsInRange.set(i, 0);
}
start = singleTaskStart;
}
for (let i = singleTaskStart; i < element.timestamp; i++) {
mapOfSecondsInRange.set(i, mapOfSecondsInRange.get(i) + 1);
}
}
// Preparation for the final map - all the seconds in the range divided into 205 parts.
const numberOfPointsOnChart = 205;
var numberOfSecondsForEachDataPoint = Math.floor((end - start) / numberOfPointsOnChart) + 1;
var leftoverSeconds = ((end - start) % numberOfPointsOnChart) + 1;
var highestUsageInGivenTimeframe = 0;
var timestampOfHighestUsage = 0;
let mapOfXXXDataPoints = new Map();
var currentElement = start;
for (let i = 0; i < numberOfPointsOnChart; i++) {
if (leftoverSeconds === 0) {
numberOfSecondsForEachDataPoint = numberOfSecondsForEachDataPoint - 1;
}
if (currentElement <= end) {
for (let j = 0; j < numberOfSecondsForEachDataPoint; j++) {
if (j === 0) {
highestUsageInGivenTimeframe = mapOfSecondsInRange.get(currentElement);
timestampOfHighestUsage = currentElement;
}
else {
if (mapOfSecondsInRange.get(currentElement) > highestUsageInGivenTimeframe) {
highestUsageInGivenTimeframe = mapOfSecondsInRange.get(currentElement);
timestampOfHighestUsage = currentElement;
}
}
currentElement = currentElement + 1;
}
mapOfXXXDataPoints.set(timestampOfHighestUsage, highestUsageInGivenTimeframe);
leftoverSeconds = leftoverSeconds - 1;
}
}
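For reference, the per-second counting step above can also be written with a difference array: record only where each task starts and stops, then take a running sum, so each input pair is touched a constant number of times instead of once per covered second. This is only a sketch under the same assumptions as the code above (a task is active from timestamp - ceil(solvetime) up to timestamp - 1, and the field is spelled solvetime as in the example data):
// Sketch: count active tasks per second using a difference array.
function countActivePerSecond(results) {
  let start = Infinity, end = -Infinity;
  for (const r of results) {
    start = Math.min(start, r.timestamp - Math.ceil(r.solvetime));
    end = Math.max(end, r.timestamp);
  }
  const diff = new Array(end - start + 2).fill(0);
  for (const r of results) {
    diff[r.timestamp - Math.ceil(r.solvetime) - start] += 1; // task becomes active
    diff[r.timestamp - start] -= 1;                          // task is done
  }
  const counts = new Map();
  let running = 0;
  for (let s = start; s <= end; s++) {
    running += diff[s - start];
    counts.set(s, running);
  }
  return counts; // same shape as mapOfSecondsInRange above
}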

Efficiently find twin edges in Half-Edge (DCEL) data structure?

I am using a HalfEdge data structure to represent the connectivity between the faces on my mesh.
I am importing an external model and constructing the HalfEdge structure during the import process. However, for meshes with many triangles, the construction process takes too much time.
Specifically, it appears that the process of linking the half-edges takes up the most time.
I would like to get some advice on how to improve my algorithm.
Below is the code I am using to initialize my data structure. The first for-loop creates a Face from the vertex data, while pushing the HalfEdges that compose the Face into a separate array to be used shortly after.
The second for-loop is responsible for looking through the array of all HalfEdges and finding matching pairs (i.e., the two that are twins of one another).
I logged out the time before and after each process, and noticed that the second loop is what slows everything down.
Here are the time stamps
start constructing DCEL 14:55:22
start making faces 14:55:22
end making faces 14:55:22
/* this is where it takes long.. almost 6 seconds on a mesh with 13000 triangles */
start linking halfEdges 14:55:22
end linking halfEdges 14:55:28
end constructing DCEL 14:55:28
And here is the actual code
console.log('start constructing DCEL', new Date().toTimeString());
// initialize Half-Edge data structure (DCEL)
const initialFaceColor = new THREE.Color(1, 1, 1);
const { position } = geometry.attributes;
const faces = [];
const edges = [];
let newFace;
console.log('start making faces', new Date().toTimeString());
for (let faceIndex = 0; faceIndex < (position.count / 3); faceIndex++) {
newFace = new Face().create(
new THREE.Vector3().fromBufferAttribute(position, faceIndex * 3 + 0),
new THREE.Vector3().fromBufferAttribute(position, faceIndex * 3 + 1),
new THREE.Vector3().fromBufferAttribute(position, faceIndex * 3 + 2),
faceIndex);
edges.push(newFace.edge);
edges.push(newFace.edge.next);
edges.push(newFace.edge.prev);
newFace.color = initialFaceColor;
faces.push(newFace);
}
console.log('end making faces', new Date().toTimeString());
console.log('start linking halfEdges', new Date().toTimeString());
/**
* Find and connect twin Half-Edges
*
* if two Half-Edges are twins:
* Edge A TAIL ----> HEAD
* = =
* Edge B HEAD <---- TAIL
*/
let currentEdge;
let nextEdge;
for (let j = 0; j < edges.length; j++) {
currentEdge = edges[j];
// this edge has a twin already; skip to next one
if (currentEdge.twin !== null) continue;
for (let k = j + 1; k < edges.length; k++) {
nextEdge = edges[k];
// this edge has a twin already; skip to next one
if (nextEdge.twin !== null) continue;
if (currentEdge.head().equals(nextEdge.tail())
&& currentEdge.tail().equals(nextEdge.head())) {
currentEdge.setTwin(nextEdge);
}
}
}
console.log('end linking halfEdges', new Date().toTimeString());
console.log('end constructing DCEL', new Date().toTimeString());
How can I optimize the process of searching for twin edges?
I'd try to hash and look up the edges, e.g. like that:
function hash(p1, p2) {
return JSON.stringify(p1)+JSON.stringify(p2);
}
const lookup = {};
for (let j = 0; j < edges.length; j++) {
  const edge = edges[j];
  lookup[hash(edge.head(), edge.tail())] = edge;
}
for (let j = 0; j < edges.length; j++) {
  const edge = edges[j];
  const twin = lookup[hash(edge.tail(), edge.head())];
  !edge.twin && twin && !twin.twin && edge.setTwin(twin);
}
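The same idea wrapped into a function, as a sketch: one pass builds the lookup and one pass resolves twins, so the quadratic pairwise scan is avoided. The string key built from the THREE.Vector3 components of head() and tail() is an assumption; any stable serialization of the two endpoints works.
// Sketch: link twin half-edges via a Map keyed by the edge's endpoints.
function linkTwins(edges) {
  const keyOf = (a, b) => a.x + ',' + a.y + ',' + a.z + '|' + b.x + ',' + b.y + ',' + b.z;
  const lookup = new Map();
  for (const edge of edges) {
    lookup.set(keyOf(edge.head(), edge.tail()), edge);
  }
  for (const edge of edges) {
    const twin = lookup.get(keyOf(edge.tail(), edge.head())); // reversed direction
    if (!edge.twin && twin && !twin.twin) {
      edge.setTwin(twin);
    }
  }
}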

How to set a variable once a day in Javascript

Consider a variable list = ["A", "B", ...] as a list of strings. I want to use a JavaScript program that picks three strings from this list once a day and writes them into an HTML field.
Currently I use
function getRandom(arr, n) {
var result = new Array(n),
len = arr.length,
taken = new Array(len);
if (n > len)
throw new RangeError("getRandom: more elements taken than available");
while (n--) {
var x = Math.floor(Math.random() * len);
result[n] = arr[x in taken ? taken[x] : x];
taken[x] = --len in taken ? taken[len] : len;
}
return result;
}
smallList = getRandom(list, 3);
var htmlTags = [
"tag1",
"tag2",
"tag3"
];
for (var i = 0; i < htmlTags.length; i++) {
document.getElementById(htmlTags[i]).innerHTML = smallList[i];
}
Now this list gets new entries every time I refresh the website. Is there a way that smallList is only set once a day/hour/minute, i.e. once in a given period of time, using only JavaScript?
So you want to:
Pick three values from your list and show them on your web page
Each day, pick three new values to show for the whole day
Everyone who visits the page should see the same values regardless of client
As others have suggested, it would be a better candidate for a server-side task than client-side.
For example, you might have a server page which checks for the existence of a value stored in cache. The cache would be set to 24 hours. If the cache is not available, then a new cache object is created and given a lifetime of 24 hours. Inside the cache, you could also store the values you wish to retrieve.
Then, you could retrieve the cache and output the values. The particular implementation of the cache would depend on your server-side language.
OKAY: Via Stored Values (COOKIE, SESSION, LOCAL STORAGE, MEMORY):
Per user, you'd have to use a cookie, a session, or write to local storage in the browser.
For all users, you'd have to use a server-side variable somewhere, like a database, a file, or memory.
Either way, you'd set the value to expire in a day and regenerate it if expired. This is the answer you will get from most people because it is the only way they know how to solve this: a single stored value polled from all locations.
BETTER: Via Deterministic Pseudo-Random Number Generator Seeded with Date:
Or, if you are really ambitious and don't want to rely on a value that you set somewhere, you could use or write a:
deterministic pseudo-random number generator that you seed off of the date. Since deterministic pseudo-random generators produce reproducible "randoms" from the same seed, seeding with the date gives you a unique seed per day, and hence a unique random per day.
function RC4(seed) {
this.s = new Array(256);
this.i = 0;
this.j = 0;
for (var i = 0; i < 256; i++) {
this.s[i] = i;
}
if (seed) {
this.mix(seed);
}
};
RC4.getStringBytes = function(string) {
var output = [];
for (var i = 0; i < string.length; i++) {
var c = string.charCodeAt(i);
var bytes = [];
do {
bytes.push(c & 0xFF);
c = c >> 8;
} while (c > 0);
output = output.concat(bytes.reverse());
}
return output;
};
RC4.prototype._swap = function(i, j) {
var tmp = this.s[i];
this.s[i] = this.s[j];
this.s[j] = tmp;
};
RC4.prototype.mix = function(seed) {
var input = RC4.getStringBytes(seed);
var j = 0;
for (var i = 0; i < this.s.length; i++) {
j += this.s[i] + input[i % input.length];
j %= 256;
this._swap(i, j);
}
};
RC4.prototype.next = function() {
this.i = (this.i + 1) % 256;
this.j = (this.j + this.s[this.i]) % 256;
this._swap(this.i, this.j);
return this.s[(this.s[this.i] + this.s[this.j]) % 256];
};
function RNG(seed) {
if (seed == null) {
seed = '' + Math.random() + Date.now();
} else if (typeof seed === "function") {
// Use it as a uniform number generator
this.uniform = seed;
this.nextByte = function() {
return ~~(this.uniform() * 256);
};
seed = null;
} else if (Object.prototype.toString.call(seed) !== "[object String]") {
seed = JSON.stringify(seed);
}
this._normal = null;
if (seed) {
this._state = new RC4(seed);
} else {
this._state = null;
}
}
RNG.prototype.nextByte = function() {
return this._state.next();
};
RNG.prototype.uniform = function() {
var BYTES = 7; // 56 bits to make a 53-bit double
var output = 0;
for (var i = 0; i < BYTES; i++) {
output *= 256;
output += this.nextByte();
}
return output / (Math.pow(2, BYTES * 8) - 1);
};
RNG.prototype.random = function(n, m) {
if (n == null) {
return this.uniform();
} else if (m == null) {
m = n;
n = 0;
}
return n + Math.floor(this.uniform() * (m - n));
};
RNG.$ = new RNG();
Date.prototype.yyyymmdd = function() {
  var mm = (this.getMonth() + 1).toString(); // getMonth() is zero-based
  var dd = this.getDate().toString();
  // zero-pad the month and day to two digits
  return [this.getFullYear(), mm.length < 2 ? '0' + mm : mm, dd.length < 2 ? '0' + dd : dd].join('');
};
// Using the Date like so will give you the same random between 40 and 50 for the same day
var rng = new RNG((new Date).yyyymmdd()); rng.random(40, 50);
// Test with dates
var rng = new RNG('20180301'); rng.random(40, 50);
var rng = new RNG('20180302'); rng.random(40, 50);
var rng = new RNG('20180301'); rng.random(40, 50);
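Tying this back to the original question of picking three strings per day, a sketch (pickDaily is a hypothetical helper that combines the RNG above with the date seed; every visitor gets the same picks on the same date):
// Sketch: deterministically pick n distinct items from arr for the current day.
function pickDaily(arr, n) {
  var rng = new RNG((new Date()).yyyymmdd()); // same seed for everyone all day
  var copy = arr.slice();
  var picks = [];
  for (var i = 0; i < n && copy.length > 0; i++) {
    picks.push(copy.splice(rng.random(0, copy.length), 1)[0]);
  }
  return picks;
}
var smallList = pickDaily(list, 3); // use these for the HTML tags as in the question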
Store the list in localStorage or a Cookie. Also store the timestamp.
Use setTimeout(function(){...}, n) to examine the timestamp and update the values as needed.
If the page refreshes or is loaded anew, then perform the check on what is stored. If nothing exists, create your list and set the timestamp. If data does exist, then compare the timestamp and update the list as needed.
If you need the list to be consistent across users, then everything needs to be stored, examined and calculated on the server-side.
// localStorage can only store strings, so serialize the object with JSON
localStorage.setItem('savedData', JSON.stringify({
  timestamp: new Date(),
  dataList: ['a', 'b', 'c']
}));
To get the values from localStorage:
// parse the stored JSON back into an object before comparing any values
let saved = JSON.parse(localStorage.getItem('savedData'));
let ts = new Date(saved.timestamp); // Date object (stored as an ISO string)
let dl = saved.dataList; // Array of values
For more information on localStorage see (or search the web) -> https://www.w3schools.com/html/html5_webstorage.asp
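A sketch of the expiry check described above (per browser only, not shared across users; getRandom and list are taken from the question, and the 24-hour window is an assumption — use whatever period you need):
// Sketch: reuse the stored picks for 24 hours, then regenerate them.
var ONE_DAY_MS = 24 * 60 * 60 * 1000;
var saved = JSON.parse(localStorage.getItem('savedData') || 'null');
if (!saved || Date.now() - new Date(saved.timestamp).getTime() > ONE_DAY_MS) {
  saved = { timestamp: new Date(), dataList: getRandom(list, 3) };
  localStorage.setItem('savedData', JSON.stringify(saved));
}
var smallList = saved.dataList; // fill the HTML fields with these, as in the question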

How can I use JS WebAudioAPI for beat detection?

I'm interested in using the JavaScript WebAudioAPI to detect song beats, and then render them in a canvas.
I can handle the canvas part, but I'm not a big audio guy and really don't understand how to make a beat detector in JavaScript.
I've tried following this article but cannot, for the life of me, connect the dots between each function to make a functional program.
I know I should show you some code, but honestly I don't have any; all my attempts have failed miserably and the relevant code is in the previously mentioned article.
Anyways I'd really appreciate some guidance, or even better a demo of how to actually detect song beats with the WebAudioAPI.
Thanks!
The main thing to understand about the referenced article by Joe Sullivan is that even though it gives a lot of source code, it's far from final and complete code. To reach a working solution you will still need both some coding and debugging skills.
This answer draws most of its code from the referenced article, original licensing applies where appropriate.
Below is a naïve sample implementation for using the functions described by the above article, you still need to figure out correct thresholds for a functional solution.
The code consists of preparation code written for the answer:
reading a local file over the FileReader API
decoding the file as audio data using the AudioContext API
and then, as described in the article:
filtering the audio, in this example with a low-pass filter
calculating peaks using a threshold
grouping interval counts and then tempo counts
For the threshold I used an arbitrary value of .98 of the range between maximum and minimum values; when grouping I added some additional checks and arbitrary rounding to avoid possible infinite loops and make it an easy-to-debug sample.
Note that commenting is scarce to keep the sample implementation brief because:
the logic behind processing is explained in the referenced article
the syntax can be referenced in the API docs of the related methods
audio_file.onchange = function() {
var file = this.files[0];
var reader = new FileReader();
var context = new(window.AudioContext || window.webkitAudioContext)();
reader.onload = function() {
context.decodeAudioData(reader.result, function(buffer) {
prepare(buffer);
});
};
reader.readAsArrayBuffer(file);
};
function prepare(buffer) {
var offlineContext = new OfflineAudioContext(1, buffer.length, buffer.sampleRate);
var source = offlineContext.createBufferSource();
source.buffer = buffer;
var filter = offlineContext.createBiquadFilter();
filter.type = "lowpass";
source.connect(filter);
filter.connect(offlineContext.destination);
source.start(0);
offlineContext.startRendering();
offlineContext.oncomplete = function(e) {
process(e);
};
}
function process(e) {
var filteredBuffer = e.renderedBuffer;
//If you want to analyze both channels, use the other channel later
var data = filteredBuffer.getChannelData(0);
var max = arrayMax(data);
var min = arrayMin(data);
var threshold = min + (max - min) * 0.98;
var peaks = getPeaksAtThreshold(data, threshold);
var intervalCounts = countIntervalsBetweenNearbyPeaks(peaks);
var tempoCounts = groupNeighborsByTempo(intervalCounts);
tempoCounts.sort(function(a, b) {
return b.count - a.count;
});
if (tempoCounts.length) {
output.innerHTML = tempoCounts[0].tempo;
}
}
// http://tech.beatport.com/2014/web-audio/beat-detection-using-web-audio/
function getPeaksAtThreshold(data, threshold) {
var peaksArray = [];
var length = data.length;
for (var i = 0; i < length;) {
if (data[i] > threshold) {
peaksArray.push(i);
// Skip forward ~ 1/4s to get past this peak.
i += 10000;
}
i++;
}
return peaksArray;
}
function countIntervalsBetweenNearbyPeaks(peaks) {
var intervalCounts = [];
peaks.forEach(function(peak, index) {
for (var i = 0; i < 10; i++) {
var interval = peaks[index + i] - peak;
var foundInterval = intervalCounts.some(function(intervalCount) {
if (intervalCount.interval === interval) return intervalCount.count++;
});
//Additional checks to avoid infinite loops in later processing
if (!isNaN(interval) && interval !== 0 && !foundInterval) {
intervalCounts.push({
interval: interval,
count: 1
});
}
}
});
return intervalCounts;
}
function groupNeighborsByTempo(intervalCounts) {
var tempoCounts = [];
intervalCounts.forEach(function(intervalCount) {
//Convert an interval to tempo
var theoreticalTempo = 60 / (intervalCount.interval / 44100);
theoreticalTempo = Math.round(theoreticalTempo);
if (theoreticalTempo === 0) {
return;
}
// Adjust the tempo to fit within the 90-180 BPM range
while (theoreticalTempo < 90) theoreticalTempo *= 2;
while (theoreticalTempo > 180) theoreticalTempo /= 2;
var foundTempo = tempoCounts.some(function(tempoCount) {
if (tempoCount.tempo === theoreticalTempo) return tempoCount.count += intervalCount.count;
});
if (!foundTempo) {
tempoCounts.push({
tempo: theoreticalTempo,
count: intervalCount.count
});
}
});
return tempoCounts;
}
// http://stackoverflow.com/questions/1669190/javascript-min-max-array-values
function arrayMin(arr) {
var len = arr.length,
min = Infinity;
while (len--) {
if (arr[len] < min) {
min = arr[len];
}
}
return min;
}
function arrayMax(arr) {
var len = arr.length,
max = -Infinity;
while (len--) {
if (arr[len] > max) {
max = arr[len];
}
}
return max;
}
<input id="audio_file" type="file" accept="audio/*"></input>
<audio id="audio_player"></audio>
<p>
Most likely tempo: <span id="output"></span>
</p>
I wrote a tutorial here which shows how to do this with the javascript Web Audio API.
https://askmacgyver.com/blog/tutorial/how-to-implement-tempo-detection-in-your-application
Outline of Steps
Transform Audio File into an Array Buffer
Run Array Buffer Through Low Pass Filter
Trim a 10 second Clip from the Array Buffer
Down Sample the Data
Normalize the Data
Count Volume Groupings
Infer Tempo from Groupings Count
This code below does the heavy lifting.
Load Audio File Into Array Buffer and Run Through Low Pass Filter
function createBuffers(url) {
// Fetch Audio Track via AJAX with URL
request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = function(ajaxResponseBuffer) {
// Create and Save Original Buffer Audio Context in 'originalBuffer'
var audioCtx = new AudioContext();
var songLength = ajaxResponseBuffer.total;
// Arguments: Channels, Length, Sample Rate
var offlineCtx = new OfflineAudioContext(1, songLength, 44100);
source = offlineCtx.createBufferSource();
var audioData = request.response;
audioCtx.decodeAudioData(audioData, function(buffer) {
window.originalBuffer = buffer.getChannelData(0);
var source = offlineCtx.createBufferSource();
source.buffer = buffer;
// Create a Low Pass Filter to Isolate Low End Beat
var filter = offlineCtx.createBiquadFilter();
filter.type = "lowpass";
filter.frequency.value = 140;
source.connect(filter);
filter.connect(offlineCtx.destination);
// Render this low pass filter data to new Audio Context and Save in 'lowPassBuffer'
offlineCtx.startRendering().then(function(lowPassAudioBuffer) {
var audioCtx = new(window.AudioContext || window.webkitAudioContext)();
var song = audioCtx.createBufferSource();
song.buffer = lowPassAudioBuffer;
song.connect(audioCtx.destination);
// Save lowPassBuffer in Global Array
window.lowPassBuffer = song.buffer.getChannelData(0);
console.log("Low Pass Buffer Rendered!");
});
},
function(e) {});
}
request.send();
}
createBuffers('https://askmacgyver.com/test/Maroon5-Moves-Like-Jagger-128bpm.mp3');
You Now Have an Array Buffer of the Low Pass Filtered Song (And Original)
It's comprised of a number of entries, sampleRate (44100 multiplied by the number of seconds of the song).
window.lowPassBuffer // Low Pass Array Buffer
window.originalBuffer // Original Non Filtered Array Buffer
Trim a 10 Second Clip from the Song
function getClip(length, startTime, data) {
var clip_length = length * 44100;
var section = startTime * 44100;
var newArr = [];
for (var i = 0; i < clip_length; i++) {
newArr.push(data[section + i]);
}
return newArr;
}
// Overwrite our array buffer to a 10 second clip starting from 00:10s
window.lowPassBuffer = getClip(10, 10, window.lowPassBuffer);
Down Sample Your Clip
function getSampleClip(data, samples) {
var newArray = [];
var modulus_coefficient = Math.round(data.length / samples);
for (var i = 0; i < data.length; i++) {
if (i % modulus_coefficient == 0) {
newArray.push(data[i]);
}
}
return newArray;
}
// Overwrite our array to down-sampled array.
lowPassBuffer = getSampleClip(lowPassBuffer, 300);
Normalize Your Data
function normalizeArray(data) {
var newArray = [];
for (var i = 0; i < data.length - 1; i++) { // stop before the last sample so data[i + 1] exists
newArray.push(Math.abs(Math.round((data[i + 1] - data[i]) * 1000)));
}
return newArray;
}
// Overwrite our array to the normalized array
lowPassBuffer = normalizeArray(lowPassBuffer);
Count the Flat Line Groupings
function countFlatLineGroupings(data) {
var groupings = 0;
var newArray = normalizeArray(data);
function getMax(a) {
var m = -Infinity,
i = 0,
n = a.length;
for (; i != n; ++i) {
if (a[i] > m) {
m = a[i];
}
}
return m;
}
function getMin(a) {
var m = Infinity,
i = 0,
n = a.length;
for (; i != n; ++i) {
if (a[i] < m) {
m = a[i];
}
}
return m;
}
var max = getMax(newArray);
var min = getMin(newArray);
var count = 0;
var threshold = Math.round((max - min) * 0.2);
for (var i = 0; i < newArray.length; i++) {
if (newArray[i] > threshold && newArray[i + 1] < threshold && newArray[i + 2] < threshold && newArray[i + 3] < threshold && newArray[i + 6] < threshold) {
count++;
}
}
return count;
}
// Count the Groupings
countFlatLineGroupings(lowPassBuffer);
Scale 10 Second Grouping Count to 60 Seconds to Derive Beats Per Minute
var final_tempo = countFlatLineGroupings(lowPassBuffer);
// final_tempo will be 21
final_tempo = final_tempo * 6;
console.log("Tempo: " + final_tempo);
// final_tempo will be 126
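Putting the steps together once the offline rendering in createBuffers has finished (a sketch following the same sequence of calls as above; estimateTempo is a hypothetical wrapper, and the 10-second clip offset is the same arbitrary choice used earlier):
// Sketch: run the whole pipeline on the rendered low-pass buffer.
function estimateTempo() {
  var clip = getClip(10, 10, window.lowPassBuffer); // 10-second clip starting at 00:10
  var sampled = getSampleClip(clip, 300);           // down-sample to ~300 points
  var normalized = normalizeArray(sampled);         // per-sample deltas
  var tempo = countFlatLineGroupings(normalized) * 6; // scale the 10 s count to BPM
  console.log('Tempo: ' + tempo);
  return tempo;
}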

Canvas drawing using jQuery/Angular

Here's what I'm trying to achieve. Draw a circle (the first circle) on the screen on a mouse click. Then draw successive circles on successive mouse clicks and connect each to the first circle.
I've managed to get this far.
Now the task is: if a circle has the same y-coordinate as the first one, the connection is a straight line; otherwise it should be an s-curve/inverted s-curve, depending on whether the new circle is above or below the first one.
It may be assumed that all successive circle will be on the right of the first circle.
Here's my code
var app = angular.module('plunker', []);
app.controller('MainController', function($scope) {
var canvas=document.getElementById("canvas");
var ctx=canvas.getContext("2d");
var cw=canvas.width;
var ch=canvas.height;
function reOffset(){
var BB=canvas.getBoundingClientRect();
offsetX=BB.left;
offsetY=BB.top;
}
var offsetX,offsetY;
reOffset();
window.onscroll=function(e){ reOffset(); }
var isDown=false;
var startX,startY;
var radius=10;
var lastX,lastY;
ctx.fillStyle='red';
$("#canvas").mousedown(function(e){handleMouseDown(e);});
function drawCircle(cx,cy){
if(lastX){
ctx.globalCompositeOperation='destination-over';
ctx.beginPath();
ctx.moveTo(lastX,lastY);
ctx.lineTo(cx,cy);
ctx.stroke();
ctx.globalCompositeOperation='source-over';
}else{
lastX=cx;
lastY=cy;
}
ctx.beginPath();
ctx.arc(cx,cy,radius,0,Math.PI*2);
ctx.closePath();
ctx.fill();
}
function handleMouseDown(e){
// tell the browser we're handling this event
e.preventDefault();
e.stopPropagation();
mx=parseInt(e.clientX-offsetX);
my=parseInt(e.clientY-offsetY);
drawCircle(mx,my);
}
});
Here's a link to the plunk that will demonstrate the behavior
http://plnkr.co/edit/rYVLgB14IutNh1F4MN6T?p=preview
Any help appreciated.
I don't know exactly which kind of s-curve you are interested in. As I understand it, there will always be only two points to connect, the first circle and each subsequent one, and you are looking for some sort of quadratic curve to do so. In that situation you can build an s-curve by joining two ctx.quadraticCurveTo calls.
ctx.beginPath();
ctx.moveTo(lastX,lastY);
ctx.quadraticCurveTo(
(lastX+cx)/2, lastY,
(lastX+cx)/2, (lastY+cy)/2
);
ctx.quadraticCurveTo(
(lastX+cx)/2, cy,
cx, cy
);
ctx.lineWidth = 3;
http://plnkr.co/edit/t10cMPcUtX5ifkWi2LBF?p=preview
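Combining this with the straight-line case from the question, the connector drawing might look like the sketch below (the tolerance used to decide that two circles share the same y-coordinate is an assumption):
// Sketch: straight line when the circles share (roughly) the same y,
// otherwise an s-curve / inverted s-curve built from two quadratic segments.
function drawConnector(ctx, lastX, lastY, cx, cy) {
  ctx.beginPath();
  ctx.moveTo(lastX, lastY);
  if (Math.abs(cy - lastY) < 1) { // same y-coordinate: straight line
    ctx.lineTo(cx, cy);
  } else {                        // above or below: s-curve / inverted s-curve
    ctx.quadraticCurveTo((lastX + cx) / 2, lastY, (lastX + cx) / 2, (lastY + cy) / 2);
    ctx.quadraticCurveTo((lastX + cx) / 2, cy, cx, cy);
  }
  ctx.lineWidth = 3;
  ctx.stroke();
}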
To make each of your connectors avoid existing circles, you must use a pathfinding algorithm (A* for example).
Pathfinding algorithms will give you a set of points from Circle1 to Circle2 that avoid all other circles.
Then you can use that set of points to build a connector between those circles using a Spline. See this very good answer by Stackoverflow's Ken Fyrstenberg on how to draw a spline. Make sure you keep the tension on the spline tight (closer to zero) so that your spline connector doesn't stray too far from the unobstructed path:
how to draw smooth curve through N points using javascript HTML5 canvas?
This is a nice script implementing the A* algorithm by Brian Grinstead:
https://github.com/bgrins/javascript-astar/
And here's a Demo of Brian Grinstead's A* script:
http://www.briangrinstead.com/files/astar/
To avoid a link-only answer, I'm attaching Brian's script from GitHub below...
But seriously...if GitHub disappears many of us subscribers are in trouble!
// javascript-astar 0.4.0
// http://github.com/bgrins/javascript-astar
// Freely distributable under the MIT License.
// Implements the astar search algorithm in javascript using a Binary Heap.
// Includes Binary Heap (with modifications) from Marijn Haverbeke.
// http://eloquentjavascript.net/appendix2.html
(function(definition) {
/* global module, define */
if(typeof module === 'object' && typeof module.exports === 'object') {
module.exports = definition();
} else if(typeof define === 'function' && define.amd) {
define([], definition);
} else {
var exports = definition();
window.astar = exports.astar;
window.Graph = exports.Graph;
}
})(function() {
function pathTo(node){
var curr = node,
path = [];
while(curr.parent) {
path.push(curr);
curr = curr.parent;
}
return path.reverse();
}
function getHeap() {
return new BinaryHeap(function(node) {
return node.f;
});
}
var astar = {
/**
* Perform an A* Search on a graph given a start and end node.
* @param {Graph} graph
* @param {GridNode} start
* @param {GridNode} end
* @param {Object} [options]
* @param {bool} [options.closest] Specifies whether to return the
*   path to the closest node if the target is unreachable.
* @param {Function} [options.heuristic] Heuristic function (see
*   astar.heuristics).
*/
search: function(graph, start, end, options) {
graph.cleanDirty();
options = options || {};
var heuristic = options.heuristic || astar.heuristics.manhattan,
closest = options.closest || false;
var openHeap = getHeap(),
closestNode = start; // set the start node to be the closest if required
start.h = heuristic(start, end);
openHeap.push(start);
while(openHeap.size() > 0) {
// Grab the lowest f(x) to process next. Heap keeps this sorted for us.
var currentNode = openHeap.pop();
// End case -- result has been found, return the traced path.
if(currentNode === end) {
return pathTo(currentNode);
}
// Normal case -- move currentNode from open to closed, process each of its neighbors.
currentNode.closed = true;
// Find all neighbors for the current node.
var neighbors = graph.neighbors(currentNode);
for (var i = 0, il = neighbors.length; i < il; ++i) {
var neighbor = neighbors[i];
if (neighbor.closed || neighbor.isWall()) {
// Not a valid node to process, skip to next neighbor.
continue;
}
// The g score is the shortest distance from start to current node.
// We need to check if the path we have arrived at this neighbor is the shortest one we have seen yet.
var gScore = currentNode.g + neighbor.getCost(currentNode),
beenVisited = neighbor.visited;
if (!beenVisited || gScore < neighbor.g) {
// Found an optimal (so far) path to this node. Take score for node to see how good it is.
neighbor.visited = true;
neighbor.parent = currentNode;
neighbor.h = neighbor.h || heuristic(neighbor, end);
neighbor.g = gScore;
neighbor.f = neighbor.g + neighbor.h;
graph.markDirty(neighbor);
if (closest) {
// If the neighbour is closer than the current closestNode or if it's equally close but has
// a cheaper path than the current closest node then it becomes the closest node
if (neighbor.h < closestNode.h || (neighbor.h === closestNode.h && neighbor.g < closestNode.g)) {
closestNode = neighbor;
}
}
if (!beenVisited) {
// Pushing to heap will put it in proper place based on the 'f' value.
openHeap.push(neighbor);
}
else {
// Already seen the node, but since it has been rescored we need to reorder it in the heap
openHeap.rescoreElement(neighbor);
}
}
}
}
if (closest) {
return pathTo(closestNode);
}
// No result was found - empty array signifies failure to find path.
return [];
},
// See list of heuristics: http://theory.stanford.edu/~amitp/GameProgramming/Heuristics.html
heuristics: {
manhattan: function(pos0, pos1) {
var d1 = Math.abs(pos1.x - pos0.x);
var d2 = Math.abs(pos1.y - pos0.y);
return d1 + d2;
},
diagonal: function(pos0, pos1) {
var D = 1;
var D2 = Math.sqrt(2);
var d1 = Math.abs(pos1.x - pos0.x);
var d2 = Math.abs(pos1.y - pos0.y);
return (D * (d1 + d2)) + ((D2 - (2 * D)) * Math.min(d1, d2));
}
},
cleanNode:function(node){
node.f = 0;
node.g = 0;
node.h = 0;
node.visited = false;
node.closed = false;
node.parent = null;
}
};
/**
* A graph memory structure
* @param {Array} gridIn 2D array of input weights
* @param {Object} [options]
* @param {bool} [options.diagonal] Specifies whether diagonal moves are allowed
*/
function Graph(gridIn, options) {
options = options || {};
this.nodes = [];
this.diagonal = !!options.diagonal;
this.grid = [];
for (var x = 0; x < gridIn.length; x++) {
this.grid[x] = [];
for (var y = 0, row = gridIn[x]; y < row.length; y++) {
var node = new GridNode(x, y, row[y]);
this.grid[x][y] = node;
this.nodes.push(node);
}
}
this.init();
}
Graph.prototype.init = function() {
this.dirtyNodes = [];
for (var i = 0; i < this.nodes.length; i++) {
astar.cleanNode(this.nodes[i]);
}
};
Graph.prototype.cleanDirty = function() {
for (var i = 0; i < this.dirtyNodes.length; i++) {
astar.cleanNode(this.dirtyNodes[i]);
}
this.dirtyNodes = [];
};
Graph.prototype.markDirty = function(node) {
this.dirtyNodes.push(node);
};
Graph.prototype.neighbors = function(node) {
var ret = [],
x = node.x,
y = node.y,
grid = this.grid;
// West
if(grid[x-1] && grid[x-1][y]) {
ret.push(grid[x-1][y]);
}
// East
if(grid[x+1] && grid[x+1][y]) {
ret.push(grid[x+1][y]);
}
// South
if(grid[x] && grid[x][y-1]) {
ret.push(grid[x][y-1]);
}
// North
if(grid[x] && grid[x][y+1]) {
ret.push(grid[x][y+1]);
}
if (this.diagonal) {
// Southwest
if(grid[x-1] && grid[x-1][y-1]) {
ret.push(grid[x-1][y-1]);
}
// Southeast
if(grid[x+1] && grid[x+1][y-1]) {
ret.push(grid[x+1][y-1]);
}
// Northwest
if(grid[x-1] && grid[x-1][y+1]) {
ret.push(grid[x-1][y+1]);
}
// Northeast
if(grid[x+1] && grid[x+1][y+1]) {
ret.push(grid[x+1][y+1]);
}
}
return ret;
};
Graph.prototype.toString = function() {
var graphString = [],
nodes = this.grid, // when using grid
rowDebug, row, y, l;
for (var x = 0, len = nodes.length; x < len; x++) {
rowDebug = [];
row = nodes[x];
for (y = 0, l = row.length; y < l; y++) {
rowDebug.push(row[y].weight);
}
graphString.push(rowDebug.join(" "));
}
return graphString.join("\n");
};
function GridNode(x, y, weight) {
this.x = x;
this.y = y;
this.weight = weight;
}
GridNode.prototype.toString = function() {
return "[" + this.x + " " + this.y + "]";
};
GridNode.prototype.getCost = function(fromNeighbor) {
// Take diagonal weight into consideration.
if (fromNeighbor && fromNeighbor.x != this.x && fromNeighbor.y != this.y) {
return this.weight * 1.41421;
}
return this.weight;
};
GridNode.prototype.isWall = function() {
return this.weight === 0;
};
function BinaryHeap(scoreFunction){
this.content = [];
this.scoreFunction = scoreFunction;
}
BinaryHeap.prototype = {
push: function(element) {
// Add the new element to the end of the array.
this.content.push(element);
// Allow it to sink down.
this.sinkDown(this.content.length - 1);
},
pop: function() {
// Store the first element so we can return it later.
var result = this.content[0];
// Get the element at the end of the array.
var end = this.content.pop();
// If there are any elements left, put the end element at the
// start, and let it bubble up.
if (this.content.length > 0) {
this.content[0] = end;
this.bubbleUp(0);
}
return result;
},
remove: function(node) {
var i = this.content.indexOf(node);
// When it is found, the process seen in 'pop' is repeated
// to fill up the hole.
var end = this.content.pop();
if (i !== this.content.length - 1) {
this.content[i] = end;
if (this.scoreFunction(end) < this.scoreFunction(node)) {
this.sinkDown(i);
}
else {
this.bubbleUp(i);
}
}
},
size: function() {
return this.content.length;
},
rescoreElement: function(node) {
this.sinkDown(this.content.indexOf(node));
},
sinkDown: function(n) {
// Fetch the element that has to be sunk.
var element = this.content[n];
// When at 0, an element can not sink any further.
while (n > 0) {
// Compute the parent element's index, and fetch it.
var parentN = ((n + 1) >> 1) - 1,
parent = this.content[parentN];
// Swap the elements if the parent is greater.
if (this.scoreFunction(element) < this.scoreFunction(parent)) {
this.content[parentN] = element;
this.content[n] = parent;
// Update 'n' to continue at the new position.
n = parentN;
}
// Found a parent that is less, no need to sink any further.
else {
break;
}
}
},
bubbleUp: function(n) {
// Look up the target element and its score.
var length = this.content.length,
element = this.content[n],
elemScore = this.scoreFunction(element);
while(true) {
// Compute the indices of the child elements.
var child2N = (n + 1) << 1,
child1N = child2N - 1;
// This is used to store the new position of the element, if any.
var swap = null,
child1Score;
// If the first child exists (is inside the array)...
if (child1N < length) {
// Look it up and compute its score.
var child1 = this.content[child1N];
child1Score = this.scoreFunction(child1);
// If the score is less than our element's, we need to swap.
if (child1Score < elemScore){
swap = child1N;
}
}
// Do the same checks for the other child.
if (child2N < length) {
var child2 = this.content[child2N],
child2Score = this.scoreFunction(child2);
if (child2Score < (swap === null ? elemScore : child1Score)) {
swap = child2N;
}
}
// If the element needs to be moved, swap it, and continue.
if (swap !== null) {
this.content[n] = this.content[swap];
this.content[swap] = element;
n = swap;
}
// Otherwise, we are done.
else {
break;
}
}
}
};
return {
astar: astar,
Graph: Graph
};
});
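For reference, a minimal usage sketch of the astar/Graph API included above (grid values are edge weights, with 0 treated as a wall; the small grid here is made up for illustration):
// Sketch: find a path through a small weighted grid.
var graph = new Graph([
  [1, 1, 1, 1],
  [0, 1, 1, 0],
  [0, 0, 1, 1]
]);
var start = graph.grid[0][0];
var end = graph.grid[2][3];
var path = astar.search(graph, start, end); // array of GridNodes leading to `end`
console.log(path.map(function(node) { return node.toString(); }).join(' -> '));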
