Connecting Multiple PannerNodes to output - javascript

I am trying to connect multiple PannerNodes to an output, but I have a problem with properly connecting them (while testing I get no audio). The audio is multichannel — I want to process each channel separately — and it comes from an HTML video element.
My main problem is that I have 6 different PannerNodes, each of which has one output with 2 channels, and I don't know how to properly connect them to the destination to get a stereo output (or any output, but my system only supports stereo).
Here is what I am doing:
Creating audioContext, MediaElementSource, etc. and first connection.
const context = new AudioContext();
// Feed the <video> element's audio into the graph. From this point on the
// element no longer plays through the speakers by itself: whatever graph we
// build here must reach an audible destination, or there will be silence.
const source = context.createMediaElementSource(video);
// BUG FIX: the original created a MediaStreamDestination whose .stream was
// never attached to any audio element, so nothing was ever audible. Route to
// the context's real output instead (the `dest` name is kept so the later
// connection loop still works unchanged).
const dest = context.destination;
// Split the 5.1 source into its 6 mono channels: L, R, SL, SR, C, LFE.
let splitter = new ChannelSplitterNode(context, {numberOfOutputs: 6});
// NOTE(review): a merger is not required here — connecting every panner to
// `dest` sums their stereo outputs automatically. Kept for reference.
let channel_merger = new ChannelMergerNode(context, {numberOfInputs: 2});
let listener = context.listener;
source.connect(splitter, 0, 0);
Setup the listener
// Orient the listener down the -Z axis with +Y up (the conventional
// "facing the screen" orientation).
// BUG FIX: the original referenced `audioCtx`, which is never defined in
// this snippet — the AudioContext variable is `context`.
if(listener.forwardX) {
listener.forwardX.setValueAtTime(0, context.currentTime);
listener.forwardY.setValueAtTime(0, context.currentTime);
listener.forwardZ.setValueAtTime(-1, context.currentTime);
listener.upX.setValueAtTime(0, context.currentTime);
listener.upY.setValueAtTime(1, context.currentTime);
listener.upZ.setValueAtTime(0, context.currentTime);
} else {
// Fallback for browsers that do not expose AudioParam-based listener
// properties (e.g. older Firefox).
listener.setOrientation(0,0,-1,0,1,0);
}
Creates PannerNodes and connects it to a splitter
// Placeholder slots for the six channel panners, filled in by set_pannerNode.
// BUG FIX: in `var a, b, c = null` only the LAST variable gets null — the
// others were undefined. Initialize every slot explicitly.
var FrontLeft = null, FrontCenter = null, FrontRight = null,
    SurroundLeft = null, SurroundRight = null, Sub = null;
var pannerNodesObjects = [FrontLeft, FrontCenter, FrontRight, SurroundLeft, SurroundRight, Sub];
// Builds a PannerNode with the given spatialization options, positions it at
// (x, y, z), and stores it in pannerNodesObjects[n].
//   panningModel  - e.g. 'HRTF'
//   distanceModel - 'linear' | 'inverse' | 'exponential' (default 'inverse')
//   n             - index into pannerNodesObjects
// NOTE: the `node` argument is effectively ignored — a fresh panner is always
// created (the original reassigned the parameter, which never updates the
// caller's variable anyway); the result is delivered via the array slot.
function set_pannerNode(node, panningModel, distanceModel, refDistance, maxDistance, rolloffFactor, coneInnerAngle, coneOuterAngle, coneOuterGain, x, y, z, n) {
node = context.createPanner();
// Spatialization options.
node.panningModel = panningModel;
node.distanceModel = distanceModel;
node.refDistance = refDistance;
node.maxDistance = maxDistance;
node.rolloffFactor = rolloffFactor;
node.coneInnerAngle = coneInnerAngle;
node.coneOuterAngle = coneOuterAngle;
node.coneOuterGain = coneOuterGain;
// BUG FIX: positionX/Y/Z are read-only AudioParams; assigning a plain number
// to them does nothing. Set the param's .value instead, with a setPosition
// fallback for browsers that lack the AudioParam properties.
if (node.positionX) {
node.positionX.value = x;
node.positionY.value = y;
node.positionZ.value = z;
} else {
node.setPosition(x, y, z);
}
pannerNodesObjects[n] = node;
}
// Create one panner per channel and feed it from the matching splitter output.
// BUG FIX: the original assigned i/nx/ny/nz without declaring them — implicit
// globals, and a ReferenceError in strict mode / ES modules.
for (let i = 0; i < pannerNodesObjects.length; i++){
// set_rotation yields the virtual speaker position for channel i
// (defined elsewhere in the project).
const [nx, ny, nz] = set_rotation(distanceFromScreen, screenCenterY, angleList[i], i);
set_pannerNode(pannerNodesObjects[i], 'HRTF', "exponential", 1, 100, 2, 360, 0, 0, nx, ny, nz, i);
// Route splitter output i (one mono channel) into panner i.
splitter.connect(pannerNodesObjects[i], i);
}
Connect PannerNodes to destination
// Mix every panner's stereo output into the destination; multiple connect()
// calls to the same node are summed automatically by the Web Audio graph.
// BUG FIX: loop variable declared (the original leaked `i` as a global).
for (let i = 0; i < pannerNodesObjects.length; i++){
pannerNodesObjects[i].connect(dest);
}
After that I update each PannerNode position in a loop. But I have no audio while playing the video, so I have probably done something wrong with the AudioNode connections.

Related

Building an Array with JavaScript

I am trying to capture the Sound Frequency value from the Device Microphone from a Web Browser, utilizing the Web Audio API. The Instructions only cover how to play a sound file and manipulate its output, etc.
I need to listen to a Humming Sound from a person and figure out the Sound frequency, which I will then convert to a musical note. I need to build an array with all the frequencies that are generated from a Single Musical Note that I play from a digital Piano keyboard. I will then get the mode of all the frequencies to get the actual Musical note.
I am able to display the frequency and notes that I detect onto the Console.log(), however, I am not able to build the array, get the mode, and then clear-out the array when I play a new note. My array remains active because the code is on an event listening mode and remains active to listen for the next note.
This is my code:
// Notes detected in the current "session"; cleared after a mode is computed.
var arrayList = [];
// Returns the mode(s) of `array`: every value whose occurrence count ties the
// maximum. Multi-modal input yields several entries (in first-seen order);
// an empty array yields [].
// IDIOM FIX: the original used `for...in` over an array and abused an Array
// as a hash map keyed by note strings; a Map makes the counting explicit.
function getModes(array) {
var counts = new Map();
var maxCount = 0;
for (const value of array) {
const next = (counts.get(value) || 0) + 1; // increment this value's count
counts.set(value, next);
if (next > maxCount) {
maxCount = next; // track the highest count seen so far
}
}
var modes = [];
for (const [value, count] of counts) {
if (count === maxCount) {
modes.push(value);
}
}
return modes;
}
// One pitch-detection frame: reads the analyser's time-domain samples,
// autocorrelates them to estimate the pitch, updates the UI, accumulates
// detected note names in arrayList, and re-schedules itself via
// requestAnimationFrame.
// Relies on module globals defined elsewhere: analyser, buf, audioContext,
// autoCorrelate, DEBUGCANVAS, waveCanvas, detectorElem, pitchElem, noteElem,
// detuneElem, detuneAmount, noteStrings, noteFromPitch, centsOffFromPitch,
// arrayList, getModes, rafID.
function updatePitch(time) {
var cycles = new Array; // NOTE(review): never used in this function
analyser.getFloatTimeDomainData(buf);
// autoCorrelate returns the estimated frequency in Hz, or -1 when the
// signal is too weak or ambiguous (defined elsewhere).
var ac = autoCorrelate(buf, audioContext.sampleRate);
// TODO: Paint confidence meter on canvasElem here.
if (DEBUGCANVAS) { // This draws the current waveform, useful for debugging
// Clear the canvas and draw red vertical reference lines every 128 px.
waveCanvas.clearRect(0, 0, 512, 256);
waveCanvas.strokeStyle = "red";
waveCanvas.beginPath();
waveCanvas.moveTo(0, 0);
waveCanvas.lineTo(0, 256);
waveCanvas.moveTo(128, 0);
waveCanvas.lineTo(128, 256);
waveCanvas.moveTo(256, 0);
waveCanvas.lineTo(256, 256);
waveCanvas.moveTo(384, 0);
waveCanvas.lineTo(384, 256);
waveCanvas.moveTo(512, 0);
waveCanvas.lineTo(512, 256);
waveCanvas.stroke();
// Plot the waveform in black: samples in [-1, 1] are mapped around y=128.
waveCanvas.strokeStyle = "black";
waveCanvas.beginPath();
waveCanvas.moveTo(0, buf[0]);
for (var i = 1; i < 512; i++) {
waveCanvas.lineTo(i, 128 + (buf[i] * 128));
}
waveCanvas.stroke();
}
if (ac == -1) {
// No confident pitch detected: show placeholder UI.
detectorElem.className = "vague";
pitchElem.innerText = "--";
noteElem.innerText = "-";
detuneElem.className = "";
detuneAmount.innerText = "--";
} else {
detectorElem.className = "confident";
pitch = ac; // NOTE(review): implicit global — should be declared somewhere
pitchElem.innerText = Math.round(pitch);
var note = noteFromPitch(pitch);
// Here is where I am converting the frequency to a note letter
var noteString = noteStrings[note % 12];
console.log(noteString);
// This is where I am building the array range with the notes that I find
// I have a nice array, but it keeps building and I do not know how to clear it for
// the next session.
// MIDI notes 36..96 (~C2..C7) are treated as valid notes; anything outside
// that range ends the current "session": the mode of the collected notes is
// displayed and arrayList is reset for the next note.
if (note >=36 && note <= 96) {
if (arrayList) {
arrayList.push(noteString);
}
console.log(noteString);
}
else {
console.log("not note");
var MyNote = getModes(arrayList)
noteElem.innerHTML = MyNote;
arrayList = []; // start a fresh session for the next note
}
// This function remains active and continues to listen for the next not to
// generate and new note letter
// Show how far off the detected pitch is from the nearest note, in cents.
var detune = centsOffFromPitch(pitch, note);
if (detune == 0) {
detuneElem.className = "";
detuneAmount.innerHTML = "--";
} else {
if (detune < 0)
detuneElem.className = "flat";
else
detuneElem.className = "sharp";
detuneAmount.innerHTML = Math.abs(detune);
}
}
// Re-schedule this function for the next animation frame (webkit fallback).
if (!window.requestAnimationFrame)
window.requestAnimationFrame = window.webkitRequestAnimationFrame;
rafID = window.requestAnimationFrame(updatePitch);
}
How do I clear the array and use a new array when I play a new note?
thank you for the support...

Is there any way to change a video file's FPS in the browser with JavaScript, or to prepare a WAV converter for 60FPS videos?

I'm making a web application which stores short audio files that have been cut from large video files. The user uploads an .mp4 file and chooses the sound length, and here's a little trick. Cutting audio can only be done in the backend (correct me if I'm wrong) and sending 700MB of data is not a good option, so I use the code below to decode the audio data from the .mp4 and then I send it with start and stop params. The backend (Node.js) uses FFMPEG to cut the audio and saves it.
This part works, but I realised that decoded audio from a 60FPS video doesn't sound good (not terrible, but totally useless in my app). My goal is to avoid third-party — especially desktop — apps (like Audacity) and allow the user to cut the relevant part of the audio from any mp4 video. Is there any way to convert a 60FPS video to a 30FPS video (ArrayBuffer) in the browser and then decode the audio?
// Runs when the user picks a file. The arrow function keeps the enclosing
// `this`, so this code assumes it lives inside a class/component instance
// that owns `file`, `fileURL` and `bufferToWave` — TODO confirm.
fileInput.onchange = event => {
this.file = event.target["files"][0];
//.mp4 file
this.fileURL = URL.createObjectURL(this.file)
let baseAudioContext = new AudioContext();
// decodeAudioData extracts and decodes the audio track from the container.
// NOTE(review): it resamples to the AudioContext's (device) sample rate —
// possibly the source of the 60FPS quality complaint; verify.
this.file.arrayBuffer().then(buff => {
baseAudioContext.decodeAudioData(buff,
success => {
console.log(success)
// Export the whole decoded buffer as a WAV file (method below).
this.bufferToWave(success, 0, success.length);
},
err => console.log(err));
})
}
bufferToWave(abuffer, offset, len) {
var numOfChan = abuffer.numberOfChannels,
length = len * numOfChan * 2 + 44,
buffer = new ArrayBuffer(length),
view = new DataView(buffer),
channels = [], i, sample,
pos = 0;
// write WAVE header
setUint32(0x46464952); // "RIFF"
setUint32(length - 8); // file length - 8
setUint32(0x45564157); // "WAVE"
setUint32(0x20746d66); // "fmt " chunk
setUint32(16); // length = 16
setUint16(1); // PCM (uncompressed)
setUint16(numOfChan);
setUint32(abuffer.sampleRate);
setUint32(abuffer.sampleRate * 2 * numOfChan); // avg. bytes/sec
setUint16(numOfChan * 2); // block-align
setUint16(16); // 16-bit (hardcoded in this demo)
setUint32(0x61746164); // "data" - chunk
setUint32(length - pos - 4); // chunk length
// write interleaved data
for (i = 0; i < abuffer.numberOfChannels; i++)
channels.push(abuffer.getChannelData(i));
while (pos < length) {
for (i = 0; i < numOfChan; i++) { // interleave channels
sample = Math.max(-1, Math.min(1, channels[i][offset])); // clamp
sample = (0.5 + sample < 0 ? sample * 32768 : sample * 32767) | 0; // scale to 16-bit signed int
view.setInt16(pos, sample, true); // update data chunk
pos += 2;
}
offset++ // next source sample
}
// create Blob
//return (URL || webkitURL).createObjectURL(new Blob([buffer], { type: "audio/wav" }));
var u = (URL || webkitURL).createObjectURL(new Blob([buffer], { type: "audio/wav" }));
//temporary part
//downloading file to check quality
//in this part sound is already broken, no need to show backend code
const a = document.createElement('a');
a.style.display = 'none';
a.href = u;
a.download = name;
document.body.appendChild(a);
a.click();
function setUint16(data) {
view.setUint16(pos, data, true);
pos += 2;
}
function setUint32(data) {
view.setUint32(pos, data, true);
pos += 4;
}
}

Three.js BufferGeometry Vertices Not Updating

I am looking to connect plane buffer geometry grid tiles which have real elevation data from IndexedDB. My issue is that the resolution of the SRTM elevation data is not perfect, so the edges between the tiles are not the same. I need to essentially average out all the grid edges between the touching vertices to create a seamless terrain.
When I copy paste the code into the console in the scene it works. However just in the code it doesn't. The sceneRef that is passed is valid and the rest of the codebase using the sceneRef correctly.
The tiles are a 3 x 3 with the current grid tile being the center at 1,1 from range 0,0 - 2,2.
// Averages the shared-edge vertex heights between adjacent terrain tiles so
// the tile grid renders as a seamless surface. Tiles are looked up on the
// scene by a "lat-long" name; missing neighbours are skipped.
// NOTE(review): the stride of 27 assumes a fixed vertex-row width in the
// position buffer — confirm it matches the plane's segment count.
// BUG FIX: the snippet as posted was missing its closing brace; loop
// variables are now declared with `let` instead of leaking via `var`.
function connectTiles(currGridKey, sceneRef){
console.log("connectTiles");
console.log("currGridKey");
// Current Tile Connection
for (let lat = 0; lat < currGridKey[0]+2; lat++) {
for (let long = 0; long < currGridKey[1]+2; long++) {
const currentTile = sceneRef.getObjectByName(`${lat}-${long}`);
// Current Grid Tile Per Loop
if (currentTile) {
const currentTileVerts = currentTile.geometry.attributes.position.array,
latPlusTile = sceneRef.getObjectByName(`${lat}-${long+1}`),
longPlusTile = sceneRef.getObjectByName(`${lat+1}-${long}`);
// Connect Latitudinally: average each pair of edge vertices.
if (latPlusTile) {
const latPlusTileVerts = latPlusTile.geometry.attributes.position.array;
for (let z = 0; z < currentTileVerts.length; z+=27) {
const newVertHeight = (currentTileVerts[z] + latPlusTileVerts[z]) / 2;
latPlusTileVerts[z] = newVertHeight;
currentTileVerts[z] = newVertHeight;
}
// Flag both attributes so three.js re-uploads them to the GPU.
latPlusTile.geometry.attributes.position.needsUpdate = true;
currentTile.geometry.attributes.position.needsUpdate = true;
}
// Connection Longitudinally
if (longPlusTile) {
const longPlusTileVerts = longPlusTile.geometry.attributes.position.array;
for (let x = 0; x < currentTileVerts.length; x+=3) {
const newVertHeight = (currentTileVerts[x] + longPlusTileVerts[x]) / 2;
longPlusTileVerts[x] = newVertHeight;
currentTileVerts[x] = newVertHeight;
}
longPlusTile.geometry.attributes.position.needsUpdate = true;
currentTile.geometry.attributes.position.needsUpdate = true;
}
}
}
}
}
If all values inside the array are in fact being updated, maybe they're just not getting uploaded to the GPU. Instead of changing the value inside geometry.attributes.position directly, try using the .setAttribute() method. The docs state that using .setAttribute() and .getAttribute() is preferable to accessing it directly because it has its own internal storage mechanism.
// Read the attribute through the geometry's accessor rather than reaching
// into .attributes directly (illustrative fragment — latPlusTile and THREE
// come from the surrounding project code).
const latPlusTileVerts = latPlusTile.geometry.getAttribute("position").array;
// ... Loops
latPlusTile.geometry.getAttribute("position").needsUpdate = true;
// Or an alternative is to generate a new attribute...
// in case updating the old one fails
const posAttrib = new THREE.BufferAttribute(latPlusTileVerts, 3);
latPlusTile.geometry.setAttribute("position", posAttrib);

Web Audio Api : How do I add a working convolver?

What I am trying to learn / do: How to set up a simple working convolver (reverb) into my code sandbox below using an impulse response. I thought it was similar to setting a filter but things seem quite different.
What I tried: As with all new technologies things change at a fast pace making it difficult to know which implementation is correct and what is not. I looked at countless WebAudio Api Convolver Tutorials, many were old and others were working but far too "bloated" making it hard to understand what is going on. I tried to implement some of the examples from the mozilla documentation:
I already had a look at: https://developer.mozilla.org/en-US/docs/Web/API/ConvolverNode/buffer
My question: How do I integrate a convolver properly in the context below? As you can see I tried but can't figure this out.
// Defer all audio setup until the page has fully loaded.
window.addEventListener('load', init, false);
function init() {
setupWebAudio();
}
// Builds the audio graph for the <audio id="music"> element: a lowpass
// filter (dry path) plus a convolution reverb (wet path), with mousedown
// handlers on #test's children tweaking the filter.
// BUG FIXES vs. the original attempt:
//  - convolver.buffer must be a decoded AudioBuffer, not the file-name
//    string "hall.mp3"; the IR is now fetched and decoded asynchronously.
//  - the stray BufferSourceNode sharing the IR "buffer" was removed — an
//    impulse response is not meant to be played back directly.
//  - the convolver's output was never connected to anything; it now feeds
//    context.destination.
function setupWebAudio() {
var audio = document.getElementById('music');
var context = new AudioContext();
var source = context.createMediaElementSource(audio);
var filter = context.createBiquadFilter();
var convolver = context.createConvolver();
// Fetch the impulse response as raw bytes and decode it off-thread.
// Until decoding finishes the reverb path is simply silent/unaffected.
var irRequest = new XMLHttpRequest();
irRequest.open("GET", "hall.mp3", true);
irRequest.responseType = "arraybuffer";
irRequest.onload = function() {
context.decodeAudioData(irRequest.response, function(buffer) {
convolver.buffer = buffer;
});
};
irRequest.send();
filter.type = 'lowpass';
filter.frequency.value = 400;
var theParent = document.getElementById("test");
theParent.addEventListener("mousedown", doSomething, false);
function doSomething(e) {
if (e.target !== e.currentTarget) {
if(e.target == theParent.children[0]){
filter.frequency.value += 200;
}
else if(e.target == theParent.children[1]){
filter.frequency.value -= 200;
}
else if(e.target == theParent.children[2]){
filter.type = 'highpass';
}
}
e.stopPropagation();
}
// Routing: source -> filter -> speakers (dry)
//          source -> convolver -> speakers (wet)
source.connect(filter);
source.connect(convolver);
filter.connect(context.destination);
convolver.connect(context.destination);
audio.play();
}
This is a pretty open-ended question; what have you tried that hasn't worked, or is the piece you're missing what the "impulse response" is supposed to be? If the latter, search for "impulse response files" and you'll find tons of free files you can use. You can also generate noise on a logarithmic decay curve into a buffer, and you'll get a basic reverb effect. Basic method to create an impulseResponse buffer:
// Generates a synthetic stereo impulse response: white noise shaped by a
// polynomial decay envelope. Handy as a ConvolverNode buffer when no recorded
// IR file is available. Reads the global `audioContext`.
//   duration - length in seconds
//   decay    - envelope exponent (falsy values fall back to 2.0)
//   reverse  - when truthy, the envelope ramps up instead of down
function impulseResponse( duration, decay, reverse ) {
  const rate = audioContext.sampleRate;
  const frameCount = rate * duration;
  const buffer = audioContext.createBuffer(2, frameCount, rate);
  const left = buffer.getChannelData(0);
  const right = buffer.getChannelData(1);
  const exponent = decay || 2.0; // same falsy-default as the original check
  for (let frame = 0; frame < frameCount; frame++) {
    const envelopeIndex = reverse ? frameCount - frame : frame;
    const envelope = Math.pow(1 - envelopeIndex / frameCount, exponent);
    left[frame] = (Math.random() * 2 - 1) * envelope;
    right[frame] = (Math.random() * 2 - 1) * envelope;
  }
  return buffer;
}
Your code has both a BufferSourceNode and the convolver pointing to the same buffer, which is almost certainly wrong; you don't usually play back an impulse response file using a buffersource, and you don't usually use a normal sound file as an impulse response. (Look up convolution on Wikipedia if the role of an impulse response isn't clear.) You need to do something like:
// Routes an <audio id="music"> element through a convolution reverb. The
// impulse response file is fetched asynchronously; once decoded, the buffer
// is handed to the convolver and the reverb takes effect — until then the
// sound passes through unaffected.
function setupWebAudio() {
  const audioElement = document.getElementById('music');
  const context = new AudioContext();
  const source = context.createMediaElementSource(audioElement);
  const convolver = context.createConvolver();

  // Fetch the impulse response as raw bytes.
  const request = new XMLHttpRequest();
  request.open("GET", "hall.mp3", true);
  request.responseType = "arraybuffer";
  request.onload = function () {
    // Decode the compressed audio, then install the PCM buffer as the IR.
    context.decodeAudioData(request.response, function (buffer) {
      convolver.buffer = buffer;
    });
  };
  request.send();

  // source -> convolver -> speakers
  source.connect(convolver);
  convolver.connect(context.destination);
}
Connect the output of the convolver to something. What you have now is the source connected to the convolver, but the convolver isn't connected to anything. As a first cut, convolver.connect(context.destination).

How to create a background process in Javascript

I'm writing a web process to emulate the Connect4 board game. I have set it out using an AngularJS controller with seven columns and six rows, with each cell referenced as c1r1 as below.
// Board initialisation: one $scope property per cell (7 columns x 6 rows),
// all starting as white (empty) counters.
$scope.c1r1 = $scope.counterWhite;
$scope.c1r2 = $scope.counterWhite;
$scope.c1r3 = $scope.counterWhite;
// (the two "." lines below stand for the elided cells c1r4 ... c7r3)
.
.
$scope.c7r4 = $scope.counterWhite;
$scope.c7r5 = $scope.counterWhite;
$scope.c7r6 = $scope.counterWhite;
When the user clicks on any cell it raises an event which I want to show the counter dropping through the available cells, with code as below
// NOTE(review): this is why nothing animates. `setTimeout(fillC1(), 1000)`
// CALLS fillC1 immediately and passes its return value to setTimeout, so no
// delay ever happens between steps; and every assignment below runs
// synchronously in a single tick, so only the final board state is rendered.
// Each step must instead be scheduled as a deferred callback.
// (fillC1 is defined elsewhere — its purpose is not visible here.)
$scope.c1r1 = $scope.counterYellow;
setTimeout(fillC1(), 1000);
$scope.c1r1 = $scope.counterWhite;
$scope.c1r2 = $scope.counterYellow;
setTimeout(fillC1(), 1000);
$scope.c1r2 = $scope.counterWhite;
$scope.c1r3 = $scope.counterYellow;
setTimeout(fillC1(), 1000);
$scope.c1r3 = $scope.counterWhite;
$scope.c1r4 = $scope.counterYellow;
setTimeout(fillC1(), 1000);
$scope.c1r4 = $scope.counterWhite;
$scope.c1r5 = $scope.counterYellow;
setTimeout(fillC1(), 1000);
$scope.c1r5 = $scope.counterWhite;
$scope.c1r6 = $scope.counterYellow;
When I run it nothing happens and the counter just appears at the bottom so I need a thread to make it work properly but as JavaScript doesn't do threading I wondered if there was another answer like Web Workers.
Right, the way you set up your cells is part of the problem.
Instead of that load of $scope.cXrY variables, use arrays:
// Build the board as a 2-D array ($scope.cells[column][row]) instead of 42
// separate scalar properties: 7 columns by 6 rows, all starting white.
$scope.cells = [];
var x, y;
for(x = 0; x < 7; x ++){
$scope.cells[x] = []; // one array per column
for(y = 0; y < 6; y ++){
$scope.cells[x][y] = $scope.counterWhite;
}
}
There you go, all your cells in one variable.
Instead of $scope.c2r4, you'd use $scope.cells[2][4].
Now, for the counter going over the cells:
// Steps a yellow counter across the cells one move per second: blanks the
// current cell, advances (x, y), colours the new cell, and re-schedules
// itself until the last cell is reached. x and y are the shared closure
// variables declared below; fillC1 is defined elsewhere — TODO confirm
// what it does.
function iterateOverCells(){
fillC1()
$scope.cells[x][y] = $scope.counterWhite; // Make the previous cell white
x++; // Get next cell position
if(x >= 7){
// Wrapped past the last column: move to the next row.
x = 0;
y++;
}
$scope.cells[x][y] = $scope.counterYellow; // Current cell yellow
if(x < 7 && y < 6) // If we're not at the last cell
setTimeout(iterateOverCells, 1000); // Go to the next cell in 1 second
}
// Kick off the walk from the first cell.
var x = 0, y = 0;
$scope.cells[x][y] = $scope.counterYellow; // Make the first cell yellow
setTimeout(iterateOverCells, 1000); // Next cell in 1 second
Web Workers don't have access to the DOM, they are for calculations. I'm not sure why you want to use a background thread, but here's how you can do it:
Let's assume you wrote a script with your background code, which posts messages when it's done with stuff (will get back to that in a sec:
// Worker script (background.js): posts a message to the page every 2 seconds.
function doWork() {
// postMessage here is the worker-global API, delivered to executer.onmessage.
postMessage("did something");
// Re-schedule: note the function REFERENCE is passed, not a call.
setTimeout(doWork, 2000);
}
doWork();
Now, check that the browser supports web workers, create one, and listen on the messages published from it:
// Spawn the background worker and render whatever it posts.
// BUG FIX: the original tested `typeof(w)` but stored the worker in
// `executer`, so the guard never inspected the right variable; `executer`
// was also an implicit global. Guard on Worker support and declare the
// variable instead.
var executer;
if (typeof(Worker) !== "undefined") {
executer = new Worker("background.js");
}
executer.onmessage = function(event){
// NOTE(review): event.data comes from our own worker here, but prefer
// textContent over innerHTML if the payload could ever contain markup.
document.getElementById("results").innerHTML = event.data;
};
When you're done with it, you can remove it like so:
// Stops the worker immediately; it cannot be resumed — create a new Worker to restart.
executer.terminate();

Categories