Using three in ECMA Script 6 STL Viewer class - javascript

I am trying to convert a three.js STL viewer demonstration into an ES6 class, which is then compiled with webpack 2, but I want it to open the file from a URL rather than from an actual file input. I've managed to load the three.js components, and most of the class is working, but I think I am having scope problems with a few of the components. I've run into a problem I can't seem to get past when using FileReader and then updating the scene and object: TypeError: Cannot read property 'remove' of undefined
threeDimensionalModels.js
// jshint esversion:6
import {
Scene, PerspectiveCamera, WebGLRenderer, Geometry, Mesh,
AmbientLight, DirectionalLight, MeshPhongMaterial, Vector3, Face3,
} from 'three';
// Module-scope `thisClass` global removed: `this` is now carried into the
// async callbacks below via arrow functions instead.

/**
 * Minimal STL viewer built on three.js.
 *
 * Sets up a renderer, camera, lights and an (initially empty) mesh inside
 * the element identified by `selector`, and can replace that mesh with
 * geometry parsed from an STL file fetched over HTTP (see openStl).
 */
class threeDimensionalModels {
  /**
   * @param {number} height   viewport height in pixels
   * @param {number} width    viewport width in pixels
   * @param {string} selector id of the DOM element that receives the canvas
   */
  constructor(height, width, selector) {
    // NOTE(review): the original assigned the `height` argument to `this.w`
    // and `width` to `this.h`; preserved verbatim, but the names look
    // swapped — confirm against callers.
    this.w = height;
    this.h = width;
    this.selector = selector;
    this.renderer = new WebGLRenderer();
    this.view = document.getElementById(this.selector);
    this.camera = new PerspectiveCamera(45, this.w / this.h, 1, 1000);
    this.scene = new Scene();
    this.light1 = new DirectionalLight(0xffffff);
    this.light2 = new DirectionalLight(0xffffff);
    this.mat = new MeshPhongMaterial({ color: 0x339900, specular: 0x030303 });
    this.obj = new Mesh(new Geometry(), this.mat);
    this.renderer.setSize(this.w, this.h);
    this.view.appendChild(this.renderer.domElement);
    this.camera.position.set(0, 0, 50);
    this.scene.add(new AmbientLight(0x666666));
    this.light1.position.set(0, 100, 100);
    this.scene.add(this.light1);
    this.light2.position.set(0, -100, -100);
    this.scene.add(this.light2);
    this.scene.add(this.obj);
  }

  /**
   * Reads one little-endian Vector3 (3 x float32) from a binary STL buffer.
   * @param {DataView} view
   * @param {number} offset byte offset of the vector
   * @returns {Vector3}
   */
  static binaryVector3(view, offset) {
    var v = new Vector3();
    v.x = view.getFloat32(offset + 0, true);
    v.y = view.getFloat32(offset + 4, true);
    v.z = view.getFloat32(offset + 8, true);
    return v;
  }

  /**
   * Parses a binary STL buffer into a Geometry.
   * Layout: 80-byte header, uint32 triangle count at byte 80, then
   * 50-byte records (normal + 3 vertices + 2-byte attribute count).
   * @param {ArrayBuffer} buffer
   * @returns {Geometry}
   */
  static loadBinaryStl(buffer) {
    // binary STL
    var view = new DataView(buffer);
    var size = view.getUint32(80, true);
    var geom = new Geometry();
    var offset = 84;
    for (var i = 0; i < size; i++) {
      var normal = threeDimensionalModels.binaryVector3(view, offset);
      geom.vertices.push(threeDimensionalModels.binaryVector3(view, offset + 12));
      geom.vertices.push(threeDimensionalModels.binaryVector3(view, offset + 24));
      geom.vertices.push(threeDimensionalModels.binaryVector3(view, offset + 36));
      geom.faces.push(
        new Face3(i * 3, i * 3 + 1, i * 3 + 2, normal));
      offset += 4 * 3 * 4 + 2; // 50-byte record stride
    }
    return geom;
  }

  /** Converts a regex match with three numeric groups into a Vector3. */
  static m2vec3(match) {
    var v = new Vector3();
    v.x = parseFloat(match[1]);
    v.y = parseFloat(match[2]);
    v.z = parseFloat(match[3]);
    return v;
  }

  /**
   * Splits a byte array on newline (ASCII 10) into an array of strings.
   * @param {Uint8Array} array
   * @returns {string[]}
   */
  static toLines(array) {
    var lines = [];
    var h = 0;
    for (var i = 0; i < array.length; i++) {
      if (array[i] === 10) {
        var line = String.fromCharCode.apply(
          null, array.subarray(h, i));
        lines.push(line);
        h = i + 1;
      }
    }
    lines.push(String.fromCharCode.apply(null, array.subarray(h)));
    return lines;
  }

  /**
   * Parses an ASCII ("text") STL buffer into a Geometry. Throws when the
   * buffer does not follow the text STL grammar, which loadStlModel uses
   * to fall back to the binary parser.
   * @param {ArrayBuffer} buffer
   * @returns {Geometry}
   */
  static loadTextStl(buffer) {
    var lines = threeDimensionalModels.toLines(new Uint8Array(buffer));
    var index = 0;
    var scan = function (regexp) {
      while (lines[index].match(/^\s*$/)) index++;
      var r = lines[index].match(regexp);
      return r;
    };
    var scanOk = function (regexp) {
      var r = scan(regexp);
      if (!r) throw new Error(
        "not text stl: " + regexp.toString() +
        "=> (line " + (index - 1) + ")" +
        "[" + lines[index-1] + "]");
      index++;
      return r;
    };
    var facetReg = /^\s*facet\s+normal\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)/;
    // Bug fix: the first capture group was [^s]+ (missing backslash) —
    // i.e. "any char except the letter s" — instead of the intended
    // non-whitespace class [^\s]+.
    var vertexReg = /^\s*vertex\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)/;
    var geom = new Geometry();
    scanOk(/^\s*solid\s(.*)/);
    while (!scan(/^\s*endsolid/)) {
      var normal = scanOk(facetReg);
      scanOk(/^\s*outer\s+loop/);
      var v1 = scanOk(vertexReg);
      var v2 = scanOk(vertexReg);
      var v3 = scanOk(vertexReg);
      scanOk(/\s*endloop/);
      scanOk(/\s*endfacet/);
      var base = geom.vertices.length;
      geom.vertices.push(threeDimensionalModels.m2vec3(v1));
      geom.vertices.push(threeDimensionalModels.m2vec3(v2));
      geom.vertices.push(threeDimensionalModels.m2vec3(v3));
      geom.faces.push(
        new Face3(base, base + 1, base + 2, threeDimensionalModels.m2vec3(normal)));
    }
    return geom;
  }

  /** Tries the text parser first and falls back to binary on failure. */
  static loadStlModel(buffer) {
    try {
      return threeDimensionalModels.loadTextStl(buffer);
    } catch (ex) {
      return threeDimensionalModels.loadBinaryStl(buffer);
    }
  }

  /**
   * Fetches an STL file from `url`, parses it, and swaps it in as the
   * displayed mesh.
   *
   * Bug fix: the original callbacks referenced `threeDimensionalModels.scene`
   * etc. — static (class) access, where those properties do not exist —
   * producing "Cannot read property 'remove' of undefined". Arrow functions
   * keep `this` bound to the instance, so no module-level global is needed.
   * @param {string} url location of the STL file
   */
  openStl(url) {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", url);
    xhr.responseType = "blob"; // force the HTTP response body to be a Blob
    xhr.onload = () => {
      var reader = new FileReader();
      reader.addEventListener("load", (ev) => {
        var geom = threeDimensionalModels.loadStlModel(ev.target.result);
        this.scene.remove(this.obj);
        this.obj = new Mesh(geom, this.mat);
        this.scene.add(this.obj);
      }, false);
      reader.readAsArrayBuffer(xhr.response);
    };
    xhr.send();
  }
}
export default threeDimensionalModels;
HTML
<!-- Container that receives the WebGL canvas created by the viewer. -->
<div id="threedm-view"></div>
<script type="text/javascript">
window.addEventListener("load", function () {
"use strict";
// NOTE(review): this assumes threeDimensionalModels is reachable as a global;
// with a webpack bundle the class must be exported to the page (e.g. via
// output.library) — confirm the build configuration.
var threedm = new threeDimensionalModels(800,800,'threedm-view');
// Render loop: spin the current mesh and redraw on every animation frame.
var loop = function loop() {
requestAnimationFrame(loop);
threedm.obj.rotation.z += 0.05;
threedm.renderer.clear();
threedm.renderer.render(threedm.scene, threedm.camera);
};
loop();
// Kick off the asynchronous STL download once the page has loaded.
threedm.openStl("/app/uploads/2017/07/Stator.stl");
});
</script>

After a bit more playing around and looking at the problem area, I realised that I was assigning `this` to a global variable in a context where the `this` I wanted did not exist. I would like to find a better way of passing `this` into child functions inside ES6 class methods without using a global variable.
update
class threeDimensionalModels {
// .... //
  /**
   * Fetches an STL file from `url` and swaps it in as the displayed mesh.
   *
   * Arrow functions are used for both async callbacks, so `this` stays
   * lexically bound to the class instance — no module-level `thisClass`
   * variable (or any other global) is required.
   * @param {string} url location of the STL file
   */
  openStl(url) {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", url);
    xhr.responseType = "blob";
    xhr.onload = () => {
      var reader = new FileReader();
      reader.addEventListener("load", (ev) => {
        var geom = threeDimensionalModels.loadStlModel(ev.target.result);
        this.scene.remove(this.obj);
        this.obj = new Mesh(geom, this.mat);
        this.scene.add(this.obj);
      }, false);
      reader.readAsArrayBuffer(xhr.response);
    };
    xhr.send();
  }
// ... //
}

Related

Attempting to merge Record.js and driveApi

I have this record.js script to toggle a recording that is currently working as expected.
/**
 * Microphone recorder built on the Web Audio API.
 *
 * Captures raw PCM through a ScriptProcessor node, exposes
 * start/stop/toggle controls, packages the capture as a mono 16-bit WAV
 * blob (self.blob), and can upload it as multipart form data.
 *
 * @param {object} [config] optional settings: bufferSize, filename,
 *   visualizer {element, backcolor, forecolor, linewidth}, and the
 *   callbacks onRecordingStart / onRecording / onRecordingStop.
 */
function Record_Current(config) {
    config = config || {};
    var self = this;
    var audioInput;
    var audioNode;
    var bufferSize = config.bufferSize || 4096;
    var recordedData = [];
    var recording = false;
    var recordingLength = 0;
    var startDate;
    var audioCtx;
    // Bug fix: sampleRate was assigned in onMicrophoneCaptured without ever
    // being declared, leaking an implicit global (and throwing in strict mode).
    var sampleRate;

    // Start when idle, stop when currently recording.
    this.toggleRecording = function() {
        if (recording) {
            self.stop();
        } else {
            self.start();
        }
    };

    this.start = function() {
        // reset any previous data
        recordedData = [];
        recordingLength = 0;
        // webkit audio context shim
        audioCtx = new(window.AudioContext || window.webkitAudioContext)();
        if (audioCtx.createJavaScriptNode) {
            audioNode = audioCtx.createJavaScriptNode(bufferSize, 1, 1);
        } else if (audioCtx.createScriptProcessor) {
            audioNode = audioCtx.createScriptProcessor(bufferSize, 1, 1);
        } else {
            throw 'WebAudio not supported!';
        }
        audioNode.connect(audioCtx.destination);
        navigator.mediaDevices.getUserMedia({ audio: true })
            .then(onMicrophoneCaptured)
            .catch(onMicrophoneError);
    };

    this.stop = function() {
        stopRecording(function(blob) {
            self.blob = blob;
            config.onRecordingStop && config.onRecordingStop(blob);
        });
    };

    // POSTs the recorded blob plus `params` to `url`; `callback` receives
    // ('progress' | 'load' | 'loading' | 'done' | 'error', event, request).
    this.upload = function(url, params, callback) {
        var formData = new FormData();
        formData.append("audio", self.blob, config.filename || 'recording.wav');
        for (var i in params)
            formData.append(i, params[i]);
        var request = new XMLHttpRequest();
        request.upload.addEventListener("progress", function(e) {
            callback('progress', e, request);
        });
        request.upload.addEventListener("load", function(e) {
            callback('load', e, request);
        });
        request.onreadystatechange = function(e) {
            var status = 'loading';
            if (request.readyState === 4) {
                status = request.status === 200 ? 'done' : 'error';
            }
            callback(status, e, request);
        };
        request.open("POST", url);
        request.send(formData);
    };

    // Tears down the audio graph and packages the capture as a WAV blob.
    function stopRecording(callback) {
        // stop recording
        recording = false;
        // to make sure onaudioprocess stops firing
        window.localStream.getTracks().forEach((track) => { track.stop(); });
        audioInput.disconnect();
        audioNode.disconnect();
        exportWav({
            sampleRate: sampleRate,
            recordingLength: recordingLength,
            data: recordedData
        }, function(buffer, view) {
            self.blob = new Blob([view], { type: 'audio/wav' });
            callback && callback(self.blob);
        });
    }

    // Wires the granted microphone stream into the processing node and
    // flips the recording flag on.
    function onMicrophoneCaptured(microphone) {
        if (config.visualizer)
            visualize(microphone);
        // save the stream so we can disconnect it when we're done
        window.localStream = microphone;
        audioInput = audioCtx.createMediaStreamSource(microphone);
        audioInput.connect(audioNode);
        audioNode.onaudioprocess = onAudioProcess;
        recording = true;
        self.startDate = new Date();
        config.onRecordingStart && config.onRecordingStart();
        sampleRate = audioCtx.sampleRate;
    }

    function onMicrophoneError(e) {
        console.log(e);
        alert('Unable to access the microphone.');
    }

    // Accumulates each ScriptProcessor buffer while recording and keeps
    // running length/duration stats on the instance.
    function onAudioProcess(e) {
        if (!recording) {
            return;
        }
        recordedData.push(new Float32Array(e.inputBuffer.getChannelData(0)));
        recordingLength += bufferSize;
        self.recordingLength = recordingLength;
        self.duration = new Date().getTime() - self.startDate.getTime();
        config.onRecording && config.onRecording(self.duration);
    }

    // Draws a live waveform of `stream` onto config.visualizer.element.
    function visualize(stream) {
        var canvas = config.visualizer.element;
        if (!canvas)
            return;
        var canvasCtx = canvas.getContext("2d");
        var source = audioCtx.createMediaStreamSource(stream);
        var analyser = audioCtx.createAnalyser();
        analyser.fftSize = 2048;
        var bufferLength = analyser.frequencyBinCount;
        var dataArray = new Uint8Array(bufferLength);
        source.connect(analyser);
        function draw() {
            // get the canvas dimensions
            var width = canvas.width,
                height = canvas.height;
            // ask the browser to schedule a redraw before the next repaint
            requestAnimationFrame(draw);
            // clear the canvas
            canvasCtx.fillStyle = config.visualizer.backcolor || '#fff';
            canvasCtx.fillRect(0, 0, width, height);
            if (!recording)
                return;
            canvasCtx.lineWidth = config.visualizer.linewidth || 2;
            canvasCtx.strokeStyle = config.visualizer.forecolor || '#f00';
            canvasCtx.beginPath();
            var sliceWidth = width * 1.0 / bufferLength;
            var x = 0;
            analyser.getByteTimeDomainData(dataArray);
            for (var i = 0; i < bufferLength; i++) {
                var v = dataArray[i] / 128.0;
                var y = v * height / 2;
                i === 0 ? canvasCtx.moveTo(x, y) : canvasCtx.lineTo(x, y);
                x += sliceWidth;
            }
            canvasCtx.lineTo(canvas.width, canvas.height / 2);
            canvasCtx.stroke();
        }
        draw();
    }

    // Converts the captured Float32 chunks into a 16-bit mono WAV file,
    // off the main thread via an inline web worker.
    function exportWav(config, callback) {
        // NOTE(review): this parameter intentionally shadows the
        // constructor's `config`; only the WAV parameters are used inside.
        function inlineWebWorker(config, cb) {
            var data = config.data.slice(0);
            var sampleRate = config.sampleRate;
            data = joinBuffers(data, config.recordingLength);
            console.log(data);
            // Flattens the chunk list into one Float64Array of `count` samples.
            function joinBuffers(channelBuffer, count) {
                var result = new Float64Array(count);
                var offset = 0;
                var lng = channelBuffer.length;
                for (var i = 0; i < lng; i++) {
                    var buffer = channelBuffer[i];
                    result.set(buffer, offset);
                    offset += buffer.length;
                }
                return result;
            }
            // Writes an ASCII tag byte-by-byte into the DataView.
            function writeUTFBytes(view, offset, string) {
                var lng = string.length;
                for (var i = 0; i < lng; i++) {
                    view.setUint8(offset + i, string.charCodeAt(i));
                }
            }
            var dataLength = data.length;
            // create wav file: 44-byte header followed by 16-bit samples
            var buffer = new ArrayBuffer(44 + dataLength * 2);
            var view = new DataView(buffer);
            writeUTFBytes(view, 0, 'RIFF'); // RIFF chunk descriptor/identifier
            view.setUint32(4, 44 + dataLength * 2, true); // RIFF chunk length
            writeUTFBytes(view, 8, 'WAVE'); // RIFF type
            writeUTFBytes(view, 12, 'fmt '); // format chunk identifier, FMT sub-chunk
            view.setUint32(16, 16, true); // format chunk length
            view.setUint16(20, 1, true); // sample format (raw)
            view.setUint16(22, 1, true); // mono (1 channel)
            view.setUint32(24, sampleRate, true); // sample rate
            view.setUint32(28, sampleRate * 2, true); // byte rate (sample rate * block align)
            view.setUint16(32, 2, true); // block align (channel count * bytes per sample)
            view.setUint16(34, 16, true); // bits per sample
            writeUTFBytes(view, 36, 'data'); // data sub-chunk identifier
            view.setUint32(40, dataLength * 2, true); // data chunk length
            // write the PCM samples
            var index = 44;
            for (var i = 0; i < dataLength; i++) {
                view.setInt16(index, data[i] * 0x7FFF, true);
                index += 2;
            }
            if (cb) {
                return cb({
                    buffer: buffer,
                    view: view
                });
            }
            postMessage({
                buffer: buffer,
                view: view
            });
        }
        var webWorker = processInWebWorker(inlineWebWorker);
        webWorker.onmessage = function(event) {
            callback(event.data.buffer, event.data.view);
            // release memory
            URL.revokeObjectURL(webWorker.workerURL);
        };
        webWorker.postMessage(config);
    }

    // Wraps `_function` in a blob-backed Worker whose onmessage forwards
    // posted data into the function.
    function processInWebWorker(_function) {
        var workerURL = URL.createObjectURL(new Blob([_function.toString(),
            ';this.onmessage = function (e) {' + _function.name + '(e.data);}'
        ], {
            type: 'application/javascript'
        }));
        var worker = new Worker(workerURL);
        worker.workerURL = workerURL;
        console.log(worker);
        return worker;
    }

    // Builds an <li> containing an <audio> player and a download link for a
    // finished recording. NOTE(review): not referenced anywhere in this
    // file — confirm it is invoked externally before relying on it.
    function renderRecording(workerURL, list) {
        const worker_url = URL.createObjectURL(workerURL);
        const li = document.createElement('li');
        const audio = document.createElement('audio');
        const anchor = document.createElement('a');
        anchor.setAttribute('href', workerURL);
        const now = new Date();
        anchor.setAttribute(
            'download',
            // Bug fix: the original used now.getDay() (day of week, 0-6)
            // where the day of the month was intended; getDate() is correct.
            `recording-${now.getFullYear()}-${(now.getMonth() + 1).toString().padStart(2, '0')}-${now.getDate().toString().padStart(2, '0')}--${now.getHours().toString().padStart(2, '0')}-${now.getMinutes().toString().padStart(2, '0')}-${now.getSeconds().toString().padStart(2, '0')}.webm`
        );
        anchor.innerText = 'Download';
        audio.setAttribute('src', worker_url);
        audio.setAttribute('controls', 'controls');
        li.appendChild(audio);
        li.appendChild(anchor);
        list.appendChild(li);
    }
}
and this Google Drive Script
// Bug fix: the stream module must be bound to the name actually used below;
// the original required it as `driveLink` while referencing `stream`, which
// is a ReferenceError at runtime.
var stream = require('stream');

/**
 * Uploads the audio blob carried on the request to Google Drive.
 * Relies on `drive` (googleapis client) and `jwToken` (JWT auth) being
 * defined in the enclosing module scope.
 * @param {object} req Express request; expects body.blob and body.word.
 */
module.exports.uploadFile = function(req) {
    console.log("driveApi upload reached");

    function blobToFile(req) {
        var f = req.body.blob;
        f.lastModifiedDate = new Date();
        f.name = req.body.word;
        return f;
    }

    // Bug fix: blobToFile was defined but never invoked, so `file` stayed
    // undefined and an empty stream was uploaded.
    var file = blobToFile(req);

    var bufStream = new stream.PassThrough();
    bufStream.end(file);

    var folderId = "Folder"; // TODO: replace with the real Drive folder id
    var fileMetadata = {
        "name": req.body.word,
        parents: [folderId]
    };
    var media = {
        mimeType: "audio/mp3",
        body: bufStream
    };
    drive.files.create({
        auth: jwToken,
        resource: fileMetadata,
        media: media,
        fields: "id"
    }, function(err, createdFile) {
        if (err) {
            console.error(err);
        } else {
            // NOTE(review): with googleapis v3 the id lives on
            // createdFile.data.id rather than createdFile.id — confirm
            // against the installed client version.
            console.log("File Id: ", createdFile.id);
        }
        console.log("driveApi upload accomplished");
    });
};
I have tried a few different approaches to combining the two to make it so it automatically saves the .wav file to the Drive API but it does not work.
I believe I am either not merging the two scripts together the right way or I am missing something. Do I need to use V3 of the Google API?
If anyone could provide some guidance on how to merge the two properly that would be greatly appreciated. Thank you!

Javascript: refactoring existing code to separate concerns, how could we use a class/Prototype and call from it own objects' methods?

I have a web page where I show two models, each one in a canvas using Threejs.
I would like to read a local file, parse it and then make it available for download.
I have done it, but all mixed together; I mean the same logic currently has both the responsibility of showing the canvases and of reading, parsing, and downloading the file.
I would like to isolate the file read, parse and download logic into another class. Currently I have extracted the canvas display logic into a class called InitCanvas.js.
Here is the code:
// this class handles the load and the canva for a nrrd
// Using programming based on prototype: https://javascript.info/class
// This class should be improved:
// - Canvas Width and height
// this class handles the load and the canvas for a nrrd
// Using programming based on prototype: https://javascript.info/class
// Bug fix: `var` added so InitCanvas is a declared binding rather than an
// implicit global (which throws in strict mode).
var InitCanvas = function (IdDiv, Filename) {
    // id of the host <div> and path of the NRRD volume to display
    this.IdDiv = IdDiv;
    this.Filename = Filename
}

InitCanvas.prototype = {
    constructor: InitCanvas,

    /** Builds camera, scene, lights and renderer, then loads the volume. */
    init: function () {
        this.container = document.getElementById(this.IdDiv);
        // this should be changed: hard-coded canvas size.
        this.container.innerHeight = 600;
        this.container.innerWidth = 800;
        // These statements should be changed to improve the image position.
        this.camera = new THREE.PerspectiveCamera(60, this.container.innerWidth / this.container.innerHeight, 0.01, 1e10);
        this.camera.position.z = 300;
        let scene = new THREE.Scene();
        scene.add(this.camera);
        // light
        let dirLight = new THREE.DirectionalLight(0xffffff);
        dirLight.position.set(200, 200, 1000).normalize();
        this.camera.add(dirLight);
        this.camera.add(dirLight.target);
        // read file (async; the slice is added to the scene once decoded)
        let loader = new THREE.NRRDLoader();
        loader.load(this.Filename, function (volume) {
            // z plane
            let sliceZ = volume.extractSlice('z', Math.floor(volume.RASDimensions[2] / 4));
            this.container.innerWidth = sliceZ.iLength;
            this.container.innerHeight = sliceZ.jLength;
            sliceZ.mesh.material.color.setRGB(0, 1, 1);
            console.log('Our slice is: ', sliceZ);
            scene.add(sliceZ.mesh);
        }.bind(this));
        this.scene = scene;
        // renderer
        this.renderer = new THREE.WebGLRenderer({alpha: true});
        // Bug fix: devicePixelRatio lives on window, not on a DOM element;
        // the original passed undefined to setPixelRatio.
        this.renderer.setPixelRatio(window.devicePixelRatio);
        this.renderer.setSize(this.container.innerWidth, this.container.innerHeight);
        // add canvas in container
        this.container.appendChild(this.renderer.domElement);
    },

    /** Renders one frame; called from the page's animation loop. */
    animate: function () {
        this.renderer.render(this.scene, this.camera);
    }
}
So the idea is to refactor the logic.js:
// Guard: show a help message when WebGL is unavailable.
if (!Detector.webgl) Detector.addGetWebGLMessage();
// global variables for this scripts
let OriginalImg,
SegmentImg;
var mouse = new THREE.Vector2();
var raycaster = new THREE.Raycaster();
var mousePressed = false;
var clickCount = 0;
var allText;
// Bootstrap: build both canvases, start the render loop, then fetch the
// label file (synchronously — see readTextFile below).
init();
animate();
readTextFile("columna01-es-latin1.txt");
// Initialize the page: build the original and the segmented canvases.
function init() {
    const originalFile = "models/nrrd/columna01.nrrd"; // change your nrrd file
    OriginalImg = new InitCanvas('original', originalFile);
    OriginalImg.init();
    console.log(OriginalImg);

    const segmentedFile = "models/nrrd/columnasegmentado01.nrrd"; // change your nrrd file
    SegmentImg = new InitCanvas('segment', segmentedFile);
    SegmentImg.init();
}
// Wire mouse handlers on the element hosting the original image's canvas.
let originalCanvas = document.getElementById('original');
originalCanvas.addEventListener('mousedown', onDocumentMouseDown, false);
originalCanvas.addEventListener('mouseup', onDocumentMouseUp, false);
// Converts the click position to normalized device coordinates (-1..1)
// and raycasts into the original image's scene, logging any hits.
function onDocumentMouseDown(event) {
mousePressed = true;
clickCount++;
// NDC x/y derived from the renderer canvas geometry.
mouse.x = ( ( event.clientX - OriginalImg.renderer.domElement.offsetLeft ) / OriginalImg.renderer.domElement.clientWidth ) * 2 - 1;
mouse.y = -( ( event.clientY - OriginalImg.renderer.domElement.offsetTop ) / OriginalImg.renderer.domElement.clientHeight ) * 2 + 1
console.log('Mouse x position is: ', mouse.x, 'the click number was: ', clickCount);
console.log('Mouse Y position is: ', mouse.y);
raycaster.setFromCamera(mouse.clone(), OriginalImg.camera);
var objects = raycaster.intersectObjects(OriginalImg.scene.children);
console.log(objects);
}
// Clears the pressed flag when the mouse button is released.
function onDocumentMouseUp(event) {
    mousePressed = false;
}
// Render loop: schedule the next frame, then draw both canvases.
function animate() {
requestAnimationFrame(animate);
OriginalImg.animate();
SegmentImg.animate();
}
// Synchronously (async=false) GETs `file`, pairs the words of its first two
// lines into an "atlas" object, and triggers a download of it as JSON.
// NOTE(review): synchronous XHR blocks the main thread and is deprecated;
// consider the async form.
function readTextFile(file) {
var rawFile = new XMLHttpRequest();
rawFile.open("GET", file, false);
rawFile.onreadystatechange = function () {
if (rawFile.readyState === 4) {
// status 0 covers file:// URLs, which report no HTTP status.
if (rawFile.status === 200 || rawFile.status == 0) {
allText = rawFile.responseText;
console.log('The complete text is', allText);
let lineArr = intoArray(allText);
let firstLineWords = intoWords(lineArr[0]);
let secondLineWords = intoWords(lineArr[1]);
console.log('Our first line is: ', lineArr[0]);
let atlas = {};
// Map the i-th word of line 1 to the i-th word of line 2.
for (let i = 0; i < firstLineWords.length; i++) {
console.log(`Our ${i} word in the first line is : ${firstLineWords[i]}`);
console.log(`Our ${i} word in the SECOND line is : ${secondLineWords[i]}`);
atlas[firstLineWords[i]] = secondLineWords[i];
}
console.log('The atlas is: ', atlas);
let atlasJson = JSON.stringify(atlas);
console.log('Atlas as json is: ', atlasJson);
download(atlasJson, 'atlasJson.txt', 'text/plain');
}
}
};
rawFile.send(null);
}
// Prepares the existing <a id="a"> element so clicking it downloads `text`
// as a file named `name` with MIME type `type`.
function download(text, name, type) {
    const anchor = document.getElementById("a");
    const payload = new Blob([text], { type: type });
    anchor.href = URL.createObjectURL(payload);
    anchor.download = name;
}
// Splits the full text into an array of lines (one entry per "\n"-separated
// line).
function intoArray(lines) {
    return lines.split('\n');
}
// Splits one line into words on the `" "` (quote-space-quote) delimiter.
function intoWords(line) {
    return line.split('" "');
}
To extract out readTextFile, download, intoArray, intoWords in its own class.
I have tried the following, as a new class, called myFileReader:
// Bug fix: `var` added so MyFileReader is a declared binding (the original
// implicit global throws in strict mode).
var MyFileReader = function () {
};

MyFileReader.prototype = {
    constructor: MyFileReader,

    /**
     * Synchronously GETs `file`, builds a word atlas from its first two
     * lines, and downloads the atlas as JSON.
     *
     * Bug fix: inside the XHR callback `this` is not the MyFileReader
     * instance, so `this.intoLines` was "not a function". The instance is
     * captured in `self` before the callback is created. (The trailing
     * parameters are retained only for call-site compatibility; unused.)
     */
    readTextFile: function (file, intoLines, intoWords) {
        var self = this;
        var rawFile = new XMLHttpRequest();
        rawFile.open("GET", file, false);
        rawFile.onreadystatechange = function () {
            if (rawFile.readyState === 4) {
                if (rawFile.status === 200 || rawFile.status == 0) {
                    // NOTE(review): allText is an undeclared global shared
                    // with logic.js — confirm, or declare it locally.
                    allText = rawFile.responseText;
                    console.log('The complete text is', allText);
                    let lineArr = self.intoLines(allText);
                    let firstLineWords = self.intoWords(lineArr[0]);
                    let secondLineWords = self.intoWords(lineArr[1]);
                    console.log('Our first line is: ', lineArr[0]);
                    let atlas = {};
                    for (let i = 0; i < firstLineWords.length; i++) {
                        console.log(`Our ${i} word in the first line is : ${firstLineWords[i]}`);
                        console.log(`Our ${i} word in the SECOND line is : ${secondLineWords[i]}`);
                        atlas[firstLineWords[i]] = secondLineWords[i];
                    }
                    console.log('The atlas is: ', atlas);
                    let atlasJson = JSON.stringify(atlas);
                    console.log('Atlas as json is: ', atlasJson);
                    self.download(atlasJson, 'atlasJson.txt', 'text/plain');
                }
            }
        };
        rawFile.send(null);
    },

    /** Prepares the <a id="a"> element to download `text` as a file. */
    download: function (text, name, type) {
        var a = document.getElementById("a");
        var file = new Blob([text], {type: type});
        a.href = URL.createObjectURL(file);
        a.download = name;
    },

    /** Splits the full text into one string per line. */
    intoLines: function (text) {
        var lineArr = text.split('\n');
        return lineArr;
    },

    /** Splits one line into words on the `" "` delimiter. */
    intoWords: function (lines) {
        var wordsArr = lines.split('" "');
        return wordsArr;
    },
};
And I am using it from logic.js as:
myFileReader = new MyFileReader();
myFileReader.readTextFile("columna01-es-latin1.txt");
Here is the question:
1. Why do we get:
TypeError: this.intoLines is not a function
Which is refered to the line:
let lineArr = this.intoLines(allText);
In addition, to solve this error I have also tried to pass in the function from the object as:
logic.js:
myFileReader = new MyFileReader();
myFileReader.readTextFile("columna01-es-latin1.txt", myFileReader.intoLines(), myFileReader.intoWords());
And in our class, myFileReadder.js, inside readtTextFile we put:
let lineArr = intoLines(allText);
let firstLineWords = intoWords(lineArr[0]);
let secondLineWords = intoWords(lineArr[1]);
And our web console tells us:
TypeError: text is undefined
I understand that text is undefined because in logic.js we are not providing it:
myFileReader.readTextFile("columna01-es-latin1.txt", myFileReader.intoLines(), myFileReader.intoWords());
However I think we can not provide it, because we are in fact requesting to myFileReader that reads all the text from the local file and pass it in to intoLines() method.
2. Why does it happen, and how could we solve it?
Thank you for your help!
EDIT:
The final code after #Bergi suggestion is:
// Synchronously (async=false) GETs `file`, pairs the words of its first two
// lines into an "atlas" object, and triggers a download of it as JSON.
function readTextFile(file) {
var rawFile = new XMLHttpRequest();
rawFile.open("GET", file, false);
rawFile.onreadystatechange = function () {
if (rawFile.readyState === 4) {
// status 0 covers file:// URLs, which report no HTTP status.
if (rawFile.status === 200 || rawFile.status == 0) {
allText = rawFile.responseText;
console.log('The complete text is', allText);
let lineArr = intoLines(allText);
let firstLineWords = intoWords(lineArr[0]);
let secondLineWords = intoWords(lineArr[1]);
console.log('Our first line is: ', lineArr[0]);
let atlas = {};
// Map the i-th word of line 1 to the i-th word of line 2.
for (let i = 0; i < firstLineWords.length; i++) {
console.log(`Our ${i} word in the first line is : ${firstLineWords[i]}`);
console.log(`Our ${i} word in the SECOND line is : ${secondLineWords[i]}`);
atlas[firstLineWords[i]] = secondLineWords[i];
}
console.log('The atlas is: ', atlas);
let atlasJson = JSON.stringify(atlas);
console.log('Atlas as json is: ', atlasJson);
download(atlasJson, 'atlasJson.txt', 'text/plain');
}
}
};
rawFile.send(null);
}
// Prepares the existing <a id="a"> element so clicking it downloads `text`
// as a file named `name` with MIME type `type`.
function download(text, name, type) {
    const anchor = document.getElementById("a");
    const payload = new Blob([text], { type: type });
    anchor.href = URL.createObjectURL(payload);
    anchor.download = name;
}
// Splits the full text into an array of lines (split on "\n").
function intoLines(text) {
    return text.split('\n');
}
// Splits one line into words on the `" "` (quote-space-quote) delimiter.
function intoWords(lines) {
    return lines.split('" "');
}
I have also studied:
Prototype: 😣 This thread helped me a lot
How does JavaScript .prototype work?
How to download files to local storage
JavaScript: Create and save file
How we delete properties from an object
How do I remove a property from a JavaScript object?
Why does it happen, and how could we solve it?
See How to access the correct `this` inside a callback?.
I have tried the following, as a new class
There's your issue. There's no reason to use a class construct at all, you're not going to instantiate it and keep data on the objects.
Yes, it's a good idea to extract out readTextFile, download, intoArray, intoWords into its own module, in its own file. But there's no reason to use an (empty!) constructor and a prototype, just plain functions would do fine (and you also wouldn't have above problem with accessing instance methods).

Error in recording audio on android using getUserMedia "js"

The code below is from a web page that makes audio recordings. I have a problem that I cannot solve; I have already tried several approaches with no luck. The problem is the following: when I run the code on a desktop it works normally and the audio comes out clean. The real problem happens when I run the same test on Android 6.0 or 7.0 — it manages to complete the whole process, but the audio comes out with the voice breaking up. I have tried to correct it but cannot. Maybe someone with more experience can help me.
NOTE: I use "https" on the page.
// ---- shared recorder state ----
var audioContext = null; // AudioContext constructor (feature-detected later)
var context = null; // live AudioContext instance
var volume = null; // master gain node
var audioInput = null; // MediaStreamSource wrapping the microphone
var recorder = null; // ScriptProcessor node that captures samples
var recordingLength = 0;
var leftchannel = [];
var rightchannel = [];
var bufferSize = 16384;
// NOTE(review): the WAV header is written with this fixed 8000 Hz rate while
// the AudioContext captures at its native rate (the line restoring
// context.sampleRate is commented out below) — that mismatch is a plausible
// cause of the distorted Android playback; confirm.
var sampleRate = 8000;
var requestStreamReadPermission = {
audio: true,
video:false
};
$scope.canRecordAudio = false;
// -
// Legacy getUserMedia shim across vendor prefixes.
navigator.getUserMedia = (navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
// -
// Writes `string` into the DataView one byte per character starting at
// `offset` (used for the ASCII tags in the WAV header).
var writeUTFBytes = function(view, offset, string) {
    for (var i = 0, n = string.length; i < n; i++) {
        view.setUint8(offset + i, string.charCodeAt(i));
    }
};
// -
// Interleaves two equal-length channel buffers into one stereo buffer
// (L0, R0, L1, R1, ...).
var interleave = function(leftChannel, rightChannel) {
    var total = leftChannel.length + rightChannel.length;
    var result = new Float32Array(total);
    var src = 0;
    var dst = 0;
    while (dst < total) {
        result[dst++] = leftChannel[src];
        result[dst++] = rightChannel[src];
        src++;
    }
    return result;
};
// -
// Flattens a list of Float32Array chunks into one buffer holding
// `recordingLength` samples, preserving chunk order.
var mergeBuffers = function(channelBuffer, recordingLength) {
    var merged = new Float32Array(recordingLength);
    var writePos = 0;
    for (var i = 0; i < channelBuffer.length; i++) {
        var chunk = channelBuffer[i];
        merged.set(chunk, writePos);
        writePos += chunk.length;
    }
    return merged;
};
// -
// Logs the getUserMedia failure and surfaces a translated message to the
// user via the Angular services available in this scope.
var errorGetUserMedia = function(error) {
console.log(error);
GeneralMessageService.clean();
GeneralMessageService.addMessage($filter('translate')('chat.mobile.checkorwaitforconnection'));
};
// -
// Wires the granted microphone stream into gain -> ScriptProcessor ->
// destination and flags the UI that recording is possible.
var successGetUserMedia = function(stream) {
audioContext = window.AudioContext || window.webkitAudioContext;
context = new audioContext();
// retrieve the current sample rate to be used for the WAV packaging
// NOTE(review): this line stays commented out, so the WAV header keeps the
// hard-coded 8000 Hz while the context captures at its native rate — a
// plausible cause of the garbled Android playback; confirm.
// sampleRate = context.sampleRate;
volume = context.createGain();
audioInput = context.createMediaStreamSource(stream);
audioInput.connect(volume);
//recorder = context.createScriptProcessor(bufferSize, 2, 2);
var numberOfInputChannels = 2;
var numberOfOutputChannels = 2;
if (context.createScriptProcessor) {
recorder = context.createScriptProcessor(bufferSize, numberOfInputChannels, numberOfOutputChannels);
} else {
recorder = context.createJavaScriptNode(bufferSize, numberOfInputChannels, numberOfOutputChannels);
}
// Capture one mono buffer per ScriptProcessor tick while recording.
recorder.onaudioprocess = function(stream) {
if (!$scope.recordingAudio) {
return;
}
var left = stream.inputBuffer.getChannelData(0);
//var right = stream.inputBuffer.getChannelData(1);
leftchannel.push(new Float32Array(left));
//rightchannel.push(new Float32Array(right));
recordingLength += bufferSize;
};
volume.connect(recorder);
recorder.connect(context.destination);
$scope.canRecordAudio = true;
};
// -
// Request microphone access up front; fall through to the error handler
// when getUserMedia is unavailable on this browser.
if (!!navigator.getUserMedia) {
navigator.getUserMedia(
requestStreamReadPermission,
successGetUserMedia,
errorGetUserMedia
);
} else {
errorGetUserMedia('UserMedia is empty');
}
// -
// Resets the capture buffers so a new recording starts from scratch.
var startRecording = function() {
    leftchannel.length = 0;
    rightchannel.length = 0;
    recordingLength = 0;
};
// -
// Finalizes the capture: flattens the left channel, writes a mono 16-bit
// WAV (44-byte header + PCM samples), and hands a blob: URL to
// addAudioMessage for playback/sending.
var stopRecording = function() {
var leftBuffer = mergeBuffers(leftchannel, recordingLength);
//var rightBuffer = mergeBuffers(rightchannel, recordingLength);
//var interleaved = interleave(leftBuffer, rightBuffer);
var buffer = new ArrayBuffer(44 + leftBuffer.length * 2);
var view = new DataView(buffer);
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 44 + leftBuffer.length * 2, true);
writeUTFBytes(view, 8, 'WAVE');
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
//MONO
view.setUint16(22, 1, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, sampleRate * 2, true);
view.setUint16(32, 2, true);
view.setUint16(34, 16, true);
//stereo
/*view.setUint16(22, 2, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, sampleRate * 4, true);
view.setUint16(32, 4, true);
view.setUint16(34, 16, true);*/
//end
writeUTFBytes(view, 36, 'data');
view.setUint32(40, leftBuffer.length * 2, true);
var lng = leftBuffer.length;
var index = 44;
// NOTE(review): this local `volume` (always 1) shadows the global gain node
// and is never used below — confirm it can be removed.
var volume = 1;
// Clamp each float sample to [-1, 1] and scale to signed 16-bit PCM.
for (var i = 0; i < lng; i++) {
var s = Math.max(-1, Math.min(1, leftBuffer[i]));
view.setInt16(index, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
index += 2;
}
var blob = new Blob([view], {
type: 'audio/wav'
});
var url = (window.URL || window.webkitURL).createObjectURL(blob);
addAudioMessage(url);
//clear();
};

Setting up Web Audio, multiple sounds, seperate gainNodes and global LOWPASS filter?

I am trying to set up some web audio to load/play multiple sound sources simultaneously.
The sounds are being loaded for now and play is triggered through a button input.
My problem is, I want all the sounds to run through one BiquadFilter (in this case type:0; // LOWPASS filter).
I believe I have created the filter correctly (in two different places — I'm not sure which; see the attached code), but I cannot get a range input to control the frequency; something isn't communicating properly and I'm completely lost.
Also, around the same topic, I want each individual sound to run through their own independent gainNodes (volume controls), this again will be changed via a range input.
Basically there will be 6 audio files, running through their own gainNodes and then coming together to go through a LOWPASS filter before the destination (i.e. the speakers).
I'm hoping to eventually run each sound through its own pannerNode as well, but at the moment I'm close to giving up on the project altogether.
Below is my code (like i said before, the button is triggering all the sounds but but the filter is a BIG problem):
HTML:
<body>
<!-- Play button: toggles all loaded tracks via the SongTracks instance. -->
<div id="startbtn">
<p><input type="button" onClick="tracks.toggle();">PLAY!</p>
</div> <!-- startbtn div -->
<!-- Range input intended to drive the shared LOWPASS filter frequency. -->
<div id="frequency">
<p><input type="range" id="freq1" min="0" max="1" step="0.01" value="1" onchange="sound.changeFrequency(this);" style="width:180px; background-color:#FFF;"> Frequency</p>
</p>
</div>
<script>
var tracks = new SongTracks();
var sound = new playSound();
</script>
</body>
JAVASCRIPT:
var context = new webkitAudioContext();
var myAudioAnalyser;
// Entry point: builds the shared audio graph (context -> analyser -> speakers)
// and starts fetching the samples.
// NOTE(review): webkit-only — consider `window.AudioContext || window.webkitAudioContext`
// for cross-browser support.
function init() {
if('webkitAudioContext' in window) {
myAudioContext = new webkitAudioContext(); // NOTE(review): implicit global — add a `var myAudioContext` declaration at top level.
// an analyser is used for the spectrum
myAudioAnalyser = myAudioContext.createAnalyser();
myAudioAnalyser.smoothingTimeConstant = 0.85; // smooth the spectrum between animation frames
myAudioAnalyser.connect(myAudioContext.destination);
fetchSounds(); // NOTE(review): fetchSounds is not defined in this file — confirm it exists elsewhere.
};
};
// shim layer with setTimeout fallback
// Cross-browser requestAnimationFrame shim: pick the first vendor
// implementation available, else fall back to a ~60fps setTimeout.
window.requestAnimFrame = (function () {
  var candidates = [
    window.requestAnimationFrame,
    window.webkitRequestAnimationFrame,
    window.mozRequestAnimationFrame,
    window.oRequestAnimationFrame,
    window.msRequestAnimationFrame
  ];
  for (var i = 0; i < candidates.length; i++) {
    if (candidates[i]) {
      return candidates[i];
    }
  }
  return function (callback) {
    window.setTimeout(callback, 1000 / 60);
  };
})();
// Plays `buffer` at AudioContext time `time`, routed through a lowpass filter.
// NOTE(review): a brand-new BiquadFilter is created on EVERY call, so each
// playing sound gets its own filter instead of all sharing one global filter —
// this is the root of the "global LOWPASS" problem described above.
function playSound(buffer, time) {
var source = context.createBufferSource();
source.buffer = buffer;
var filter = context.createBiquadFilter(); ///////////////// HERE
filter.type = filter.LOWPASS; // legacy numeric constant; the modern API expects the string 'lowpass'
filter.frequency.value = 5000;
source.connect(filter);
filter.connect(context.destination);
source.start(time);
this.filter = filter; // NOTE(review): called as a plain function, `this` is the global object (or undefined in strict mode) — this does not land on a playSound instance.
};
// Resolves each entry of `soundMap` ({name: url}) to a decoded AudioBuffer,
// stores it on `obj` under the same name, and invokes `callback` (if given)
// once every buffer has finished decoding.
function loadSounds(obj, soundMap, callback) {
  // Array-ify the map so names and paths share an index.
  var names = [];
  var paths = [];
  for (var name in soundMap) {
    names.push(name);
    paths.push(soundMap[name]);
  }
  // Fixed: the original assigned `bufferLoader` without `var`, leaking an
  // implicit global (and throwing a ReferenceError under strict mode).
  var bufferLoader = new BufferLoader(context, paths, function(bufferList) {
    for (var i = 0; i < bufferList.length; i++) {
      obj[names[i]] = bufferList[i];
    }
    if (callback) {
      callback();
    }
  });
  bufferLoader.load();
}
// Batch fetch-and-decode helper: tracks the URL work queue, the decoded
// buffers (indexed to match the URLs), and how many have completed.
function BufferLoader(context, urlList, callback) {
  // Decode context and work queue.
  this.context = context;
  this.urlList = urlList;
  // Fires once with the full bufferList after the last decode.
  this.onload = callback;
  // Results land here, positioned by request index.
  this.bufferList = [];
  this.loadCount = 0;
}
// Fetches one URL as an ArrayBuffer and decodes it to an AudioBuffer,
// storing the result at `index` so ordering matches urlList regardless of
// which request finishes first. Calls onload once the final buffer lands.
BufferLoader.prototype.loadBuffer = function(url, index) {
// Load buffer asynchronously
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
var loader = this; // capture `this` for the XHR callbacks below
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.context.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer; // keep original position, even out of order
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList); // last one in fires the completion callback
},
function(error) {
console.error('decodeAudioData error', error);
}
);
}
request.onerror = function() {
alert('BufferLoader: XHR error');
}
request.send();
};
// Kick off an asynchronous fetch/decode for every queued URL.
BufferLoader.prototype.load = function () {
  var urls = this.urlList;
  for (var index = 0; index < urls.length; index++) {
    this.loadBuffer(urls[index], index);
  }
};
// Track container: starts async loading of the three stems; loadSounds()
// attaches each decoded AudioBuffer to `this` under its stem name.
var SongTracks = function() {
  var stems = {
    vocals: 'tracks/vocals.mp3',
    guitar: 'tracks/guitar.mp3',
    piano: 'tracks/piano.mp3'
  };
  loadSounds(this, stems);
};
var filter;
// Starts all three stems, then attempts to build a filter chain.
// NOTE(review): the block between the marker comments is broken —
// see line-level notes; the answer below replaces this approach.
SongTracks.prototype.play = function() {
playSound(this.vocals, 0);
playSound(this.guitar, 0);
playSound(this.piano, 0);
///////////////////////////////////////////////////////////// OR HERE
var source1 = context.createBufferSource();
source1.buffer = this.buffer // NOTE(review): `this.buffer` is never set anywhere — this assigns undefined
source1 = bufferList[0]; // NOTE(review): `bufferList` is not in scope here — this throws a ReferenceError; it also discards the source created above
var filter = context.createBiquadFilter(); // NOTE(review): `var` shadows the file-level `var filter` declared above
filter.type = filter.LOWPASS;
filter.frequency.value = 5000;
source1.connect(filter);
filter.connect(context.destination);
this.filter = filter; // NOTE(review): source1 is never start()ed, so nothing routed through this filter actually plays
///////////////////////////////////////////////////////////////////// TO HERE?
};
// Stops playback.
// NOTE(review): `this.source` is never assigned anywhere in this file —
// play() creates its sources inside playSound() without keeping references —
// so this call throws; store the started sources if stop() is needed.
SongTracks.prototype.stop = function() {
this.source.stop(0);
};
// Play/stop flip-flop for the single transport button.
SongTracks.prototype.toggle = function () {
  if (this.isPlaying) {
    this.stop();
  } else {
    this.play();
  }
  // Remember the new state for the next press.
  this.isPlaying = !this.isPlaying;
};
/* SongTracks.prototype.changeFrequency = function(element) {
var minValue = 40;
var maxValue = context.sampleRate / 2;
var numberOfOctaves = Math.log(maxValue / minValue) / Math.LN2;
var multiplier = Math.pow(2, numberOfOctaves * (element.value - 1.0));
this.filter.frequency.value = maxValue * multiplier;
}; */
// Slider handler: maps element.value in [0, 1] exponentially onto the
// range [40 Hz, Nyquist] and applies it to this sound's filter.
playSound.prototype.changeFrequency = function(element) {
  var low = 40;
  var high = context.sampleRate / 2;
  // Number of octaves spanned, then scale 2^(octaves * (value - 1)).
  var octaveSpan = Math.log(high / low) / Math.LN2;
  var factor = Math.pow(2, octaveSpan * (element.value - 1.0));
  this.filter.frequency.value = high * factor;
};
</script>
As you can see from my notes, I'm very confused and have kind of hit a brick wall.
I've seen code which differentiates the audio files, something like;
var source1 = context.createBufferSource();
var source2 = context.createBufferSource();
var source3 = context.createBufferSource();
var source4 = context.createBufferSource();
source1.buffer = bufferList[0];
source2.buffer = bufferList[1];
source3.buffer = bufferList[2];
source4.buffer = bufferList[3];
But i have no idea, good luck.
You should probably simply pass the node to connect to into playSound, and then pass it the FilterNode.
Inside your playSound is the wrong place to create the BiquadFilter - you'll end up creating N of them, one for each playing sound, and you only want one.
You want something like:
HTML file the same, except:
<input type="range" id="freq1" min="0" max="1" step="0.01" value="1" onchange="changeFilterFrequency(this);" style="width:180px; background-color:#FFF;"> Frequency</p>
JS:
// Plays `buffer` at `time`, routed into the caller-supplied output node
// (e.g. the shared global filter) rather than wiring its own chain.
function playSound(buffer, outputNode, time) {
  var src = context.createBufferSource();
  src.buffer = buffer;
  src.connect(outputNode);
  src.start(time);
}
var globalFilter = null; // one global filter, shared by every playing sound and by changeFilterFrequency
// Starts all three stems through the single shared lowpass filter.
SongTracks.prototype.play = function() {
  // Fixed: assign the module-level `globalFilter`. The original wrote
  // `var globalFilter = ...` here, which created a shadowing local and left
  // the slider handler (changeFilterFrequency) dereferencing null.
  globalFilter = context.createBiquadFilter();
  globalFilter.type = globalFilter.LOWPASS;
  globalFilter.frequency.value = 5000;
  globalFilter.connect(context.destination);
  playSound(this.vocals, globalFilter, 0);
  playSound(this.guitar, globalFilter, 0);
  playSound(this.piano, globalFilter, 0);
};
// Range-input handler: converts element.value in [0, 1] to an exponential
// sweep over [40 Hz, Nyquist] and applies it to the shared global filter.
function changeFilterFrequency(element) {
  var low = 40;
  var high = context.sampleRate / 2;
  var octaveSpan = Math.log(high / low) / Math.LN2;
  var factor = Math.pow(2, octaveSpan * (element.value - 1.0));
  globalFilter.frequency.value = high * factor;
}

How can I manipulate the DOM with the web audio API/javascript?

I'm new to programming and I'm messing around with the Web Audio API. Right now, I have three samples (kick, clap, hihat) that make up a simple drumkit beat when a Play button is pressed.
I want to be able to illustrate this visually on the front end as a sequencer that plays through this drumkit. For instance, every time the "kick.wav" sample is played, I want to change the color of a div that is associated with it.
My questions are:
How do I associate every time a kick, clap or hihat are played with a div in the html?
How can I add this association to sequence through when the play button is hit?
HTML:
<body>
<div id="container">
<canvas id="canvas"></canvas>
</div>
<button id="play">play</button>
<script src="javascript/tween.js"></script>
<script src="javascript/shared.js"></script>
<script src="javascript/seq.js"></script>
</body>
Javascript:
// Schedules `buffer` to start at AudioContext time `time`, wired straight
// to the destination (no per-sound gain or filter stage).
function playSound(buffer, time) {
  var src = context.createBufferSource();
  src.buffer = buffer;
  src.connect(context.destination);
  src.start(time);
}
// Decodes every URL in `soundMap` ({name: path}) and attaches the resulting
// AudioBuffer to `obj` under its name; `callback` fires after the last decode.
function loadSounds(obj, soundMap, callback) {
  // Array-ify so that names and paths share an index.
  var names = [];
  var paths = [];
  for (var name in soundMap) {
    names.push(name);
    paths.push(soundMap[name]);
  }
  // Fixed: `var` added — the original leaked `bufferLoader` as an implicit
  // global (a ReferenceError under strict mode).
  var bufferLoader = new BufferLoader(context, paths, function(bufferList) {
    for (var i = 0; i < bufferList.length; i++) {
      obj[names[i]] = bufferList[i];
    }
    if (callback) {
      callback();
    }
  });
  bufferLoader.load();
}
// Batches XHR fetches of `urlList` through `context.decodeAudioData`,
// collecting results in `bufferList` and invoking `callback` when all done.
function BufferLoader(context, urlList, callback) {
  this.context = context;
  this.urlList = urlList;
  this.onload = callback;
  // Progress state: filled-in buffers plus a completion counter.
  this.loadCount = 0;
  this.bufferList = [];
}
// Fetches one URL as an ArrayBuffer and decodes it into an AudioBuffer,
// writing the result at `index` so bufferList keeps urlList's order even
// when responses arrive out of order; fires onload after the final decode.
BufferLoader.prototype.loadBuffer = function(url, index) {
// Load buffer asynchronously
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
var loader = this; // capture `this` for the XHR callbacks below
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.context.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer; // store at the request's original position
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList); // last arrival triggers completion
},
function(error) {
console.error('decodeAudioData error', error);
}
);
}
request.onerror = function() {
alert('BufferLoader: XHR error');
}
request.send();
};
// Fire off all the downloads; each one decodes independently.
BufferLoader.prototype.load = function () {
  for (var position = 0; position < this.urlList.length; position++) {
    this.loadBuffer(this.urlList[position], position);
  }
};
// Drum-kit holder: asynchronously loads the three samples and exposes them
// as this.kick / this.claps / this.hihat once decoded.
var RhythmSample = function() {
  var samples = {
    kick: 'sounds/kick.wav',
    claps: 'sounds/claps.wav',
    hihat: 'sounds/hihat.wav'
  };
  loadSounds(this, samples);
};
// Schedules two bars of a basic pattern — kick on beats 1 & 5, claps on
// 3 & 7, hi-hats on every eighth note — starting 100 ms from "now".
RhythmSample.prototype.play = function() {
  // Start slightly in the future so the first notes aren't scheduled in the past.
  var startTime = context.currentTime + 0.100;
  var tempo = 120; // BPM (beats per minute)
  var eighthNoteTime = (60 / tempo) / 2;
  // Removed: an unused `allDivs` DOM query and a leftover debug console.log.
  // Play 2 bars of the following:
  for (var bar = 0; bar < 2; bar++) {
    var time = startTime + bar * 8 * eighthNoteTime;
    // Play the bass (kick) drum on beats 1, 5
    playSound(this.kick, time);
    playSound(this.kick, time + 4 * eighthNoteTime);
    // Play the snare drum (claps) on beats 3, 7
    playSound(this.claps, time + 2 * eighthNoteTime);
    playSound(this.claps, time + 6 * eighthNoteTime);
    // Play the hi-hat on every eighth note.
    for (var i = 0; i < 8; ++i) {
      playSound(this.hihat, time + i * eighthNoteTime);
    }
  }
};
// Instantiate the kit (starts async sample loading immediately) and wire
// the #play button to run the two-bar sequence on each click.
var sample = new RhythmSample();
document.querySelector('#play').addEventListener('click', function() {
sample.play();
});
THANKS SO MUCH!
Add another parameter to playSound() that matches the div id of the one you want to color. And logic to change the color when playing the sound, selecting the div by the id you've passed in.
// Plays `buffer` at `time` and highlights the div identified by `colorID`
// so the active sequencer step is visible on the page.
function playSound(buffer, time, colorID) {
  var source = context.createBufferSource();
  source.buffer = buffer;
  source.connect(context.destination);
  source.start(time);
  // Fixed: inline colors live on el.style, and the color must be a string —
  // the original `el.backgroundColor = blue` referenced an undefined
  // identifier and wrote to a property the browser ignores.
  document.getElementById(colorID).style.backgroundColor = 'blue'; // or hex color/rgb value
}
Then you just have to add the right parameter when you call playSound.
playSound(this.kick, time + 4 * eighthNoteTime,"your-kick-div-id");
If you need different colors for different sounds/divs, then just add an if/elseif statement in the background color setting.

Categories