Attempting to merge Record.js and driveApi - javascript

I have this record.js script that toggles a recording; it is currently working as expected.
function Record_Current(config) {
config = config || {};
var self = this;
var audioInput;
var audioNode;
var bufferSize = config.bufferSize || 4096;
var recordedData = [];
var recording = false;
var recordingLength = 0;
var startDate;
var audioCtx;
var sampleRate; // set once the microphone is captured; used by the WAV exporter
this.toggleRecording = function() {
if (recording) {
self.stop();
} else {
self.start();
}
};
this.start = function() {
// reset any previous data
recordedData = [];
recordingLength = 0;
// webkit audio context shim
audioCtx = new (window.AudioContext || window.webkitAudioContext)();
if (audioCtx.createJavaScriptNode) {
audioNode = audioCtx.createJavaScriptNode(bufferSize, 1, 1);
} else if (audioCtx.createScriptProcessor) {
audioNode = audioCtx.createScriptProcessor(bufferSize, 1, 1);
} else {
throw new Error('WebAudio is not supported!');
}
audioNode.connect(audioCtx.destination);
navigator.mediaDevices.getUserMedia({ audio: true })
.then(onMicrophoneCaptured)
.catch(onMicrophoneError);
};
this.stop = function() {
stopRecording(function(blob) {
self.blob = blob;
config.onRecordingStop && config.onRecordingStop(blob);
});
};
this.upload = function(url, params, callback) {
var formData = new FormData();
formData.append("audio", self.blob, config.filename || 'recording.wav');
for (var i in params)
formData.append(i, params[i]);
var request = new XMLHttpRequest();
request.upload.addEventListener("progress", function(e) {
callback('progress', e, request);
});
request.upload.addEventListener("load", function(e) {
callback('load', e, request);
});
request.onreadystatechange = function(e) {
var status = 'loading';
if (request.readyState === 4) {
status = request.status === 200 ? 'done' : 'error';
}
callback(status, e, request);
};
request.open("POST", url);
request.send(formData);
};
function stopRecording(callback) {
// stop recording
recording = false;
// to make sure onaudioprocess stops firing
window.localStream.getTracks().forEach((track) => { track.stop(); });
audioInput.disconnect();
audioNode.disconnect();
exportWav({
sampleRate: sampleRate,
recordingLength: recordingLength,
data: recordedData
}, function(buffer, view) {
self.blob = new Blob([view], { type: 'audio/wav' });
callback && callback(self.blob);
});
}
function onMicrophoneCaptured(microphone) {
if (config.visualizer)
visualize(microphone);
// save the stream so we can disconnect it when we're done
window.localStream = microphone;
audioInput = audioCtx.createMediaStreamSource(microphone);
audioInput.connect(audioNode);
audioNode.onaudioprocess = onAudioProcess;
recording = true;
self.startDate = new Date();
config.onRecordingStart && config.onRecordingStart();
sampleRate = audioCtx.sampleRate;
}
function onMicrophoneError(e) {
console.log(e);
alert('Unable to access the microphone.');
}
function onAudioProcess(e) {
if (!recording) {
return;
}
recordedData.push(new Float32Array(e.inputBuffer.getChannelData(0)));
recordingLength += bufferSize;
self.recordingLength = recordingLength;
self.duration = new Date().getTime() - self.startDate.getTime();
config.onRecording && config.onRecording(self.duration);
}
function visualize(stream) {
var canvas = config.visualizer.element;
if (!canvas)
return;
var canvasCtx = canvas.getContext("2d");
var source = audioCtx.createMediaStreamSource(stream);
var analyser = audioCtx.createAnalyser();
analyser.fftSize = 2048;
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
source.connect(analyser);
function draw() {
// get the canvas dimensions
var width = canvas.width,
height = canvas.height;
// ask the browser to schedule a redraw before the next repaint
requestAnimationFrame(draw);
// clear the canvas
canvasCtx.fillStyle = config.visualizer.backcolor || '#fff';
canvasCtx.fillRect(0, 0, width, height);
if (!recording)
return;
canvasCtx.lineWidth = config.visualizer.linewidth || 2;
canvasCtx.strokeStyle = config.visualizer.forecolor || '#f00';
canvasCtx.beginPath();
var sliceWidth = width * 1.0 / bufferLength;
var x = 0;
analyser.getByteTimeDomainData(dataArray);
for (var i = 0; i < bufferLength; i++) {
var v = dataArray[i] / 128.0;
var y = v * height / 2;
i === 0 ? canvasCtx.moveTo(x, y) : canvasCtx.lineTo(x, y);
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
}
draw();
}
function exportWav(config, callback) {
function inlineWebWorker(config, cb) {
var data = config.data.slice(0);
var sampleRate = config.sampleRate;
data = joinBuffers(data, config.recordingLength);
function joinBuffers(channelBuffer, count) {
var result = new Float64Array(count);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
var dataLength = data.length;
// create wav file
var buffer = new ArrayBuffer(44 + dataLength * 2);
var view = new DataView(buffer);
writeUTFBytes(view, 0, 'RIFF'); // RIFF chunk descriptor/identifier
view.setUint32(4, 44 + dataLength * 2, true); // RIFF chunk length
writeUTFBytes(view, 8, 'WAVE'); // RIFF type
writeUTFBytes(view, 12, 'fmt '); // format chunk identifier, FMT sub-chunk
view.setUint32(16, 16, true); // format chunk length
view.setUint16(20, 1, true); // sample format (raw)
view.setUint16(22, 1, true); // mono (1 channel)
view.setUint32(24, sampleRate, true); // sample rate
view.setUint32(28, sampleRate * 2, true); // byte rate (sample rate * block align)
view.setUint16(32, 2, true); // block align (channel count * bytes per sample)
view.setUint16(34, 16, true); // bits per sample
writeUTFBytes(view, 36, 'data'); // data sub-chunk identifier
view.setUint32(40, dataLength * 2, true); // data chunk length
// write the PCM samples
var index = 44;
for (var i = 0; i < dataLength; i++) {
view.setInt16(index, data[i] * 0x7FFF, true);
index += 2;
}
if (cb) {
return cb({
buffer: buffer,
view: view
});
}
postMessage({
buffer: buffer,
view: view
});
}
var webWorker = processInWebWorker(inlineWebWorker);
webWorker.onmessage = function(event) {
callback(event.data.buffer, event.data.view);
// release memory
URL.revokeObjectURL(webWorker.workerURL);
};
webWorker.postMessage(config);
}
function processInWebWorker(_function) {
var workerURL = URL.createObjectURL(new Blob([_function.toString(),
';this.onmessage = function (e) {' + _function.name + '(e.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(workerURL);
worker.workerURL = workerURL;
return worker;
}
function renderRecording(blob, list) {
// one object URL serves both the download link and the audio element
const blobUrl = URL.createObjectURL(blob);
const li = document.createElement('li');
const audio = document.createElement('audio');
const anchor = document.createElement('a');
anchor.setAttribute('href', blobUrl);
const now = new Date();
anchor.setAttribute(
'download',
`recording-${now.getFullYear()}-${(now.getMonth() + 1).toString().padStart(2, '0')}-${now.getDate().toString().padStart(2, '0')}--${now.getHours().toString().padStart(2, '0')}-${now.getMinutes().toString().padStart(2, '0')}-${now.getSeconds().toString().padStart(2, '0')}.wav`
);
anchor.innerText = 'Download';
audio.setAttribute('src', blobUrl);
audio.setAttribute('controls', 'controls');
li.appendChild(audio);
li.appendChild(anchor);
list.appendChild(li);
}
}
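For reference, a minimal sketch of how this recorder might be driven; the button ID, endpoint URL, and callback wiring here are assumptions, not part of the original script:
// Hypothetical wiring for Record_Current; 'record-btn' and '/upload' are placeholders.
var recorder = new Record_Current({
  filename: 'recording.wav',
  onRecordingStop: function (blob) {
    // POST the finished WAV using the recorder's own upload() helper
    recorder.upload('/upload', { word: 'example' }, function (status, e, request) {
      console.log('upload status:', status);
    });
  }
});
document.getElementById('record-btn')
  .addEventListener('click', recorder.toggleRecording);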
And here is the Google Drive upload script:
var stream = require('stream');
module.exports.uploadFile = function(req) {
console.log("driveApi upload reached");
// turn the posted blob into a named file object
function blobToFile(req) {
var file = req.body.blob;
file.lastModifiedDate = new Date();
file.name = req.body.word;
return file;
}
var file = blobToFile(req);
// wrap the file bytes in a stream the Drive client can consume
var bufStream = new stream.PassThrough();
bufStream.end(file);
var folderId = "Folder"; // must be a Drive folder ID, not a folder name
var fileMetadata = {
"name": req.body.word,
parents: [folderId]
}
var media = {
mimeType: "audio/mp3",
body: bufStream
}
// `drive` and `jwToken` are assumed to be initialized elsewhere (googleapis client and auth)
drive.files.create({
auth: jwToken,
resource: fileMetadata,
media: media,
fields: "id"
}, function(err, file) {
if (err) {
console.error(err);
} else {
console.log("File Id: ", file.id);
}
console.log("driveApi upload accomplished")
});
}
I have tried a few different approaches to combining the two so that the .wav file is automatically saved to Google Drive, but it does not work.
I believe I am either not merging the two scripts the right way or I am missing something. Do I need to use v3 of the Google Drive API?
If anyone could provide some guidance on how to merge the two properly, that would be greatly appreciated. Thank you!
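For what it's worth, one way to wire the two together is to keep the recorder's upload() pointed at your own server route and do the Drive call there. Below is a minimal server-side sketch, assuming Express, multer for the multipart body, and an authorized googleapis client; the route path, folder ID, and auth object are placeholders:
const express = require('express');
const multer = require('multer');
const stream = require('stream');
const { google } = require('googleapis');

const app = express();
const upload = multer({ storage: multer.memoryStorage() });

// Field name matches the client: formData.append("audio", self.blob, 'recording.wav')
app.post('/upload', upload.single('audio'), async (req, res) => {
  const bufStream = new stream.PassThrough();
  bufStream.end(req.file.buffer); // the WAV bytes multer buffered in memory

  // jwToken stands in for the auth object already used in the Drive script
  const drive = google.drive({ version: 'v3', auth: jwToken });
  const file = await drive.files.create({
    requestBody: { name: req.file.originalname, parents: ['YOUR_FOLDER_ID'] },
    media: { mimeType: 'audio/wav', body: bufStream },
    fields: 'id',
  });
  res.json({ id: file.data.id });
});
The googleapis Node client above is the v3 surface, so no separate migration would be needed; on the client side, record.js already posts the blob as the "audio" field, so self.upload('/upload', {}, callback) would hit this route.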

Related

Recording audio from mic to .wav file using Web Audio API and ScriptProcessorNode

I want to record audio from the mic into .wav format using the Web Audio API and ScriptProcessorNode.
I tried to create such a recording with the help of:
Use JavaScript to record audio as .wav in Chrome
https://web.dev/media-recording-audio/
https://aws.amazon.com/blogs/machine-learning/capturing-voice-input-in-a-browser/
Here is my code
let button = document.createElement('button')
button.innerHTML = 'Start'
button.addEventListener('click', startRecording)
document.body.appendChild(button)
let listening = false;
let numChannels = 1;
let recBuffers = [[], []];
let recLength = 0;
let maxTime = 5000;
let context;
let timeout; // handle for the auto-stop timer
function startRecording() {
const handleSuccess = function (stream) {
let audioContext = new AudioContext({
// latencyHint: 'interactive',
sampleRate: 44100,
});
const source = audioContext.createMediaStreamSource(stream);
context = source.context;
const node = (context.createScriptProcessor || context.createJavaScriptNode).call(context, 4096, numChannels, numChannels);
source.connect(node);
node.connect(audioContext.destination);
node.onaudioprocess = function (e) {
if (!listening) return;
for (var i = 0; i < numChannels; i++) {
recBuffers[i].push([...e.inputBuffer.getChannelData(i)]);
}
recLength += e.inputBuffer.length;
};
};
navigator.mediaDevices.getUserMedia({ audio: true, video: false })
.then(handleSuccess)
.then(beginRecording); // start listening once the audio graph is wired up
}
function beginRecording() {
recBuffers = [[], []];
recLength = 0;
listening = true;
timeout = setTimeout(() => {
endRecording();
}, maxTime);
}
function endRecording() {
clearTimeout(timeout);
timeout = null;
let alok = exportWAV();
console.log('alok', alok)
let link = document.createElement('a')
link.href = window.URL.createObjectURL(alok)
link.download = 'alok.wav'
link.click()
}
function mergeBuffers(buffers, len) {
let result = new Float32Array(len);
let offset = 0;
for (var i = 0; i < buffers.length; i++) {
result.set(buffers[i], offset);
offset += buffers[i].length;
}
return result;
}
function interleave(inputL, inputR) {
let len = inputL.length + inputR.length;
let result = new Float32Array(len);
let index = 0;
let inputIndex = 0;
while (index < len) {
result[index++] = inputL[inputIndex];
result[index++] = inputR[inputIndex];
inputIndex++;
}
return result;
}
function exportWAV() {
let buffers = [];
for (var i = 0; i < numChannels; i++) {
buffers.push(mergeBuffers(recBuffers[i], recLength));
}
let interleaved = numChannels == 2 ? interleave(buffers[0], buffers[1]) : buffers[0];
let dataView = encodeWAV(interleaved);
let blob = new Blob([dataView], { type: 'audio/wav' });
blob.name = Math.floor((new Date()).getTime() / 1000) + '.wav';
listening = false;
return blob;
}
function floatTo16BitPCM(output, offset, input) {
for (var i = 0; i < input.length; i++, offset += 2) {
var s = Math.max(-1, Math.min(1, input[i]));
output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
}
function writeString(view, offset, string) {
for (var i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
function encodeWAV(samples) {
var buffer = new ArrayBuffer(44 + samples.length * 2);
var view = new DataView(buffer);
/* RIFF identifier */
writeString(view, 0, 'RIFF');
/* file length */
view.setUint32(4, 36 + samples.length * 2, true);
/* RIFF type */
writeString(view, 8, 'WAVE');
/* format chunk identifier */
writeString(view, 12, 'fmt ');
/* format chunk length */
view.setUint32(16, 16, true);
/* sample format (raw) */
view.setUint16(20, 1, true);
/* channel count */
view.setUint16(22, numChannels, true);
/* sample rate */
view.setUint32(24, context.sampleRate, true);
/* byte rate (sample rate * block align) */
view.setUint32(28, context.sampleRate * numChannels * 2, true);
/* block align (channel count * bytes per sample) */
view.setUint16(32, numChannels * 2, true);
/* bits per sample */
view.setUint16(34, 16, true);
/* data chunk identifier */
writeString(view, 36, 'data');
/* data chunk length */
view.setUint32(40, samples.length * 2, true);
floatTo16BitPCM(view, 44, samples);
return view;
}
The code runs without errors, but the generated audio does not play. What correction is needed to produce correct WAV data?
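For what it's worth, a quick way to check whether an exported blob is structurally valid WAV data is to decode it back through the Web Audio API; a minimal sketch (the function name is mine):
// Round-trip the blob through decodeAudioData; a malformed header
// usually fails here with a decode error.
function verifyWav(blob) {
  blob.arrayBuffer()
    .then((buf) => new AudioContext().decodeAudioData(buf))
    .then((decoded) => {
      console.log('decoded OK:', decoded.duration.toFixed(2), 's at',
        decoded.sampleRate, 'Hz');
    })
    .catch((err) => console.error('invalid wav:', err));
}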

Error recording audio on Android using getUserMedia (JS)

The code below is from a webpage that records audio. I have a problem I cannot solve; I have already tried several approaches and nothing works. On a desktop the code runs normally and the audio comes out clean. The real problem happens when I run the same test on Android 6.0 and 7.0: the whole process completes, but the recorded voice comes out distorted. I have tried to fix it but cannot. Maybe someone with more experience can help me.
NOTE: the page is served over HTTPS.
var audioContext = null;
var context = null;
var volume = null;
var audioInput = null;
var recorder = null;
var recordingLength = 0;
var leftchannel = [];
var rightchannel = [];
var bufferSize = 16384;
var sampleRate = 8000;
var requestStreamReadPermission = {
audio: true,
video:false
};
$scope.canRecordAudio = false;
// -
navigator.getUserMedia = (navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
// -
var writeUTFBytes = function(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
};
// -
var interleave = function(leftChannel, rightChannel) {
var length = leftChannel.length + rightChannel.length;
var result = new Float32Array(length);
var inputIndex = 0;
for (var index = 0; index < length;) {
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
};
// -
var mergeBuffers = function(channelBuffer, recordingLength) {
var result = new Float32Array(recordingLength);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
};
// -
var errorGetUserMedia = function(error) {
console.log(error);
GeneralMessageService.clean();
GeneralMessageService.addMessage($filter('translate')('chat.mobile.checkorwaitforconnection'));
};
// -
var successGetUserMedia = function(stream) {
audioContext = window.AudioContext || window.webkitAudioContext;
context = new audioContext();
// retrieve the current sample rate to use for the WAV packaging
// sampleRate = context.sampleRate;
volume = context.createGain();
audioInput = context.createMediaStreamSource(stream);
audioInput.connect(volume);
//recorder = context.createScriptProcessor(bufferSize, 2, 2);
var numberOfInputChannels = 2;
var numberOfOutputChannels = 2;
if (context.createScriptProcessor) {
recorder = context.createScriptProcessor(bufferSize, numberOfInputChannels, numberOfOutputChannels);
} else {
recorder = context.createJavaScriptNode(bufferSize, numberOfInputChannels, numberOfOutputChannels);
}
recorder.onaudioprocess = function(stream) {
if (!$scope.recordingAudio) {
return;
}
var left = stream.inputBuffer.getChannelData(0);
//var right = stream.inputBuffer.getChannelData(1);
leftchannel.push(new Float32Array(left));
//rightchannel.push(new Float32Array(right));
recordingLength += bufferSize;
};
volume.connect(recorder);
recorder.connect(context.destination);
$scope.canRecordAudio = true;
};
// -
if (!!navigator.getUserMedia) {
navigator.getUserMedia(
requestStreamReadPermission,
successGetUserMedia,
errorGetUserMedia
);
} else {
errorGetUserMedia('UserMedia is empty');
}
// -
var startRecording = function() {
leftchannel.length = rightchannel.length = 0;
recordingLength = 0;
};
// -
var stopRecording = function() {
var leftBuffer = mergeBuffers(leftchannel, recordingLength);
//var rightBuffer = mergeBuffers(rightchannel, recordingLength);
//var interleaved = interleave(leftBuffer, rightBuffer);
var buffer = new ArrayBuffer(44 + leftBuffer.length * 2);
var view = new DataView(buffer);
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 44 + leftBuffer.length * 2, true);
writeUTFBytes(view, 8, 'WAVE');
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
//MONO
view.setUint16(22, 1, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, sampleRate * 2, true);
view.setUint16(32, 2, true);
view.setUint16(34, 16, true);
//stereo
/*view.setUint16(22, 2, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, sampleRate * 4, true);
view.setUint16(32, 4, true);
view.setUint16(34, 16, true);*/
//end
writeUTFBytes(view, 36, 'data');
view.setUint32(40, leftBuffer.length * 2, true);
var lng = leftBuffer.length;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++) {
var s = Math.max(-1, Math.min(1, leftBuffer[i]));
view.setInt16(index, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
index += 2;
}
var blob = new Blob([view], {
type: 'audio/wav'
});
var url = (window.URL || window.webkitURL).createObjectURL(blob);
addAudioMessage(url);
//clear();
};
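For what it's worth, one common cause of distorted playback on Android is the hard-coded sampleRate = 8000: getUserMedia captures at the context's native rate (often 44100 or 48000 Hz), so the WAV header disagrees with the data. A sketch of the adjustment, assuming the mono 16-bit layout used above:
// In successGetUserMedia, after creating the context, take the real rate
// instead of assuming 8000 Hz:
sampleRate = context.sampleRate; // e.g. 44100 or 48000 on most devices

// The header fields in stopRecording then stay consistent with the data:
view.setUint32(24, sampleRate, true);     // sample rate
view.setUint32(28, sampleRate * 2, true); // byte rate = sample rate * block align (mono, 16-bit)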

Using Three.js in an ECMAScript 6 STL viewer class

I am trying to convert a Three.js STL viewer demonstration into an ES6 class, which is then compiled with webpack 2, but I want it to open the file from a URL rather than from an actual file input. I have managed to load the Three.js components and most of the class is working, but I think I am having scope problems with a few of the components. I have run into a problem I do not seem to be able to get past when trying to use FileReader and then updating the scene and object: TypeError: Cannot read property 'remove' of undefined.
threeDimensionalModels.js
// jshint esversion:6
import {
Scene, PerspectiveCamera, WebGLRenderer, Geometry, Mesh,
AmbientLight, DirectionalLight, MeshPhongMaterial, Vector3, Face3,
} from 'three';
var thisClass = null;
class threeDimensionalModels {
constructor(height,width,selector){
this.w = height;
this.h = width;
this.selector = selector;
this.renderer = new WebGLRenderer();
this.view = document.getElementById(this.selector);
this.camera = new PerspectiveCamera(45, this.w / this.h, 1, 1000);
this.scene = new Scene();
this.light1 = new DirectionalLight(0xffffff);
this.light2 = new DirectionalLight(0xffffff);
this.mat = new MeshPhongMaterial({ color: 0x339900, specular: 0x030303 });
this.obj = new Mesh(new Geometry(), this.mat);
this.renderer.setSize(this.w, this.h);
this.view.appendChild(this.renderer.domElement);
this.camera.position.set(0, 0, 50);
this.scene.add(new AmbientLight(0x666666));
this.light1.position.set(0, 100, 100);
this.scene.add(this.light1);
this.light2.position.set(0, -100, -100);
this.scene.add(this.light2);
this.scene.add(this.obj);
}
static binaryVector3(view, offset) {
var v = new Vector3();
v.x = view.getFloat32(offset + 0, true);
v.y = view.getFloat32(offset + 4, true);
v.z = view.getFloat32(offset + 8, true);
return v;
}
static loadBinaryStl(buffer) {
// binary STL
var view = new DataView(buffer);
var size = view.getUint32(80, true);
var geom = new Geometry();
var offset = 84;
for (var i = 0; i < size; i++) {
var normal = threeDimensionalModels.binaryVector3(view, offset);
geom.vertices.push(threeDimensionalModels.binaryVector3(view, offset + 12));
geom.vertices.push(threeDimensionalModels.binaryVector3(view, offset + 24));
geom.vertices.push(threeDimensionalModels.binaryVector3(view, offset + 36));
geom.faces.push(
new Face3(i * 3, i * 3 + 1, i * 3 + 2, normal));
offset += 4 * 3 * 4 + 2;
}
return geom;
}
static m2vec3(match) {
var v = new Vector3();
v.x = parseFloat(match[1]);
v.y = parseFloat(match[2]);
v.z = parseFloat(match[3]);
return v;
}
static toLines(array) {
var lines = [];
var h = 0;
for (var i = 0; i < array.length; i++) {
if (array[i] === 10) {
var line = String.fromCharCode.apply(
null, array.subarray(h, i));
lines.push(line);
h = i + 1;
}
}
lines.push(String.fromCharCode.apply(null, array.subarray(h)));
return lines;
}
static loadTextStl(buffer) {
var lines = threeDimensionalModels.toLines(new Uint8Array(buffer));
var index = 0;
var scan = function (regexp) {
while (lines[index].match(/^\s*$/)) index++;
var r = lines[index].match(regexp);
return r;
};
var scanOk = function (regexp) {
var r = scan(regexp);
if (!r) throw new Error(
"not text stl: " + regexp.toString() +
"=> (line " + (index - 1) + ")" +
"[" + lines[index-1] + "]");
index++;
return r;
};
var facetReg = /^\s*facet\s+normal\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)/;
var vertexReg = /^\s*vertex\s+([^s]+)\s+([^\s]+)\s+([^\s]+)/;
var geom = new Geometry();
scanOk(/^\s*solid\s(.*)/);
while (!scan(/^\s*endsolid/)) {
var normal = scanOk(facetReg);
scanOk(/^\s*outer\s+loop/);
var v1 = scanOk(vertexReg);
var v2 = scanOk(vertexReg);
var v3 = scanOk(vertexReg);
scanOk(/\s*endloop/);
scanOk(/\s*endfacet/);
var base = geom.vertices.length;
geom.vertices.push(threeDimensionalModels.m2vec3(v1));
geom.vertices.push(threeDimensionalModels.m2vec3(v2));
geom.vertices.push(threeDimensionalModels.m2vec3(v3));
geom.faces.push(
new Face3(base, base + 1, base + 2, threeDimensionalModels.m2vec3(normal)));
}
return geom;
}
static loadStlModel(buffer) {
try {
return threeDimensionalModels.loadTextStl(buffer);
} catch (ex) {
return threeDimensionalModels.loadBinaryStl(buffer);
}
}
openStl(url) {
var blob = null;
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "blob";//force the HTTP response, response-type header to be blob
xhr.onload = function() {
blob = xhr.response;
thisClass = this;
var reader = new FileReader();
reader.addEventListener("load", function (ev) {
var buffer = ev.target.result;
var geom = threeDimensionalModels.loadStlModel(buffer);
threeDimensionalModels.scene.remove(thisClass.obj);
threeDimensionalModels.obj = new Mesh(geom, threeDimensionalModels.mat);
threeDimensionalModels.scene.add(threeDimensionalModels.obj);
}, false);
reader.readAsArrayBuffer(blob);
};
xhr.send();
}
}
export default threeDimensionalModels;
HTML
<div id="threedm-view"></div>
<script type="text/javascript">
window.addEventListener("load", function () {
"use strict";
var threedm = new threeDimensionalModels(800,800,'threedm-view');
var loop = function loop() {
requestAnimationFrame(loop);
threedm.obj.rotation.z += 0.05;
threedm.renderer.clear();
threedm.renderer.render(threedm.scene, threedm.camera);
};
loop();
threedm.openStl("/app/uploads/2017/07/Stator.stl");
});
</script>
After a bit more playing around and looking at the problem area, I realised that I was relying on a global variable to hold this, where the this I wanted did not exist in that context. I would like to find a better way of passing this into child functions inside ES6 class methods without using a global variable.
Update
class threeDimensionalModels {
// .... //
openStl(url) {
var blob = null;
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "blob";
thisClass = this; // Moved from inside xhr.onload
xhr.onload = function() {
blob = xhr.response;
var reader = new FileReader();
reader.addEventListener("load", function (ev) {
var buffer = ev.target.result;
var geom = threeDimensionalModels.loadStlModel(buffer);
thisClass.scene.remove(thisClass.obj);
thisClass.obj = new Mesh(geom, thisClass.mat);
thisClass.scene.add(thisClass.obj);
}, false);
reader.readAsArrayBuffer(blob);
};
xhr.send();
}
// ... //
}
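Arrow functions are the usual way to avoid the captured variable entirely: they take this from the enclosing scope, so the class instance is available directly inside the callbacks. A sketch of openStl rewritten that way (same XHR flow, just arrow callbacks):
openStl(url) {
  const xhr = new XMLHttpRequest();
  xhr.open("GET", url);
  xhr.responseType = "blob";
  xhr.onload = () => {                       // arrow fn: `this` is still the instance
    const reader = new FileReader();
    reader.addEventListener("load", (ev) => {
      const geom = threeDimensionalModels.loadStlModel(ev.target.result);
      this.scene.remove(this.obj);           // no thisClass global needed
      this.obj = new Mesh(geom, this.mat);
      this.scene.add(this.obj);
    }, false);
    reader.readAsArrayBuffer(xhr.response);
  };
  xhr.send();
}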

Creating video and audio in one mp4 file

This code creates video only (without audio) on Chrome; I want video with audio in a single mp4/webm file. Is this possible with PHP on the server side?
Please give me helpful solutions.
var mediaConstraints = { audio: true, video: true };
//var isFirefox = true;
document.querySelector('#start-recording').onclick = function() {
this.disabled = true;
navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);
};
document.querySelector('#stop-recording').onclick = function() {
this.disabled = true;
mediaRecorder.stop();
};
var mediaRecorder;
function onMediaSuccess(stream) {
var video = document.createElement('video');
var videoWidth = document.getElementById('video-width').value || 320;
var videoHeight = document.getElementById('video-height').value || 240;
video = mergeProps(video, {
controls: true,
id:"preview",
width: videoWidth,
height: videoHeight,
src: URL.createObjectURL(stream)
});
video.play();
//videot='2000';
videosContainer.appendChild(video);
videosContainer.appendChild(document.createElement('hr'));
mediaRecorder = new MediaStreamRecorder(stream);
mediaRecorder.mimeType = 'video/mp4'; // this line is mandatory
mediaRecorder.videoWidth = videoWidth;
mediaRecorder.videoHeight = videoHeight;
mediaRecorder.ondataavailable = function(blob) {
//alert(blob.audio);
//alert(blob.video);
// var a = document.createElement('a');
// a.target = '_blank';
// a.innerHTML = 'Open Recorded Video No. ' + (index++) + ' (Size: ' + bytesToSize(blob.size) + ') Time Length: ' + getTimeLength(timeInterval);
// a.href = URL.createObjectURL(blob);
//videosContainer.appendChild(a);
// videosContainer.appendChild(document.createElement('hr'));
var facedeteval = document.getElementById('facedetectvalue').value;
if(facedeteval !='1')
{
//alert(facedeteval);
var testuserID = document.getElementById("tuid").value;
var capId =document.getElementById("capimg").value;
//alert(capId);
var fileType = 'video'; // or "audio"
var vname = document.getElementById('sect_id').value;
var res = vname;
var fileName = res + (index++) + '.mp4'; // or ".wav" or ".ogg"
var formData = new FormData();
formData.append(fileType + '-filename', fileName);
formData.append(fileType + '-blob', blob);
formData.append('test_uid', testuserID);
formData.append('cap_id', capId);
function xhr(url, data, callback) {
var request = new XMLHttpRequest();
request.onreadystatechange = function () {
if (request.readyState == 4 && request.status == 200) {
callback(location.href + request.responseText);
}
};
request.open('POST', url);
request.send(data);
}
xhr('save.php', formData, function (fileURL) {
alert('video save');
stopflag = '0';
document.getElementById('facedetectvalue').value = '1';
});
}
};
var timeInterval = document.querySelector('#time-interval').value;
if(timeInterval) timeInterval = parseInt(timeInterval);
else timeInterval = 5 * 1000;
// get blob after specific time interval
mediaRecorder.start(timeInterval);
document.querySelector('#stop-recording').disabled = false;
}
function onMediaError(e) {
console.error('media error', e);
}
var videosContainer = document.getElementById('videos-container');
var index = 1;
// below function via: http://goo.gl/B3ae8c
function bytesToSize(bytes) {
var k = 1000;
var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
if (bytes === 0) return '0 Bytes';
var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)),10);
return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}
// below function via: http://goo.gl/6QNDcI
function getTimeLength(milliseconds) {
var data = new Date(milliseconds);
return data.getUTCHours()+" hours, "+data.getUTCMinutes()+" minutes and "+data.getUTCSeconds()+" second(s)";
}
window.onbeforeunload = function() {
document.querySelector('#start-recording').disabled = false;
};
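For what it's worth, the browser's native MediaRecorder puts the audio and video tracks into a single WebM container (Chrome does not record mp4 natively); a minimal sketch, with save.php standing in for your existing PHP endpoint:
// Record audio + video into one WebM blob with the native MediaRecorder API.
navigator.mediaDevices.getUserMedia({ audio: true, video: true })
  .then(function (stream) {
    var chunks = [];
    var recorder = new MediaRecorder(stream, { mimeType: 'video/webm' });
    recorder.ondataavailable = function (e) { chunks.push(e.data); };
    recorder.onstop = function () {
      var blob = new Blob(chunks, { type: 'video/webm' });
      var formData = new FormData();
      formData.append('video-blob', blob, 'recording.webm');
      var xhr = new XMLHttpRequest();
      xhr.open('POST', 'save.php'); // the PHP side only has to persist the upload
      xhr.send(formData);
    };
    recorder.start();
    setTimeout(function () { recorder.stop(); }, 5000); // record for 5 seconds
  });
PHP itself cannot capture media; it only receives whatever container the browser produced, so converting WebM to mp4 would have to happen server-side (e.g. with ffmpeg).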

Setting up Web Audio: multiple sounds, separate gainNodes and a global LOWPASS filter?

I am trying to set up some Web Audio to load and play multiple sound sources simultaneously.
The sounds are being loaded for now, and playback is triggered through a button input.
My problem is that I want all the sounds to run through one BiquadFilter (in this case a LOWPASS filter).
I believe I have created the filter (in two different places; I am not sure which is right, see the attached code), but I cannot get a range input to control the frequency. Something isn't communicating properly and I am completely lost.
Also, on the same topic, I want each individual sound to run through its own independent GainNode (volume control), again changed via a range input.
Basically there will be six audio files, each running through its own GainNode and then coming together to go through a LOWPASS filter before the destination (i.e. the speakers).
I was also hoping to run each sound through its own PannerNode, but I am currently close to giving up on the project altogether.
Below is my code (like I said before, the button triggers all the sounds, but the filter is a BIG problem):
HTML:
<body>
<div id="startbtn">
<p><input type="button" onClick="tracks.toggle();">PLAY!</p>
</div> <!-- startbtn div -->
<div id="frequency">
<p><input type="range" id="freq1" min="0" max="1" step="0.01" value="1" onchange="sound.changeFrequency(this);" style="width:180px; background-color:#FFF;"> Frequency</p>
</p>
</div>
<script>
var tracks = new SongTracks();
var sound = new playSound();
</script>
</body>
JAVASCRIPT:
var context = new webkitAudioContext();
var myAudioAnalyser;
function init() {
if('webkitAudioContext' in window) {
myAudioContext = new webkitAudioContext();
// an analyser is used for the spectrum
myAudioAnalyser = myAudioContext.createAnalyser();
myAudioAnalyser.smoothingTimeConstant = 0.85;
myAudioAnalyser.connect(myAudioContext.destination);
fetchSounds();
};
};
// shim layer with setTimeout fallback
window.requestAnimFrame = (function(){
return window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function( callback ){
window.setTimeout(callback, 1000 / 60);
};
})();
function playSound(buffer, time) {
var source = context.createBufferSource();
source.buffer = buffer;
var filter = context.createBiquadFilter(); ///////////////// HERE
filter.type = filter.LOWPASS;
filter.frequency.value = 5000;
source.connect(filter);
filter.connect(context.destination);
source.start(time);
this.filter = filter;
};
function loadSounds(obj, soundMap, callback) {
var names = []
var paths = []
for (var name in soundMap) {
var path = soundMap[name];
names.push(name);
paths.push(path);
}
bufferLoader = new BufferLoader(context, paths, function(bufferList) {
for (var i = 0; i < bufferList.length; i++) {
var buffer = bufferList[i];
var name = names[i];
obj[name] = buffer;
}
if (callback) {
callback();
}
});
bufferLoader.load();
};
function BufferLoader(context, urlList, callback) {
this.context = context;
this.urlList = urlList;
this.onload = callback;
this.bufferList = new Array();
this.loadCount = 0;
}
BufferLoader.prototype.loadBuffer = function(url, index) {
// Load buffer asynchronously
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
var loader = this;
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.context.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer;
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList);
},
function(error) {
console.error('decodeAudioData error', error);
}
);
}
request.onerror = function() {
alert('BufferLoader: XHR error');
}
request.send();
};
BufferLoader.prototype.load = function() {
for (var i = 0; i < this.urlList.length; ++i)
this.loadBuffer(this.urlList[i], i);
};
var SongTracks = function() {
loadSounds(this, {
vocals: 'tracks/vocals.mp3',
guitar: 'tracks/guitar.mp3',
piano: 'tracks/piano.mp3'
});
};
var filter;
SongTracks.prototype.play = function() {
playSound(this.vocals, 0);
playSound(this.guitar, 0);
playSound(this.piano, 0);
///////////////////////////////////////////////////////////// OR HERE
var source1 = context.createBufferSource();
source1.buffer = this.buffer
source1 = bufferList[0];
var filter = context.createBiquadFilter();
filter.type = filter.LOWPASS;
filter.frequency.value = 5000;
source1.connect(filter);
filter.connect(context.destination);
this.filter = filter;
///////////////////////////////////////////////////////////////////// TO HERE?
};
SongTracks.prototype.stop = function() {
this.source.stop(0);
};
SongTracks.prototype.toggle = function() {
this.isPlaying ? this.stop() : this.play();
this.isPlaying = !this.isPlaying;
};
/* SongTracks.prototype.changeFrequency = function(element) {
var minValue = 40;
var maxValue = context.sampleRate / 2;
var numberOfOctaves = Math.log(maxValue / minValue) / Math.LN2;
var multiplier = Math.pow(2, numberOfOctaves * (element.value - 1.0));
this.filter.frequency.value = maxValue * multiplier;
}; */
playSound.prototype.changeFrequency = function(element) {
var minValue = 40;
var maxValue = context.sampleRate / 2;
var numberOfOctaves = Math.log(maxValue / minValue) / Math.LN2;
var multiplier = Math.pow(2, numberOfOctaves * (element.value - 1.0));
this.filter.frequency.value = maxValue * multiplier;
};
</script>
As you can see from my notes, I am very confused and have hit a brick wall.
I've seen code which differentiates the audio files, something like:
var source1 = context.createBufferSource();
var source2 = context.createBufferSource();
var source3 = context.createBufferSource();
var source4 = context.createBufferSource();
source1.buffer = bufferList[0];
source2.buffer = bufferList[1];
source3.buffer = bufferList[2];
source4.buffer = bufferList[3];
But I have no idea how to fit this in. Good luck!
You should probably simply pass the node to connect to into playSound, and then pass it the FilterNode.
Inside your playSound is the wrong place to create the BiquadFilter - you'll end up creating N of them, one for each playing sound, and you only want one.
You want something like:
HTML file the same, except:
<input type="range" id="freq1" min="0" max="1" step="0.01" value="1" onchange="changeFilterFrequency(this);" style="width:180px; background-color:#FFF;"> Frequency</p>
JS:
function playSound(buffer, outputNode, time) {
var source = context.createBufferSource();
source.buffer = buffer;
source.connect(outputNode);
source.start(time);
}
var globalFilter = null; // one global filter
SongTracks.prototype.play = function() {
globalFilter = context.createBiquadFilter(); // assign the global, don't shadow it
globalFilter.type = globalFilter.LOWPASS;
globalFilter.frequency.value = 5000;
globalFilter.connect(context.destination);
playSound(this.vocals, globalFilter, 0);
playSound(this.guitar, globalFilter, 0);
playSound(this.piano, globalFilter, 0);
};
function changeFilterFrequency(element) {
var minValue = 40;
var maxValue = context.sampleRate / 2;
var numberOfOctaves = Math.log(maxValue / minValue) / Math.LN2;
var multiplier = Math.pow(2, numberOfOctaves * (element.value - 1.0));
globalFilter.frequency.value = maxValue * multiplier;
}
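The question also asked for a separate GainNode per sound. A sketch under the same setup, giving each source its own gain and feeding them all into the shared filter; the initial gain values and the slider wiring are placeholders:
function playSoundWithGain(buffer, outputNode, time, initialGain) {
  var source = context.createBufferSource();
  var gain = context.createGain();   // one GainNode per sound
  source.buffer = buffer;
  gain.gain.value = initialGain;     // hook a range input to this later
  source.connect(gain);
  gain.connect(outputNode);          // all gains feed the shared lowpass
  source.start(time);
  return gain;                       // keep a handle for the volume slider
}

// e.g. inside SongTracks.prototype.play:
// this.vocalsGain = playSoundWithGain(this.vocals, globalFilter, 0, 0.8);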
