Play PCM as it is being generated - JavaScript

I am generating some raw audio data in JavaScript and I need to play it as I am generating it. I searched for this here, and the closest thing to what I am looking for is this. However, in the answer given there, the array of data points is generated first and then the audio is played. I need to play it while generating it. Basically, I am receiving a stream of other data, processing it, and generating the audio based on that. I need to play the audio corresponding to the data I am receiving as I am receiving it. (A simplified example is receiving the audio volume and frequency.)

If I'm getting the request correctly, then all you need is a ScriptProcessorNode.
You will feed it with your PCM data in the following way:
wait for its onaudioprocess event.
get the outputBuffer from the event, which is an AudioBuffer.
loop through each channel of the outputBuffer (getChannelData returns a Float32Array).
loop through all the samples of each channel's data.
set your own data.
function makeSomeNoise() {
  var ctx = new AudioContext();
  var processor = ctx.createScriptProcessor(4096, 1, 1);
  processor.onaudioprocess = function(evt) {
    var outputBuffer = evt.outputBuffer;
    // Loop through the output channels
    for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++) {
      var outputData = outputBuffer.getChannelData(channel);
      // Loop through the 4096 samples
      for (var sample = 0; sample < outputBuffer.length; sample++) {
        outputData[sample] = ((Math.random() * 2) - 1) * 0.5;
      }
    }
  };
  processor.connect(ctx.destination);
}
btn.onclick = function() {
  if (confirm("This won't sound very nice"))
    makeSomeNoise();
}
<button id="btn">make some noise</button>

Related

Record internal audio of a website via JavaScript

I made this web app to compose music, and I wanted to add a feature to download the composition as .mp3/wav/whatever file format is possible. I've searched for how to do this many times and always gave up, as I couldn't find any examples on how to do it; the only things I found were microphone recorders, but I want to record the final audio destination of the website.
I play audio in this way:
const a_ctx = new (window.AudioContext || window.webkitAudioContext)()

function playAudio(buf) {
  const source = a_ctx.createBufferSource()
  source.buffer = buf
  source.playbackRate.value = pitchKey;
  // Other code to modify the audio, like adding reverb and changing volume
  source.start(0)
}
where buf is the AudioBuffer.
To sum up, I want to record the whole window's audio but can't come up with a way to do it.
link to the whole website code on GitHub
Maybe you could use the MediaStream Recording API (https://developer.mozilla.org/en-US/docs/Web/API/MediaStream_Recording_API):
The MediaStream Recording API, sometimes simply referred to as the Media Recording API or the MediaRecorder API, is closely affiliated with the Media Capture and Streams API and the WebRTC API. The MediaStream Recording API makes it possible to capture the data generated by a MediaStream or HTMLMediaElement object for analysis, processing, or saving to disk. It's also surprisingly easy to work with.
Also, you may take a look at this topic: new MediaRecorder(stream[, options]) stream can living modify?. It seems to discuss a related issue and might provide you with some insights.
The following code generates some random noise, applies a transform, plays it, and creates an audio control which allows the noise to be downloaded from its context menu via "Save audio as...". (I needed to change the extension of the saved file to .wav in order to play it.)
<html>
<head>
<script>
const context = new (window.AudioContext || window.webkitAudioContext)()

async function run() {
  var myArrayBuffer = context.createBuffer(2, context.sampleRate, context.sampleRate);
  // Fill the buffer with white noise;
  // just random values between -1.0 and 1.0
  for (var channel = 0; channel < myArrayBuffer.numberOfChannels; channel++) {
    // This gives us the actual array that contains the data
    var nowBuffering = myArrayBuffer.getChannelData(channel);
    for (var i = 0; i < myArrayBuffer.length; i++) {
      // audio needs to be in [-1.0; 1.0]
      nowBuffering[i] = Math.random() * 2 - 1;
    }
  }
  playAudio(myArrayBuffer)
}

function playAudio(buf) {
  const streamNode = context.createMediaStreamDestination();
  const stream = streamNode.stream;
  const recorder = new MediaRecorder(stream);
  const chunks = [];
  recorder.ondataavailable = evt => chunks.push(evt.data);
  recorder.onstop = evt => exportAudio(new Blob(chunks));
  const source = context.createBufferSource()
  source.onended = () => recorder.stop();
  source.buffer = buf
  source.playbackRate.value = 0.2
  source.connect(streamNode);
  source.connect(context.destination);
  source.start(0)
  recorder.start();
}

function exportAudio(blob) {
  const aud = new Audio(URL.createObjectURL(blob));
  aud.controls = true;
  document.body.prepend(aud);
}
</script>
</head>
<body onload="javascript:run()">
  <input type="button" onclick="context.resume()" value="play"/>
</body>
</html>
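One caveat: MediaRecorder encodes to a compressed container chosen by the browser (typically audio/webm or audio/ogg), so renaming the saved file to .wav doesn't change the underlying format. If the format matters, you can inspect or request the container; a short sketch:

// After recorder.start(), check what the browser actually produced
console.log(recorder.mimeType);
// Or ask for a specific container up front, if the browser supports it
if (MediaRecorder.isTypeSupported('audio/webm')) {
  // new MediaRecorder(stream, { mimeType: 'audio/webm' })
}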
Is this what you were looking for?

Struggling to play back a Float32Array (Web Audio API)

I'm building a simple looper to help me come to an understanding of the Web Audio API; however, I'm struggling to get a buffer source to play back the recorded audio.
The code has been simplified as much as possible, but with annotation it's still 70+ lines (omitting the CSS and HTML), so apologies for that. A version including the CSS and HTML can be found on JSFiddle:
https://jsfiddle.net/b5w9j4yk/10/
Any help would be much appreciated. Thank you :)
// Aim of the code is to record the input from the mic to a Float32Array, then pass that to a buffer
// which is linked to a buffer source, so the audio can be played back.

// Grab DOM Elements
const playButton = document.getElementById('play');
const recordButton = document.getElementById('record');

// If allowed access to the microphone, run this code
const promise = navigator.mediaDevices.getUserMedia({audio: true, video: false})
  .then((stream) => {
    recordButton.addEventListener('click', () => {
      // When the record button is pressed, instantiate a fresh record buffer
      if (!recordArmed) {
        recordArmed = true;
        recordButton.classList.add('on');
        console.log('recording armed')
        recordBuffer = new Float32Array(audioCtx.sampleRate * 10);
      }
      else {
        recordArmed = false;
        recordButton.classList.remove('on');
        // After the recording has stopped, pass the recordBuffer to the source's buffer
        myArrayBuffer.copyToChannel(recordBuffer, 0);
        // Looks like the buffer has been passed
        console.log(myArrayBuffer.getChannelData(0));
      }
    });

    // This should start the playback of the source, intended to be used after the audio
    // has been recorded; I can't get it to work in this given context
    playButton.addEventListener('click', () => {
      playButton.classList.add('on');
      source.start();
    });

    // Transport variables
    let recordArmed = false;
    let playing = false;

    // This buffer will later be assigned a Float32Array / I'd like to keep this intermediate
    // buffer so the audio can be sliced and manipulated with ease later
    let recordBuffer;

    // Declare context, input source and a block processor to pass the input source to the recordBuffer
    const audioCtx = new AudioContext();
    const audioIn = audioCtx.createMediaStreamSource(stream);
    const processor = audioCtx.createScriptProcessor(512, 1, 1);

    // Create a source and a corresponding buffer for playback, then link them
    const myArrayBuffer = audioCtx.createBuffer(1, audioCtx.sampleRate * 10, audioCtx.sampleRate);
    const source = audioCtx.createBufferSource();
    source.buffer = myArrayBuffer;

    // Audio Routing
    audioIn.connect(processor);
    source.connect(audioCtx.destination);

    // When recording is armed, pass the samples of the block one at a time to the record buffer
    processor.onaudioprocess = ((audioProcessingEvent) => {
      let inputBuffer = audioProcessingEvent.inputBuffer;
      let i = 0;
      if (recordArmed) {
        for (let channel = 0; channel < inputBuffer.numberOfChannels; channel++) {
          let inputData = inputBuffer.getChannelData(channel);
          let avg = 0;
          inputData.forEach(sample => {
            recordBuffer.set([sample], i);
            i++;
          });
        }
      }
      else {
        i = 0;
      }
    });
  })

Play raw audio with JavaScript

I have a stream of numbers like this
-0.00015259254737998596,-0.00009155552842799158,0.00009155552842799158,0.00021362956633198035,0.0003662221137119663,0.0003967406231879635,0.00024414807580797754,0.00012207403790398877,0.00012207403790398877,0.00012207403790398877,0.0003357036042359691,0.0003357036042359691,0.00018311105685598315,0.00003051850947599719,0,-0.00012207403790398877,0.00006103701895199438,0.00027466658528397473,0.0003967406231879635,0.0003967406231879635,0.0003967406231879635,0.0003967406231879635,0.0003967406231879635,0.0003662221137119663,0.0004882961516159551,0.0004577776421399579,0.00027466658528397473,0.00003051850947599719,-0.00027466658528397473....
Which supposedly represents an audio stream. I got the numbers from here, and I've transmitted them over the web; now I'm trying to play the actual sound. I got a snippet from here, but I'm getting Uncaught (in promise) DOMException: Unable to decode audio data.
I feel like I'm missing quite a lot; I just expect this to work like magic, and that may simply not be the case.
My code
var ws = new WebSocket("ws://....");
ws.onmessage = function (event) {
  playByteArray(event.data);
}

var context = new AudioContext();
var buf; // holds the most recently decoded AudioBuffer

function playByteArray(byteArray) {
  var arrayBuffer = new ArrayBuffer(byteArray.length);
  var bufferView = new Uint8Array(arrayBuffer);
  for (var i = 0; i < byteArray.length; i++) {
    bufferView[i] = byteArray[i];
  }
  context.decodeAudioData(arrayBuffer, function (buffer) {
    buf = buffer;
    play();
  });
}

// Play the loaded file
function play() {
  // Create a source node from the buffer
  var source = context.createBufferSource();
  source.buffer = buf;
  // Connect to the final output node (the speakers)
  source.connect(context.destination);
  // Play immediately
  source.start(0);
}
And the broadcasting part
var ws = new WebSocket("ws://.....");

window.addEventListener("audioinput", function onAudioInput(evt) {
  if (ws) {
    ws.send(evt.data);
  }
}, false);

audioinput.start({
  bufferSize: 8192
});
It doesn't look like you're dealing with compatible audio data formats. The code you linked to is for playing byte arrays, in which case your audio data should be a (much longer) string of integers from 0 to 255.
What you've got is a fairly short (as audio data goes) string of floating-point numbers. I can't tell what audio format that's supposed to be, but it would require a different player.
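If the numbers really are raw PCM samples in the [-1, 1] range, you don't need decodeAudioData at all: you can copy them straight into an AudioBuffer and play that. A minimal sketch, assuming mono audio; the 44100 sample rate is an assumption and must match whatever rate the capture side used:

var context = new AudioContext();

// samples: a Float32Array of raw PCM values in [-1, 1] (assumed mono)
function playFloatSamples(samples) {
  var sampleRate = 44100; // assumption: must match the sender's capture rate
  var buffer = context.createBuffer(1, samples.length, sampleRate);
  buffer.copyToChannel(samples, 0);
  var source = context.createBufferSource();
  source.buffer = buffer;
  source.connect(context.destination);
  source.start(0);
}

// e.g. if each WebSocket message is a comma-separated string of numbers:
// playFloatSamples(Float32Array.from(event.data.split(','), Number));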

JavaScript: Audio from local file not playing without HTML "<audio>" element present

Now this one really has me stumped, so I was hoping to see if anyone could spot where I am doing things incorrectly.
So essentially, I have a page with two elements. One is an HTML5 file handler, and the second is a button. When the user selects a file I respond to the onchange event that is generated, decoding the audio and constructing a buffer to be used. I know there is the HTML5 audio tag, but this is going to be a utility that needs to be able to break up the file into manageable chunks.
I have done several tests and have found that the audio I decode myself will only play after an audio element on the page has been played. I have absolutely no idea what could be causing this behavior, since I have studied several online examples of playing audio. I will include my audio engine below.
Just to clarify, everything is handled by this code.
Thank you.
"use strict";
var AudioFile = function (){
this.length = 0; // Number of samples
this.duration = 0; // Time in seconds
this.sampleRate = 0; // Sample rate
this.channels = 0; // Number of channels
this.data = []; //Audio/Waveform data
};
var audioCtx = null;
class AudioEngine {
constructor (){
// All of the necessary audio control variables
if(!audioCtx){
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audioCtx = new AudioContext();
}
// Will hold audio data waiting to be played
this.buffer = null;
// This will hold the decoded audio file upon completion
this.decodedFile = new AudioFile();
// Automatically create buffer upon finished decoding?
this.autoCreateBuffer = true;
// Specify this if you want to have a function recieve
// the decoded audio, i.e. completionCallback(decodedFile);
this.completionCallback = null;
}
// This will decode an audio file
fileCallback (event){
console.log("file callback");
this.buffer = null;
var reader = new FileReader();
var file = event.target.files[0];
reader.onload = this.loadCallback.bind(this);
reader.readAsArrayBuffer(file);
}
// Called by fileCallback after file has been loaded
loadCallback (file){
console.log("load callback");
var raw = file.target.result;
audioCtx.decodeAudioData(raw, this.decodeCallback.bind(this));
}
// Called by loadCallback after file has been decoded
decodeCallback (data){
console.log("decode callback");
var audioTemp = new AudioFile();
audioTemp.length = data.length;
audioTemp.duration = data.duration;
audioTemp.sampleRate = data.sampleRate;
audioTemp.channels = data.numberOfChannels;
var arr = [];
for(var i = 0; i < data.numberOfChannels; i++){
arr.push(new Float32Array(data.length));
data.copyFromChannel(arr[i], i);
}
audioTemp.data = arr.slice(0);
this.decodedFile = audioTemp;
if(this.autoCreateBuffer){
var buffer = audioCtx.createBuffer(audioTemp.channels, audioTemp.length, audioTemp.sampleRate);
var samples;
for(var c = 0; c < audioTemp.channels; c++){
samples = buffer.getChannelData(c);
for(var i = 0; i < audioTemp.length; i++){
samples[i] = this.decodedFile.data[c][i];
}
}
this.buffer = buffer;
}
if(this.completionCallback){
this.completionCallback(audioTemp);
}
}
// Play data that is in buffer
play(){
if(this.buffer){
var source = audioCtx.createBufferSource();
var tmp = this.buffer.getChannelData(0);
source.buffer = this.buffer;
source.connect(audioCtx.destination);
source.start(0);
console.log("play");
}
}
}
Okay, I have figured out what seems to be the problem. This code is currently in the testing phase, and the issue appears to have something to do with having the console open in Firefox, which interferes with sound being played. Although it is not entirely consistent in how it behaves, at least I know the general cause of the problem I was having.
In other words, "audio" elements have no problems whether the console is open or not, but there seems to be undefined behavior when opening/minimizing the console with JavaScript-controlled audio. However, it always behaves as expected when the console is closed.
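For what it's worth, another thing worth ruling out with a "works only after an <audio> element has played" symptom is the browser's autoplay policy: an AudioContext created without a user gesture often starts in the "suspended" state until something resumes it. A small sketch of the check, where playButton and engine are hypothetical names for the page's button and an AudioEngine instance:

playButton.onclick = function () {
  if (audioCtx.state === "suspended") {
    // Resume on a user gesture, then play
    audioCtx.resume().then(function () { engine.play(); });
  } else {
    engine.play();
  }
};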

Scheduling Web Audio API playback, multiple plays issue

I am trying to schedule the beep sound to play 3 times, one second apart. However, the sound is only playing once. Any thoughts on why this might be? (It's included within a larger JavaScript function that declares context, etc.)
var beepBuffer;

var loadBeep = function() {
  var getSound = new XMLHttpRequest();                   // Load the Sound with XMLHttpRequest
  getSound.open("GET", "/static/music/chime.wav", true); // Path to Audio File
  getSound.responseType = "arraybuffer";                 // Read as Binary Data
  getSound.onload = function() {
    context.decodeAudioData(getSound.response, function(buffer) {
      beepBuffer = buffer; // Decode the Audio Data and Store it in a Variable
    });
  }
  getSound.send(); // Send the Request and Load the File
}

var playBeep = function() {
  for (var j = 0; j < 3; j++) {
    var beeper = context.createBufferSource(); // Declare a New Sound
    beeper.buffer = beepBuffer;                // Attach our Audio Data as its Buffer
    beeper.connect(context.destination);       // Link the Sound to the Output
    console.log(j);
    beeper.start(j);                           // Play the Sound Immediately
  }
};
Close, and the other answer's code will work, but it's not the synchronicity: the problem is that you're not adding context.currentTime to the start time. start() doesn't take an offset from "now"; it takes an absolute time on the context's clock. Add context.currentTime to the start parameter, and you should be good.
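In code, that one-line fix looks like this (a sketch, using the one-second spacing the question asked for):

beeper.start(context.currentTime + j); // schedule j seconds from "now" on the context clock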
Your code assumes that beeper.start(j) is a synchronous method, i.e. it waits for the sound to complete playing. This is not the case, so your loop is probably playing all 3 instances at nearly the same exact time.
One solution is to delay the playing of each instance, by passing a time parameter to the start() method:
var numSecondsInBeep = 3;
for (var j = 0; j < 3; j++) {
  var beeper = context.createBufferSource(); // Declare a New Sound
  beeper.buffer = beepBuffer;                // Attach our Audio Data as its Buffer
  beeper.connect(context.destination);       // Link the Sound to the Output
  console.log(j);
  beeper.start(context.currentTime + j * numSecondsInBeep);
}
See here for more info on the start() API.
