The following JavaScript code records sound and generates a blob with audio data every 0.5 seconds.
After recording has stopped, the program plays the first blob, data[0].
I need the audio player to fire an event after data[0] has finished playing, so that an event handler can deliver the next portion to the player: data[1] (then data[2], data[3], etc.).
How can I modify the code, and which objects should I use, to do this?
I know that I could pass the whole data[] array to the audio player, but I need a mechanism that lets the audio player request the next portions through events.
navigator.mediaDevices.getUserMedia({ audio: true })
  .then(function onSuccess(stream) {
    const recorder = new MediaRecorder(stream);
    const data = [];

    recorder.ondataavailable = (e) => {
      data.push(e.data);
    };

    recorder.start(500); // will fire the 'dataavailable' event every 0.5 seconds

    recorder.onstop = (e) => {
      const audio = document.createElement('audio');
      audio.src = window.URL.createObjectURL(new Blob(data[0]));
    };

    setTimeout(() => {
      rec.stop();
    }, 5000);
  })
  .catch(function onError(error) {
    console.log(error.message);
  });
I guess that's what you're looking for?
navigator.mediaDevices
  .getUserMedia({ audio: true })
  .then(function onSuccess(stream) {
    // create the audio element and play the live stream in it
    const audio = document.createElement('audio');
    audio.srcObject = stream; // pass the audio stream
    audio.controls = true;
    audio.play();
    document.body.appendChild(audio);

    const recorder = new MediaRecorder(stream);
    const data = [];

    // Set the event listener.
    // 'dataavailable' fires when you call stop() or requestData(), and after each timeslice you pass to start().
    recorder.ondataavailable = e => data.push(e.data);

    // Start recording; this will generate a blob every 500 ms
    recorder.start(500);
  })
  .catch(function onError(error) {
    console.log(error.message);
  });
You had some mistakes to correct:
Calling start() with a timeslice parameter fires dataavailable every timeslice milliseconds, but the final chunk only arrives once you stop the recorder, so onstop is where you can assemble the complete blob. Note also that the Blob constructor takes an array of parts, so it should be new Blob([data[0]]), or simply data[0] itself.
You used the wrong variable name in the setTimeout callback: rec instead of recorder.
You recreate an audio player every time the recorder stops and never append it to the DOM.
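As for the pull mechanism the question asks about, a common tool is the MediaSource API: chunks recorded with a timeslice are generally not playable on their own (only the first one carries the container header), but they can be appended one after another to a SourceBuffer, which fires an updateend event after each append that a handler can use to fetch the next portion. A rough sketch, assuming data[] already holds the chunks and that the recorder produced webm/opus:

const audio = document.createElement('audio');
document.body.appendChild(audio);

const mediaSource = new MediaSource();
audio.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', () => {
  // the codec string must match what MediaRecorder actually produced
  const sourceBuffer = mediaSource.addSourceBuffer('audio/webm;codecs=opus');
  let index = 0;

  const appendNext = () => {
    if (index >= data.length) {
      mediaSource.endOfStream(); // no more portions to deliver
      return;
    }
    data[index++].arrayBuffer().then(buf => sourceBuffer.appendBuffer(buf));
  };

  // each completed append requests the next portion
  sourceBuffer.addEventListener('updateend', appendNext);
  appendNext();
  audio.play();
});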
I am using MediaStream Recording API to record audio in the browser, like this (courtesy https://github.com/bryanjenningz/record-audio):
const recordAudio = () =>
  new Promise(async resolve => {
    // This wants to be secure. It will throw unless served from https:// or localhost.
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const mediaRecorder = new MediaRecorder(stream);
    let audioChunks = [];

    mediaRecorder.addEventListener('dataavailable', event => {
      audioChunks.push(event.data);
      console.log("Got audioChunk!!", event.data.size, event.data.type);
      // mediaRecorder.requestData()
    });

    const start = () => {
      audioChunks = [];
      mediaRecorder.start(1000); // milliseconds per recorded chunk
    };

    const stop = () =>
      new Promise(resolve => {
        mediaRecorder.addEventListener('stop', () => {
          const audioBlob = new Blob(audioChunks, { type: 'audio/mpeg' });
          const audioUrl = URL.createObjectURL(audioBlob);
          const audio = new Audio(audioUrl);
          const play = () => audio.play();
          resolve({ audioChunks, audioBlob, audioUrl, play });
        });
        mediaRecorder.stop();
      });

    resolve({ start, stop });
  });
I would like to modify this code to start streaming to Node.js while it's still recording. I understand the header won't be complete until the recording has finished. I can either account for that on the Node.js side, or perhaps I can live with invalid headers, because I'll be feeding this into ffmpeg on Node.js anyway. How do I do this?
The trick is to start your recorder with mediaRecorder.start(timeSlice), where timeSlice is the number of milliseconds the browser waits before emitting a dataavailable event with a blob of data.
Then, in your dataavailable event handler, you call the server:
mediaRecorder.addEventListener('dataavailable', event => {
  myHTTPLibrary.post(event.data);
});
That's the general solution. It's not possible to embed a working example here, because a code sandbox can't ask for access to your webcam, but I've created one here. It simply sends your data to Request Bin, where you can watch the data stream in.
There are some other things you'll need to think about if you want to stitch the video or audio back together; the blog post touches on that.
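On the Node.js side, the simplest receiver pipes every incoming chunk straight into ffmpeg's stdin, which copes with the incomplete header. A minimal sketch using Express; the endpoint name, port, output file, and ffmpeg flags are illustrative assumptions, not part of the original setup:

// server.js — assumes `npm install express` and ffmpeg available on the PATH
const express = require('express');
const { spawn } = require('child_process');

const app = express();

// one long-running ffmpeg process; everything written to its stdin is transcoded to out.mp3
const ffmpeg = spawn('ffmpeg', ['-i', 'pipe:0', '-f', 'mp3', 'out.mp3']);

app.post('/audio-chunk', (req, res) => {
  // each request body is one recorded chunk; forward it unchanged
  req.on('data', piece => ffmpeg.stdin.write(piece));
  req.on('end', () => res.sendStatus(204));
});

app.listen(3000);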
I'm posting this because I couldn't find an answer that works.
So what I'm trying to do is let a visitor to the site play a sound by clicking on an element, and let them record this. (Please refer to the code below.)
const audio1 = new Audio('https://sun.sw3web.com/wp-content/uploads/sw3/assets/audio/beep1.mp3');
const audio2 = new Audio('https://sun.sw3web.com/wp-content/uploads/sw3/assets/audio/beep2.mp3');
const audio3 = new Audio('https://sun.sw3web.com/wp-content/uploads/sw3/assets/audio/beep3.mp3');
const audio4 = new Audio('https://sun.sw3web.com/wp-content/uploads/sw3/assets/audio/beep4.mp3');
$('#btn1').click( () => audio1.play() );
$('#btn2').click( () => audio2.play() );
$('#btn3').click( () => audio3.play() );
$('#btn4').click( () => audio4.play() );
So I need a way to record the audio that originates from the browser and goes to the speakers.
I cannot record the audio from the microphone, since the visitor might use a handsfree headset, and then there would be no sound to record.
The problem I'm facing is how to get a stream object that I can attach to a MediaRecorder object.
I tried using the MediaRecorder class but couldn't find a way to obtain a stream from the standard audio output device. This is my code so far; it only records the input from the microphone.
//Play module
jQuery(document).ready(function ($) {
  const audio1 = new Audio('https://sun.sw3web.com/wp-content/uploads/sw3/assets/audio/beep1.mp3');
  const audio2 = new Audio('https://sun.sw3web.com/wp-content/uploads/sw3/assets/audio/beep2.mp3');
  const audio3 = new Audio('https://sun.sw3web.com/wp-content/uploads/sw3/assets/audio/beep3.mp3');
  const audio4 = new Audio('https://sun.sw3web.com/wp-content/uploads/sw3/assets/audio/beep4.mp3');

  $('#btn1').click(() => audio1.play());
  $('#btn2').click(() => audio2.play());
  $('#btn3').click(() => audio3.play());
  $('#btn4').click(() => audio4.play());

  //Recording module
  //-Steps
  //-- 1) Start recording the audio
  //-- 2) While recording, store the audio data chunks
  //-- 3) Stop recording the audio
  //-- 4) Convert the audio data chunks to a single audio data blob
  //-- 5) Create a URL for that single audio data blob
  //-- 6) Play the audio

  //API to handle audio recording
  var audioRecorder = {
    /** Stores the recorded audio as Blob objects of audio data as the recording continues */
    audioBlobs: [], /*of type Blob[]*/
    /** Stores the reference of the MediaRecorder instance that handles the MediaStream when recording starts */
    mediaRecorder: null, /*of type MediaRecorder*/
    /** Stores the reference to the stream currently capturing the audio */
    streamBeingCaptured: null, /*of type MediaStream*/

    /** Start recording the audio
     * @returns {Promise} - returns a promise that resolves if audio recording successfully started
     */
    start: function () {
      //Feature detection
      if (!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia)) {
        //Feature is not supported in this browser; return a custom error
        return Promise.reject(new Error('mediaDevices API or getUserMedia method is not supported in this browser.'));
      }
      else {
        //Feature is supported; create an audio stream
        return navigator.mediaDevices.getUserMedia({ audio: true } /*of type MediaStreamConstraints*/)
          //returns a promise that resolves to the audio stream
          .then(stream /*of type MediaStream*/ => {
            //save the reference of the stream to be able to stop it when necessary
            audioRecorder.streamBeingCaptured = stream;
            //create a media recorder instance by passing that stream into the MediaRecorder constructor
            /*the MediaRecorder interface of the MediaStream Recording API provides functionality to easily record media*/
            audioRecorder.mediaRecorder = new MediaRecorder(stream);
            //clear previously saved audio Blobs, if any
            audioRecorder.audioBlobs = [];
            //add a dataavailable event listener in order to store the audio data Blobs while recording
            audioRecorder.mediaRecorder.addEventListener("dataavailable", event => {
              //store audio Blob object
              audioRecorder.audioBlobs.push(event.data);
            });
            //start the recording by calling the start method on the media recorder
            audioRecorder.mediaRecorder.start();
          });
      }
    },
    /** Stop the started audio recording
     * @returns {Promise} - returns a promise that resolves to the audio as a blob file
     */
    stop: function () {
      //return a promise that resolves to the blob of the recording
      return new Promise((resolve, reject) => {
        //save the audio type to set the Blob type with
        let mimeType = 'audio/webm';
        if (MediaRecorder.isTypeSupported(mimeType)) {
          //listen to the stop event in order to create & return a single Blob object
          audioRecorder.mediaRecorder.addEventListener("stop", () => {
            //create a single blob object, as we might have gathered a few Blob objects that need to be joined as one
            let audioBlob = new Blob(audioRecorder.audioBlobs, { type: mimeType });
            //resolve the promise with the single audio blob representing the recorded audio
            resolve(audioBlob);
          });
          //stop the recording feature
          audioRecorder.mediaRecorder.stop();
          //stop all the tracks on the active stream in order to stop the stream
          audioRecorder.stopStream();
          //reset API properties for the next recording
          audioRecorder.resetRecordingProperties();
        }
        else {
          //mime type is not supported: cancel the recording and reject the promise
          this.cancel();
          reject(new Error('Mimetype ' + mimeType + ' is not supported in this browser'));
        }
      });
    },

    /** Stop all the tracks on the active stream in order to stop the stream and remove
     * the red flashing dot showing in the tab
     */
    stopStream: function () {
      //stop the capture request by stopping all the tracks on the active stream
      audioRecorder.streamBeingCaptured.getTracks() //get all tracks from the stream
        .forEach(track /*of type MediaStreamTrack*/ => track.stop()); //stop each one
    },

    /** Reset all the recording properties, including the media recorder and the stream being captured */
    resetRecordingProperties: function () {
      audioRecorder.mediaRecorder = null;
      audioRecorder.streamBeingCaptured = null;
      /*No need to remove the event listeners attached to mediaRecorder: once the object is
        reference-free, it is picked up by the garbage collector along with any event
        handlers/listeners associated with it; getEventListeners(audioRecorder.mediaRecorder)
        would return an empty array of events.*/
    },

    /** Cancel audio recording */
    cancel: function () {
      //stop the recording feature
      audioRecorder.mediaRecorder.stop();
      //stop all the tracks on the active stream in order to stop the stream
      audioRecorder.stopStream();
      //reset API properties for the next recording
      audioRecorder.resetRecordingProperties();
    }
  };// End of audioRecorder object
  /*----------------------------*/
  /* Controller code */
  /*----------------------------*/

  /** Starts the audio recording */
  function startAudioRecording() {
    //start recording using the audio recording API
    audioRecorder.start()
      .then(() => { //on success
        console.log("Recording Audio...");
      })
      .catch(error => { //on error
        //no-browser-support error
        if (error.message.includes("mediaDevices API or getUserMedia method is not supported in this browser.")) {
          console.log("To record audio, use browsers like Chrome and Firefox.");
        }
        //Error handling structure
        switch (error.name) {
          case 'AbortError': //error from navigator.mediaDevices.getUserMedia
            console.log("An AbortError has occurred.");
            break;
          case 'NotAllowedError': //error from navigator.mediaDevices.getUserMedia
            console.log("A NotAllowedError has occurred. User might have denied permission.");
            break;
          case 'NotFoundError': //error from navigator.mediaDevices.getUserMedia
            console.log("A NotFoundError has occurred.");
            break;
          case 'NotReadableError': //error from navigator.mediaDevices.getUserMedia
            console.log("A NotReadableError has occurred.");
            break;
          case 'SecurityError': //error from navigator.mediaDevices.getUserMedia or from MediaRecorder.start
            console.log("A SecurityError has occurred.");
            break;
          case 'TypeError': //error from navigator.mediaDevices.getUserMedia
            console.log("A TypeError has occurred.");
            break;
          case 'InvalidStateError': //error from MediaRecorder.start
            console.log("An InvalidStateError has occurred.");
            break;
          case 'UnknownError': //error from MediaRecorder.start
            console.log("An UnknownError has occurred.");
            break;
          default:
            console.log("AudioRecording: An error occurred " + error);
        }
      });
  }//end of the start recording handler

  /** Stops the audio recording and plays the result back */
  function stopAudioRecording() {
    //stop the recording using the audio recording API
    console.log("Stopping Audio Recording...");
    audioRecorder.stop()
      .then(audioAsBlob => { //stopping makes the promise resolve to the blob of the recorded audio
        console.log("stopped with audio Blob: " + audioAsBlob);
        let audioUrl = URL.createObjectURL(audioAsBlob); //a DOMString containing an object URL that can be used to reference the contents of the blob
        let audio = new Audio(audioUrl);
        audio.play() //returns a Promise that is resolved when playback has successfully started
          .then(() => { console.log('Audio is playing'); })
          .catch(error => { console.log('Playback error ' + error.name); });
      })
      .catch(error => {
        //Error handling structure
        switch (error.name) {
          case 'InvalidStateError': //error from MediaRecorder.stop
            console.log("stopAudioRecording: An InvalidStateError has occurred.");
            break;
          default:
            console.log("stopAudioRecording: An error occurred " + error);
        }
      });
  }//end of the stop recording function

  /** Cancels the currently started audio recording */
  function cancelAudioRecording() {
    console.log("Canceling audio...");
    //cancel the recording using the audio recording API
    audioRecorder.cancel();
    //do something after the audio recording is cancelled
  }

  /*----------------------------*/
  /* View code */
  /*----------------------------*/
  $('#btnr').on('click', function () {
    startAudioRecording();
  });
  $('#btns').on('click', function () {
    stopAudioRecording();
  });
  $('#btnc').on('click', function () {
    cancelAudioRecording();
  });
});//end of the document-ready handler
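For reference, the Web Audio API can provide exactly the stream the question is looking for: route each Audio element through an AudioContext into a MediaStreamAudioDestinationNode, whose stream property is a MediaStream that MediaRecorder accepts. A rough sketch of the idea, untested; note that createMediaElementSource outputs silence for cross-origin media unless the files are served with CORS headers and the elements have crossOrigin set:

const ctx = new AudioContext(); // browsers may require a user gesture before the context starts
const dest = ctx.createMediaStreamDestination(); // dest.stream is a recordable MediaStream

[audio1, audio2, audio3, audio4].forEach(a => {
  const source = ctx.createMediaElementSource(a);
  source.connect(ctx.destination); // keep playing through the speakers
  source.connect(dest);            // ...and also feed the recordable stream
});

// record what the page plays instead of the microphone
const recorder = new MediaRecorder(dest.stream);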
I am trying to build an Internet Radio platform and I have battled a lot with the problem mentioned in the title.
To explain further, what I am trying to achieve is: 1) while recording input from the broadcaster's microphone, to mix it with audio from music playback, and 2) at the same time be able to lower or raise the volume of the music playback (also in real time, through the UI) so that the broadcaster's voice can blend with the music.
This is to imitate the usual radio-broadcaster behavior, where the music volume drops when the person wants to speak and rises again when they finish talking! The 2nd feature definitely comes after the 1st, but I guess mentioning it helps explain both.
To conclude, I have already managed to write code that receives and reproduces microphone input (though it doesn't work perfectly!). At this point I need to know whether there is code or a library that can help me do exactly what I am trying to do. All this is done in the hope that I won't need to use Icecast etc.
Below is my code for getting microphone input:
// getting microphone input and sending it to our server
var recordedChunks = [];
var mediaRecorder = null;
var _stream = null;     // the microphone stream, saved once permission is granted
var micInterval = null; // handle of the record-and-send loop, so we can stop it
let slice = 100;        // how frequently we capture sound (ms)
const slices = 20;      // 20 * 100 ms => 2 s per file
let sendfreq = slice * slices; // how frequently we send it

/* get microphone button handle */
var microphoneButton = document.getElementById('console-toggle-microphone');
microphoneButton.setAttribute('on', 'no');

/* initialise mic streaming capability */
navigator.mediaDevices.getUserMedia({ audio: true, video: false }).then(stream => {
  _stream = stream;
})
.catch(function (err) {
  show_error('Error: Microphone access has probably been denied!', err);
});

function toggle_mic() {
  if (microphoneButton.getAttribute('on') == 'yes') {
    clearInterval(micInterval); // stop the record-and-send loop
    microphoneButton.setAttribute('on', 'no');
    microphoneButton.innerHTML = 'start mic';
  }
  else if (microphoneButton.getAttribute('on') == 'no') {
    microphoneButton.setAttribute('on', 'yes');
    microphoneButton.innerHTML = 'stop mic';

    function record_and_send() {
      const recorder = new MediaRecorder(_stream);
      const chunks = [];
      recorder.ondataavailable = e => chunks.push(e.data);
      recorder.onstop = e => socket.emit('console-mic-chunks', chunks);
      setTimeout(() => recorder.stop(), sendfreq); // we'll have a 2 s media file
      recorder.start();
    }

    // generate a new file every 2 s
    micInterval = setInterval(record_and_send, sendfreq);
  }
}
Thanks a lot!
If the audio track from the microphone doesn't need to be synchronized with the audio playback (and I don't see any reason why it would be), you can just play two separate audio instances and change the volume of the one underneath (the music playback, in your case).
In short, you don't have to mix audio tracks or do anything complex to solve this task.
Draft example:
<input type="range" id="myRange" value="20" oninput="changeVol(this.value)" onchange="changeVol(this.value)">
// Audio playback
const audioPlayback = new Audio();
const audioPlaybackSrc = document.createElement("source");
audioPlaybackSrc.type = "audio/mpeg";
audioPlaybackSrc.src = "path/to/audio.mp3";
audioPlayback.appendChild(audioPlaybackSrc);
audioPlayback.play();
// Change volume for audio playback on the fly
function changeVol(newVolumeValue) {
audioPlayback.volume = newVolumeValue;
}
// Dealing with the microphone
navigator.mediaDevices.getUserMedia({
audio: true
})
.then(stream => {
// Start recording the audio
const mediaRecorder = new MediaRecorder(stream);
mediaRecorder.start();
// While recording, store the audio data chunks
const audioChunks = [];
mediaRecorder.addEventListener("dataavailable", event => {
audioChunks.push(event.data);
});
// Play the audio after stop
mediaRecorder.addEventListener("stop", () => {
const audioBlob = new Blob(audioChunks);
const audioUrl = URL.createObjectURL(audioBlob);
const audio = new Audio(audioUrl);
audio.play();
});
// Stop recording the audio
setTimeout(() => {
mediaRecorder.stop();
}, 3000);
});
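If you do eventually need the voice and the music as one stream (for example, to send a single mixed feed to the server for broadcasting), the Web Audio API can mix them: a GainNode on the music branch gives you the volume ducking, and a MediaStreamAudioDestinationNode joins both branches into a recordable stream. A rough sketch, untested; the node types are standard Web Audio API, the rest is illustrative:

const ctx = new AudioContext();
const mixed = ctx.createMediaStreamDestination(); // mixed.stream feeds MediaRecorder

// music branch, with a gain node so the UI can duck it
const music = ctx.createMediaElementSource(audioPlayback);
const musicGain = ctx.createGain();
music.connect(musicGain).connect(mixed);
musicGain.connect(ctx.destination); // keep hearing the music locally

// microphone branch
navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => {
  ctx.createMediaStreamSource(stream).connect(mixed);
  const recorder = new MediaRecorder(mixed.stream); // one stream with both signals
  recorder.start(1000);
});

// the volume slider now drives the gain instead of audioPlayback.volume
function changeVol(newVolumeValue) {
  musicGain.gain.value = newVolumeValue / 100;
}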
Play multiple audio files simultaneously
Change audio volume with JS
How to record and play audio in JavaScript
I’ve created a minimal WebRTC test site that is able to request the user’s webcam/audio stream, to record it, and to play back the recording after it has been stopped.
Demo: https://output.jsbin.com/tabosipefo/
Edit1: https://jsbin.com/tabosipefo/edit?html,console,output
Since this all happens within one promise chain started by navigator.mediaDevices.getUserMedia(), I was wondering whether it is actually possible to detect an ongoing stream and (a) record it, and (b) stop and save it.
¹ WebRTC does not work in jsbin when in edit view, for some reason...
If you use no framework and want to use vanilla JS, your best bet is to tack the stream object onto the global window.
Preview stream
const showWebcamStream = () => {
  navigator.mediaDevices
    .getUserMedia({ audio: true, video: true })
    .then(stream => {
      window.localStream = stream; // ⭠ tack it to the window object

      // grab the <video> object
      const video = document.querySelector("#video-preview");
      video.srcObject = stream;

      // Display stream
      video.onloadedmetadata = () => video.play();
    })
    .catch(err => console.log(err.name, err.message));
};
Now the video will be displayed within the video element (id: #video-preview).
Stop Stream(s)
const hideWebcamStream = () => localStream.getTracks().forEach(track => track.stop());
You should put the mediaRecorder in the window object in order to stop it later.
Record Stream
const startWebcamRecorder = () => {
  // check if localStream is in window and if it is active
  if ("localStream" in window && localStream.active) {
    // also save the mediaRecorder to window in order to stop it independently
    window.mediaRecorder = new MediaRecorder(localStream);
    window.dataChunks = [];
    mediaRecorder.start();
    console.log(mediaRecorder.state);
    mediaRecorder.ondataavailable = e => dataChunks.push(e.data);
  }
};
Stop Recording and Preview the recording
You need another video element, #video-playback, to play back your recording.
const stopWebcamRecorder = () => {
  if ("mediaRecorder" in window && mediaRecorder.state === "recording") {
    mediaRecorder.stop();
    console.log(mediaRecorder.state);

    mediaRecorder.onstop = () => {
      let blob = new Blob(dataChunks, { type: "video/mp4" });
      dataChunks = [];
      let videoURL = window.URL.createObjectURL(blob);
      const videoPlayback = document.getElementById("video-playback");
      videoPlayback.src = videoURL;
    };
  }
};
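One caveat: the Blob type above is only a label; the actual container is whatever MediaRecorder produced, and browsers differ in what they can record. A small sketch of picking a supported container up front (the candidate list is just an example):

// ask the browser which container it can actually record into
const candidates = ["video/webm;codecs=vp9,opus", "video/webm", "video/mp4"];
const mimeType = candidates.find(t => MediaRecorder.isTypeSupported(t));

window.mediaRecorder = new MediaRecorder(localStream, mimeType ? { mimeType } : {});
// ...and reuse the recorder's own type when building the Blob:
// new Blob(dataChunks, { type: mediaRecorder.mimeType })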
I just recorded a piece of audio and I want to play it with pure JavaScript code.
So this is my code:
navigator.getUserMedia({ audio: true }, function (stream) {
  var recorder = new MediaRecorder(stream);
  recorder.start(1000);
  recorder.ondataavailable = function (e) {
    console.log(e.data);
    // var buffer = new Blob([e.data], { type: "video/webm" });
  };
});
What do I have to do in ondataavailable so that I can play the audio chunks stored in memory, without an audio or video tag in the HTML?
I don't really see why you don't want an audio or video element, but anyway, the first steps are the same.
The MediaRecorder's dataavailable event will fire at regular intervals, and its data property will contain a chunk of the recorded media.
You need to store these chunks in order to be able to merge them into a single Blob at the end of the recording.
To merge them, you simply call new Blob(chunks_array), where chunks_array is an Array containing all the chunk Blobs you got from the events' data property.
Once you've got this final Blob, you can use it as normal media, e.g. play it in a MediaElement via the URL.createObjectURL method, or convert it to an ArrayBuffer and decode it through the Web Audio API, or whatever other way you'd like.
navigator.mediaDevices.getUserMedia({ audio: true })
  .then(recordStream)
  .catch(console.error);

function recordStream(stream) {
  const chunks = []; // an Array to store all our chunks
  const rec = new MediaRecorder(stream);
  rec.ondataavailable = e => chunks.push(e.data);
  rec.onstop = e => {
    stream.getTracks().forEach(s => s.stop());
    finalize(chunks);
  };
  rec.start();
  setTimeout(() => rec.stop(), 5000); // stop the recorder in 5 s
}

function finalize(chunks) {
  const blob = new Blob(chunks);
  playMedia(blob);
}

function playMedia(blob) {
  const ctx = new AudioContext();
  const fileReader = new FileReader();
  fileReader.onload = e => ctx.decodeAudioData(fileReader.result)
    .then(buf => {
      btn.onclick = e => {
        const source = ctx.createBufferSource();
        source.buffer = buf;
        source.connect(ctx.destination);
        source.start(0);
      };
      btn.disabled = false;
    });
  fileReader.readAsArrayBuffer(blob);
}
<button id="btn" disabled>play</button>
And as a plnkr, for Chrome and its heavy iframe restrictions.