I am designing a simple web page to record audio from the systems microphone and upload it to the server using php. However on pressing record button, I am getting the following exception:
app.js:44 Uncaught TypeError: Cannot read property 'getUserMedia' of undefined
at HTMLButtonElement.startRecording
My HTML Code:
<div id="controls">
<button id="recordButton">Record</button>
<button id="pauseButton" disabled>Pause</button>
<button id="stopButton" disabled>Stop</button>
</div>
<div id="formats">Format: start recording to see sample rate</div>
<p><strong>Recordings:</strong></p>
<ol id="recordingsList"></ol>
<!-- inserting these scripts at the end to be able to use all the elements in the DOM -->
<script src="https://cdn.rawgit.com/mattdiamond/Recorderjs/08e7abd9/dist/recorder.js"></script>
<script src="js/app.js"></script>
My JavaScript Code:
//webkitURL is deprecated but nevertheless
URL = window.URL || window.webkitURL;
var gumStream; //stream from getUserMedia()
var rec; //Recorder.js object
var input; //MediaStreamAudioSourceNode we'll be recording
// shim for AudioContext when it's not avb.
var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext //audio context to help us record
var recordButton = document.getElementById("recordButton");
var stopButton = document.getElementById("stopButton");
var pauseButton = document.getElementById("pauseButton");
//add events to those 2 buttons
recordButton.addEventListener("click", startRecording);
stopButton.addEventListener("click", stopRecording);
pauseButton.addEventListener("click", pauseRecording);
function startRecording() {
console.log("recordButton clicked");
/*
Simple constraints object, for more advanced audio features see
https://addpipe.com/blog/audio-constraints-getusermedia/
*/
var constraints = { audio: true, video:false }
/*
Disable the record button until we get a success or fail from getUserMedia()
*/
recordButton.disabled = true;
stopButton.disabled = false;
pauseButton.disabled = false
/*
We're using the standard promise based getUserMedia()
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
*/
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
console.log("getUserMedia() success, stream created, initializing Recorder.js ...");
/*
create an audio context after getUserMedia is called
sampleRate might change after getUserMedia is called, like it does on macOS when recording through AirPods
the sampleRate defaults to the one set in your OS for your playback device
*/
audioContext = new AudioContext();
//update the format
document.getElementById("formats").innerHTML="Format: 1 channel pcm # "+audioContext.sampleRate/1000+"kHz"
/* assign to gumStream for later use */
gumStream = stream;
/* use the stream */
input = audioContext.createMediaStreamSource(stream);
/*
Create the Recorder object and configure to record mono sound (1 channel)
Recording 2 channels will double the file size
*/
rec = new Recorder(input,{numChannels:1})
//start the recording process
rec.record()
console.log("Recording started");
}).catch(function(err) {
//enable the record button if getUserMedia() fails
recordButton.disabled = false;
stopButton.disabled = true;
pauseButton.disabled = true
});
}
function pauseRecording(){
console.log("pauseButton clicked rec.recording=",rec.recording );
if (rec.recording){
//pause
rec.stop();
pauseButton.innerHTML="Resume";
}else{
//resume
rec.record()
pauseButton.innerHTML="Pause";
}
}
function stopRecording() {
console.log("stopButton clicked");
//disable the stop button, enable the record too allow for new recordings
stopButton.disabled = true;
recordButton.disabled = false;
pauseButton.disabled = true;
//reset button just in case the recording is stopped while paused
pauseButton.innerHTML="Pause";
//tell the recorder to stop the recording
rec.stop();
//stop microphone access
gumStream.getAudioTracks()[0].stop();
//create the wav blob and pass it on to createDownloadLink
rec.exportWAV(createDownloadLink);
}
function createDownloadLink(blob) {
var url = URL.createObjectURL(blob);
var au = document.createElement('audio');
var li = document.createElement('li');
var link = document.createElement('a');
//name of .wav file to use during upload and download (without extendion)
var filename = new Date().toISOString();
//add controls to the <audio> element
au.controls = true;
au.src = url;
//save to disk link
link.href = url;
link.download = filename+".wav"; //download forces the browser to donwload the file using the filename
link.innerHTML = "Save to disk";
//add the new audio element to li
li.appendChild(au);
//add the filename to the li
li.appendChild(document.createTextNode(filename+".wav "))
//add the save to disk link to li
li.appendChild(link);
//upload link
var upload = document.createElement('a');
upload.href="#";
upload.innerHTML = "Upload";
upload.addEventListener("click", function(event){
var xhr=new XMLHttpRequest();
xhr.onload=function(e) {
if(this.readyState === 4) {
console.log("Server returned: ",e.target.responseText);
}
};
var fd=new FormData();
fd.append("audio_data",blob, filename);
xhr.open("POST","upload2.php",true);
xhr.send(fd);
})
li.appendChild(document.createTextNode (" "))//add a space in between
li.appendChild(upload)//add the upload link to li
//add the li element to the ol
recordingsList.appendChild(li);
}
Related
I tried to implement recorder.js. took the code snippets from https://blog.addpipe.com/using-recorder-js-to-capture-wav-audio-in-your-html5-web-site/ .
But after the implementation, whenever I try to record, it gives me a .wav file of 0:00 length. I couldn't find any logical reason behind it. even the console does not showing any error. Did anyone faced this issue before?
my code looks like:
HTML file
<button id="recordButton">Record</button>
<button id="pauseButton" disabled>Pause</button>
<button id="stopButton" disabled>Stop</button>
<h3>Recordings</h3>
<ol id="recordingsList"></ol>
JS file
//webkitURL is deprecated but nevertheless
URL = window.URL || window.webkitURL;
var gumStream;
//stream from getUserMedia()
var rec;
//Recorder.js object
var input;
//MediaStreamAudioSourceNode we'll be recording
// shim for AudioContext when it's not avb.
var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = new AudioContext;
//new audio context to help us record
var recordButton = document.getElementById("recordButton");
var stopButton = document.getElementById("stopButton");
var pauseButton = document.getElementById("pauseButton");
//add events to those 3 buttons
recordButton.addEventListener("click", startRecording);
stopButton.addEventListener("click", stopRecording);
pauseButton.addEventListener("click", pauseRecording);
function startRecording()
{
console.log("recordButton clicked");
/* Simple constraints object, for more advanced audio features see
https://addpipe.com/blog/audio-constraints-getusermedia/ */
var constraints = {
audio: true,
video: false
}
/* Disable the record button until we get a success or fail from getUserMedia() */
recordButton.disabled = true;
stopButton.disabled = false;
pauseButton.disabled = false;
/* We're using the standard promise based getUserMedia()
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia */
navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
console.log("getUserMedia() success, stream created, initializing Recorder.js ...");
/* assign to gumStream for later use */
gumStream = stream;
/* use the stream */
input = audioContext.createMediaStreamSource(stream);
/* Create the Recorder object and configure to record mono sound (1 channel) Recording 2 channels will double the file size */
rec = new Recorder(input, {
numChannels: 1
})
//start the recording process
rec.record();
console.log("Recording started");
}).catch(function (err) {
//enable the record button if getUserMedia() fails
recordButton.disabled = false;
stopButton.disabled = true;
pauseButton.disabled = true;
});
}
function pauseRecording()
{
console.log("pauseButton clicked rec.recording=", rec.recording);
if (rec.recording) {
//pause
rec.stop();
pauseButton.innerHTML = "Resume";
} else {
//resume
rec.record()
pauseButton.innerHTML = "Pause";
}
}
function stopRecording()
{
console.log("stopButton clicked");
//disable the stop button, enable the record too allow for new recordings
stopButton.disabled = true;
recordButton.disabled = false;
pauseButton.disabled = true;
//reset button just in case the recording is stopped while paused
pauseButton.innerHTML = "Pause";
//tell the recorder to stop the recording
rec.stop(); //stop microphone access
gumStream.getAudioTracks()[0].stop();
//create the wav blob and pass it on to createDownloadLink
rec.exportWAV(createDownloadLink);
}
function createDownloadLink(blob)
{
var url = URL.createObjectURL(blob);
var au = document.createElement('audio');
var li = document.createElement('li');
var link = document.createElement('a');
//add controls to the <audio> element
au.controls = true;
au.src = url;
//link the a element to the blob
link.href = url;
link.download = new Date().toISOString() + '.wav';
link.innerHTML = link.download;
//add the new audio and a elements to the li element
li.appendChild(au);
li.appendChild(link);
//add the li element to the ordered list
recordingsList.appendChild(li);
}
I am building an audio recorder and want to manipulate the audio using Python but am recording the audio using JavaScript. Currently I can download the blob file as a .wav using a link in HTML but simply want to transfer the audio to be read in as a .wav file in Python.
I have thought about saving the file as a .wav locally and then reading it in using Flask but would prefer to not do this. Does anyone know how to do one of these methods? Thanks in advance for any help. Below is my code:
app.js:
URL = window.URL || window.webkitURL;
//to stream audio from getUserMedia() from MediaStream Recording API https://developer.mozilla.org/en-US/docs/Web/API/MediaStream_Recording_API
var gumStream;
//this creates mediarecorder object
var rec;
//this is the media stream audio source node to record
var input;
// shim for AudioContext (shim corrects the existing audio context code) for when it is not available
var AudioContext = window.AudioContext || window.webkitAudioContext;
//variable defining audio context
var audioContext
//variables for the record/pause/stop buttons
var recordButton = document.getElementById("recordButton");
var stopButton = document.getElementById("stopButton");
var pauseButton = document.getElementById("pauseButton");
//add events to buttons upon being clicked
recordButton.addEventListener("click", startRecording);
stopButton.addEventListener("click", stopRecording);
pauseButton.addEventListener("click", pauseRecording);
function startRecording() {
console.log("recordButton clicked");
var constraints = { audio: true, video:false }
//disable record button until success/fail is received from getUserMedia()
recordButton.disabled = true;
stopButton.disabled = false;
pauseButton.disabled = false
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
//success in obtaining stream, so create audio context as sample rate may change after getUserMedia is called (does this on macOS when using AirPods where sr defaults to one set in OS for playback device)
console.log("getUserMedia() success, stream created, initializing Recorder.js ...");
audioContext = new AudioContext();
//update the sr format
document.getElementById("formats").innerHTML="Format: 1 channel pcm # "+audioContext.sampleRate/1000+"kHz"
gumStream = stream;
input = audioContext.createMediaStreamSource(stream);
//create recorded object and configure to record mono sound (1 channel) / no need to record 2 channels as this will double file size and not improve quality of sound
rec = new Recorder(input,{numChannels:1})
//start the recording process
rec.record()
console.log("Recording started");
}).catch(function(err) {
//enable the record button if getUserMedia() fails
recordButton.disabled = false;
stopButton.disabled = true;
pauseButton.disabled = true
});
}
function pauseRecording(){
console.log("pauseButton clicked rec.recording=",rec.recording );
if (rec.recording){
//pause
rec.stop();
pauseButton.innerHTML="Resume";
}else{
//resume
rec.record()
pauseButton.innerHTML="Pause";
}
}
function stopRecording() {
console.log("stopButton clicked");
//disable the stop button, enable the record to allow for new recordings
stopButton.disabled = true;
recordButton.disabled = false;
pauseButton.disabled = true;
//reset pause button in case the recording is stopped while paused
pauseButton.innerHTML="Pause";
//tell the recorder to stop the recording
rec.stop();
//stop microphone access
gumStream.getAudioTracks()[0].stop();
//create the wav blob and pass it on to createDownloadLink
rec.exportWAV(createDownloadLink)*emphasized text*
}
function createDownloadLink(blob) {
var url = URL.createObjectURL(blob);
var au = document.createElement('audio');
var li = document.createElement('li');
var link = document.createElement('a');
//name of .wav file to use during upload and download (without extendion)
var filename = new Date().toISOString();
//add controls to the <audio> element
au.controls = true;
au.src = url;
//save to disk link
link.href = url;
link.download = filename+".wav"; //download forces the browser to donwload the file using the filename
link.innerHTML = "Save to disk";
//add the new audio element to li
li.appendChild(au);
//add the filename to the li
li.appendChild(document.createTextNode(filename+".wav "))
//add the save to disk link to li
li.appendChild(link);
//add the li element to the ol
recordingsList.appendChild(li);
}
main.py:
from flask import request
from flask import render_template
import os
import mysql.connector
app = Flask(__name__)
#app.route("/", methods=['POST', 'GET'])
def index():
if request.method == "POST":
print(type(request.get_data("audio_data")))
if os.path.isfile('./file.wav'):
print("./file.wav exists")
return render_template('index.html', request="POST")
else:
return render_template("index.html")
if __name__ == "__main__":
app.run()```
I am using recordJs library for recording voice of client and send it to the server. In firefox and other browsers, it works well without any error. when i try to run it in chrome, it starts recording the voice, but when it calls stopRecording function, it faces following error:
Uncaught TypeError: Cannot read property 'stop' of undefined
at stopRecording (توانایی-پرسش-سن-از-افراد:1209)
at startTimer (توانایی-پرسش-سن-از-افراد:1364)
Here is my JS codes:
<script type="text/javascript">
'use strict';
//webkitURL is deprecated but nevertheless
URL = window.URL || window.webkitURL;
let gumStream; //stream from getUserMedia()
let rec; //Recorder.js object
let input; //MediaStreamAudioSourceNode we'll be recording
// shim for AudioContext when it's not avb.
let AudioContext = window.AudioContext || window.webkitAudioContext;
let audioContext //audio context to help us record
function startRecording() {
console.log("recordButton clicked");
/*
Simple constraints object, for more advanced audio features see
https://addpipe.com/blog/audio-constraints-getusermedia/
*/
var constraints = { audio: true, video:false }
/*
Disable the record button until we get a success or fail from getUserMedia()
*/
/*
We're using the standard promise based getUserMedia()
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
*/
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
console.log("getUserMedia() success, stream created, initializing Recorder.js ...");
/*
create an audio context after getUserMedia is called
sampleRate might change after getUserMedia is called, like it does on macOS when recording through AirPods
the sampleRate defaults to the one set in your OS for your playback device
*/
audioContext = new AudioContext();
//update the format
// document.getElementById("formats").innerHTML="Format: 1 channel pcm # "+audioContext.sampleRate/1000+"kHz"
/* assign to gumStream for later use */
gumStream = stream;
/* use the stream */
input = audioContext.createMediaStreamSource(stream);
/*
Create the Recorder object and configure to record mono sound (1 channel)
Recording 2 channels will double the file size
*/
rec = new Recorder(input,{numChannels:1});
//start the recording process
rec.record();
console.log("Recording started");
}).catch(function(err) {
});
}
function pauseRecording(){
console.log("pauseButton clicked rec.recording=",rec.recording );
if (rec.recording){
//pause
rec.stop();
}else{
rec.record();
}
}
function stopRecording() {
//tell the recorder to stop the recording
rec.stop();
//stop microphone access
gumStream.getAudioTracks()[0].stop();
//create the wav blob and pass it on to createDownloadLink
rec.exportWAV(setUserVoice);
}
function setUserVoice(blob)
{
let formData = new FormData
formData.append('userVoice', blob)
$.ajax({
type: 'POST',
headers: {'X-CSRF-TOKEN': $('meta[name="csrf-token"]').attr('content')},
url: '{{ route('user.mockTest.participation.saveUserVoice') }}',
data: formData,
processData: false,
contentType: false,
success: function (data) {
if (data['result'] == 'success')
{
$('#recordUserVoice').prop('disabled', true);
}
else
{
Swal.fire(
'{{__('Error')}}',
'{{__('An error occurred')}}',
'error'
);
}
},
error: function (err) {
console.log(err);
}
});
}
function createDownloadLink(blob) {
var url = URL.createObjectURL(blob);
var au = document.createElement('audio');
var li = document.createElement('li');
var link = document.createElement('a');
//name of .wav file to use during upload and download (without extendion)
var filename = new Date().toISOString();
//add controls to the <audio> element
au.controls = true;
au.src = url;
//save to disk link
link.href = url;
link.download = filename+".wav"; //download forces the browser to donwload the file using the filename
link.innerHTML = "Save to disk";
//add the new audio element to li
li.appendChild(au);
//add the filename to the li
li.appendChild(document.createTextNode(filename+".wav "))
//add the save to disk link to li
li.appendChild(link);
//upload link
var upload = document.createElement('a');
upload.href="#";
upload.innerHTML = "Upload";
upload.addEventListener("click", function(event){
var xhr=new XMLHttpRequest();
xhr.onload=function(e) {
if(this.readyState === 4) {
console.log("Server returned: ",e.target.responseText);
}
};
var fd=new FormData();
fd.append("audio_data",blob, filename);
xhr.open("POST","upload.php",true);
xhr.send(fd);
})
li.appendChild(document.createTextNode (" "))//add a space in between
li.appendChild(upload)//add the upload link to li
//add the li element to the ol
recordingsList.appendChild(li);
}
document.getElementById('timer').innerHTML =
'00' + ":" + '00';
function startRecord()
{
startRecording();
startTimer();
}
function startTimer() {
$('#recordTextHolder').addClass('d-none');
$('#timer').removeClass('d-none');
var presentTime = document.getElementById('timer').innerHTML;
var timeArray = presentTime.split(/[:]+/);
var m = timeArray[0];
console.log(timeArray[1])
var s = checkSecond((parseInt(timeArray[1]) + 1));
if(parseInt(s) == 5)
{
m = '0'+(parseInt(m)+1)
s = '00'
}
if(m == 2 && s == 1){
stopRecording()
shake()
return
}
document.getElementById('timer').innerHTML =
m + ":" + s;
console.log(m)
setTimeout(startTimer, 1000);
}
function checkSecond(sec) {
if (sec < 10 && sec >= 0) {sec = "0" + sec}; // add zero in front of numbers <10
if (sec < 0) {sec = "59"};
return sec;
</script>
I would be grateful, if someone guide me to handle this problem.
It looks like you are setting the value of rec here:
rec = new Recorder(input,{numChannels:1});
And the error message is presumably from here:
function stopRecording() {
//tell the recorder to stop the recording
rec.stop();
Can you try adding a console.log?
function stopRecording() {
console.log("rec:", rec)
rec.stop();
Report back what rec contains at that time.
Thanks for reporting that it says "undefined".
Now ask yourself: how can 'rec' be undefined at that time?
I assume that your console is showing the "recordButton clicked"?
And the "getUserMedia() success..."?
How about the "Recording started" message?
I suggest getting rid of the following block:
.catch(function(err) {
});
What that block does is silently "swallow" any error messages that you would otherwise see. In general, don't put in empty catch blocks unless you genuinely do not want to know about errors occurring there.
i tried to built my website with audio player inside. But i have problem when i combined song that load from database with play button. I want to make my play button change when it clicked and load song from database. I have codes like these :
HTML and PHP
<div id="playbtn">
<button id="play_btn" onclick="playPause()"></button>
<?php
$song= "SELECT mp3Lagu FROM folksong WHERE songtitle = 'Apuse'";
$result = mysql_query($song);
while ($row = mysql_fetch_array($result)) {
echo'
<audio id="listenlagu">
<source src="data:audio/mp3;base64,'.base64_encode( $row['mp3Lagu'] ).'">
</audio>';
}
?></div>
I used javascript to change the display button like these :
JAVASCRIPT
<script>
var audio, playbtn;
function initAudioPlayer(){
audio = new Audio();
//audio = document.getElementById('listenlagu');
//audio.src = "audio/Apuse.mp3";
audio.src = document.getElementById('listenlagu');
audio.load();
audio.loop = true;
audio.play();
// Set object references
playbtn = document.getElementById("play_btn");
// Add Event Handling
playbtn.addEventListener("click",playPause);
// Functions
function playPause(){
if(audio.paused){
audio.play();
playbtn.style.background = "url(images/pause70.png) no-repeat";
} else {
audio.pause();
playbtn.style.background = "url(images/play70.png) no-repeat";
}
}
}
window.addEventListener("load", initAudioPlayer);
</script>
but it's not working when i combined with javascript -_____-"
Anyone know where the problems are ?
Can you help me to fix these ?
You should update the following (full code example below):
assign the src attribute to audio.src
move your playPause function outside of the initAudioPlayer function
make your playPause function accept 2 parameters 1 - "audio" (the new Audio object) and 2 - playbtn (the button element), and have it return a function so that it can be used as a callback in the addEventListenter method
Here is a working plunkr: https://plnkr.co/edit/bticzS?p=preview
initAudioPlayer();
function initAudioPlayer(){
var audio = new Audio();
var aContainer = document.getElementById('listenlagu');
// assign the audio src
audio.src = aContainer.querySelectorAll('source')[0].getAttribute('src');
audio.load();
audio.loop = true;
audio.play();
// Set object references
var playbtn = document.getElementById("play_btn");
// Add Event Handling
playbtn.addEventListener("click", playPause(audio, playbtn));
}
// Functions
function playPause(audio, playbtn){
return function () {
if(audio.paused){
audio.play();
playbtn.style.background = "url(images/pause70.png) no-repeat";
} else {
audio.pause();
playbtn.style.background = "url(images/play70.png) no-repeat";
}
}
}
I'm trying to use the HTML5 Web Audio API to create an equalizer type graphic, but for some reason the data is never brought into the MediaElementSource.
$('.table').on('click', 'tr', function() {
if ($(this) != $('.table tr:first-child')) {
var src = $(this).children().first().attr('data-src');
var audio = new Audio();
audio.src = src;
audio.controls = true;
$('.file-playlist').append(audio);
console.log(audio);
audio.load();
audio.play();
context = new webkitAudioContext();
console.log(context);
analyser = context.createAnalyser();
console.log(analyser);
source = context.createMediaElementSource(audio);
console.log(source);
source.connect(analyser);
console.log(source);
analyser.connect(context.destination);
console.log(analyser);
rafCallback();
}
});
In the function above I have created an audio element and used that as the source for the context's MediaElementSource however there is some issue that I cannot find because in the console, the activeSourceCount attribute of the AudioContext is always 0, which means that it never received the audio element that I gave it as a parameter.
EDIT:
I modified my code according to what idbehold said; however, now I have 2 errors, an InvalidStateError: DOM Exception 11 on the source = context.createMediaElementSource(audio); line, and a TypeError: Cannot read property 'frequencyBinCount' of undefined at the line var freqByteData = new Uint8Array(analyser.frequencyBinCount); Additionally, the MediaElementSource still has 0 activeSourceCounts.
$(document).ready(function() {
var context = new webkitAudioContext();
console.log(context);
var audio;
var source;
$('.table').on('click', 'tr', function() {
if ($(this) != $('.table tr:first-child')) {
var src = $(this).children().first().attr('data-src');
if (audio) {
audio.remove();
audio = new Audio();
audio.src = src;
audio.controls = true;
$('.file-playlist').append(audio);
console.log(audio);
audio.addEventListener("canplay", function(e) {
analyser = context.createAnalyser();
console.log(analyser);
source.disconnect();
source = context.createMediaElementSource(audio);
console.log(source);
source.connect(analyser);
console.log(source);
analyser.connect(context.destination);
console.log(analyser);
audio.load();
audio.play();
}, false);
}
else {
audio = new Audio();
audio.src = src;
audio.controls = true;
$('.file-playlist').append(audio);
console.log(audio);
audio.addEventListener("canplay", function(e) {
analyser = (analyser || context.createAnalyser());
console.log(analyser);
source = context.createMediaElementSource(audio);
console.log(source);
source.connect(analyser);
console.log(source);
analyser.connect(context.destination);
console.log(analyser);
audio.load();
audio.play();
}, false);
}
}
rafCallback();
});
});
EDIT 2:
in my rafCallback() function, I noticed that the data from the Uint8Array was never being processed, so I added the getByteFrequencyData(analyser.frequencyBinCount); which fixed everything.
You can only create a single AudioContext per window and you should be waiting until the audio's canplay event fires before setting up your MediaElementSource. You should also be disconnecting the MediaElementSource when you're finished using it.
Here's an example that I used to answer a similar question: http://jsbin.com/acolet/1/