I am using the MediaStreamRecorder.js library for audio capture with JavaScript. Almost everything works. The only problem I am finding is that when I click Stop to end the recording, the red recording icon is still shown up there on the browser tab.
Does anyone know how to remove this icon when Stop is clicked?
Example in jsFiddle: https://jsfiddle.net/davidsadan/wazb1jks
Here is a screenshot of how it looks after I click Stop:
PS: It only works over HTTPS. Sorry for my English, I'm Brazilian...
var mediaConstraints = {
    audio: true
};
var mediaRecorder;
var blobURL;

function onMediaSuccess(stream) {
    $(function () {
        mediaRecorder = new MediaStreamRecorder(stream);
        mediaRecorder.mimeType = 'audio/wav';
        mediaRecorder.audioChannels = 1;
        mediaRecorder.ondataavailable = function (blob) {
            // POST/PUT "Blob" using FormData/XHR2
            blobURL = URL.createObjectURL(blob);
            $("#result").append('<audio controls src="' + blobURL + '"></audio><br>' + blobURL + '');
        };
        mediaRecorder.start(5000 * 5000);
        setTimeout(function () {
            mediaRecorder.stop();
        }, 120 * 1000); // If Stop isn't clicked, recording stops automatically after 2 minutes.
    });
}

function onMediaError(e) {
    console.error('media error', e);
}

function onStop() {
    mediaRecorder.stop();
    mediaRecorder.stream.stop();
}
var interval;

function contadorIncremento() {
    var count = 1;
    interval = setInterval(function () {
        if (count > 1)
            $('.contador').html("setInterval: " + count++ + " seconds have passed!");
        else
            $('.contador').html("setInterval: " + count++ + " second has passed!");
    }, 1000);
}

function stopContadorIncremento() {
    clearInterval(interval);
    $('.contador').html("");
}
$(function () {
    $(".play").on("click", function () {
        navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);
        contadorIncremento();
    });
    $(".stop").on("click", function () {
        mediaRecorder.stop();
        stopContadorIncremento();
    });
    $(".resume").on("click", function () {
        mediaRecorder.resume();
    });
    $(".salvar").on("click", function () {
        mediaRecorder.save();
    });
});
<script src="https://webrtcexperiment-webrtc.netdna-ssl.com/MediaStreamRecorder.js"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
<div id="result"></div>
<button class="play">Start</button>
<button class="stop">Stop</button>
<button class="resume">Resume</button>
<button class="salvar">Salvar</button>
<div class="contador"></div>
You must be aware that the red recording icon is triggered by the getUserMedia method. That method starts capturing the webcam, or the microphone in your case (audio-only).
What happens with your code is that you stop the recording, mediaRecorder.stop(), but you never stop the capture itself.
So keep a reference to the stream you want to stop, and stop it on demand.
var localStream;

// ...keep the stream reference onSuccess
function onMediaSuccess(stream) {
    localStream = stream;
    // ...
}

// ...close it when requested (TIP: add request-status variables)
$(".stop").on("click", function () {
    mediaRecorder.stop();
    localStream.stop();
});
Hope this helps!
EDITED
Due to the deprecation of the stream.stop() function, what you now need to do is select the specific track and stop it.
There are three MediaStream deprecations in Chrome 45:
MediaStream.ended
MediaStream.label
MediaStream.stop()
In parallel are two additions:
MediaStream.active
MediaStreamTrack.stop()
So to stop it, first get these tracks (you should have only one).
// ...close it when requested (TIP: add request-status variables)
$(".stop").on("click", function () {
    mediaRecorder.stop();
    // can also use getAudioTracks() or getVideoTracks()
    var track = localStream.getTracks()[0]; // if there is only one media track
    // ...
    track.stop();
});
Check more info about this upgrade and related resources here: https://developers.google.com/web/updates/2015/07/mediastream-deprecations?hl=en#stop-ended-and-active
You can also do this:
var tracks = localStream.getTracks();
tracks.forEach(function (track) {
    track.stop();
});
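If you need to support browsers on both sides of that deprecation, a small feature-detecting helper along these lines may help (a sketch, not part of the original answer):

// a minimal sketch: prefer stopping individual tracks, and fall back to the
// deprecated MediaStream.stop() on older (pre-Chrome 45) browsers
function stopStream(stream) {
    if (stream.getTracks) {
        stream.getTracks().forEach(function (track) {
            track.stop();
        });
    } else if (stream.stop) {
        stream.stop(); // deprecated fallback
    }
}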
var mediaConstraints = {
    audio: true
};
var mediaRecorder;
var localStream; // keep the stream so its tracks can be stopped later
var blobURL;

function onMediaSuccess(stream) {
    $(function () {
        localStream = stream;
        mediaRecorder = new MediaStreamRecorder(stream);
        mediaRecorder.mimeType = 'audio/wav';
        mediaRecorder.audioChannels = 1;
        mediaRecorder.ondataavailable = function (blob) {
            // POST/PUT "Blob" using FormData/XHR2
            blobURL = URL.createObjectURL(blob);
            $("#result").append('<audio controls src="' + blobURL + '"></audio><br>' + blobURL + '');
        };
        mediaRecorder.start(5000 * 5000);
        setTimeout(function () {
            onStop();
        }, 120 * 1000); // If Stop isn't clicked, recording stops automatically after 2 minutes.
    });
}

function onMediaError(e) {
    console.error('media error', e);
}

function onStop() {
    mediaRecorder.stop();
    localStream.getTracks().forEach(function (track) {
        track.stop();
    });
}
var interval;

function contadorIncremento() {
    var count = 1;
    interval = setInterval(function () {
        if (count > 1)
            $('.contador').html("setInterval: " + count++ + " seconds have passed!");
        else
            $('.contador').html("setInterval: " + count++ + " second has passed!");
    }, 1000);
}

function stopContadorIncremento() {
    clearInterval(interval);
    $('.contador').html("");
}
$(function () {
    $(".play").on("click", function () {
        navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);
        contadorIncremento();
    });
    $(".stop").on("click", function () {
        onStop();
        stopContadorIncremento();
    });
    $(".resume").on("click", function () {
        mediaRecorder.resume();
    });
    $(".salvar").on("click", function () {
        mediaRecorder.save();
    });
});
I'm using a module to detect when the user is speaking, called hark. Here's some of the code:
// original source code is taken from:
// https://github.com/SimpleWebRTC/hark
// copyright goes to &yet team
// edited by Muaz Khan for RTCMultiConnection.js
function hark(stream, options) {
    var audioContextType = window.webkitAudioContext || window.AudioContext;
    var harker = this;
    harker.events = {};

    harker.on = function (event, callback) {
        harker.events[event] = callback;
    };

    harker.emit = function () {
        if (harker.events[arguments[0]]) {
            harker.events[arguments[0]](arguments[1], arguments[2], arguments[3], arguments[4]);
        }
    };

    // make it not break in non-supported browsers
    if (!audioContextType) return harker;

    options = options || {};
    // Config
    var smoothing = (options.smoothing || 0.1),
        interval = (options.interval || 50),
        threshold = options.threshold,
        play = options.play,
        history = options.history || 10,
        running = true;

    (...)

    return harker;
}
What is this line for?
var harker = this;
When I checked in the debugger, this stores a Window object in harker. And from what I'm seeing it makes for some unexpected behavior when I call hark more than once.
Why not just do var harker;?
Full code is here:
https://www.webrtc-experiment.com/hark.js
And here's a demo where it's used:
<style>
html, body {
    margin: 0 !important;
    padding: 0 !important;
}
video {
    width: auto;
    max-width: 100%;
}
</style>

<title>Auto Stop RecordRTC on Silence</title>
<h1>Auto Stop RecordRTC on Silence</h1>
<br>
<button id="btn-start-recording">Start Recording</button>
<button id="btn-stop-recording" disabled style="display: none;">Stop Recording</button>
<hr>
<video controls autoplay playsinline></video>

<script src="/RecordRTC.js"></script>
<script src="https://www.webrtc-experiment.com/hark.js"></script>
<script>
var video = document.querySelector('video');
var h1 = document.querySelector('h1');
var default_h1 = h1.innerHTML;

function captureCamera(callback) {
    navigator.mediaDevices.getUserMedia({ audio: true, video: true }).then(function (camera) {
        callback(camera);
    }).catch(function (error) {
        alert('Unable to capture your camera. Please check console logs.');
        console.error(error);
    });
}

function stopRecordingCallback() {
    video.srcObject = null;
    var blob = recorder.getBlob();
    video.src = URL.createObjectURL(blob);
    recorder.camera.stop();
    video.muted = false;
}

var recorder; // globally accessible

document.getElementById('btn-start-recording').onclick = function () {
    this.disabled = true;
    captureCamera(function (camera) {
        video.muted = true;
        video.srcObject = camera;

        recorder = RecordRTC(camera, {
            type: 'video'
        });
        recorder.startRecording();

        var max_seconds = 3;
        var stopped_speaking_timeout;
        var speechEvents = hark(camera, {});

        speechEvents.on('speaking', function () {
            if (recorder.getBlob()) return;
            clearTimeout(stopped_speaking_timeout);
            if (recorder.getState() === 'paused') {
                // recorder.resumeRecording();
            }
            h1.innerHTML = default_h1;
        });

        speechEvents.on('stopped_speaking', function () {
            if (recorder.getBlob()) return;
            // recorder.pauseRecording();
            stopped_speaking_timeout = setTimeout(function () {
                document.getElementById('btn-stop-recording').click();
                h1.innerHTML = 'Recording is now stopped.';
            }, max_seconds * 1000);

            // just for logging purposes (you can remove the code below)
            var seconds = max_seconds;
            (function looper() {
                h1.innerHTML = 'Recording is going to be stopped in ' + seconds + ' seconds.';
                seconds--;
                if (seconds <= 0) {
                    h1.innerHTML = default_h1;
                    return;
                }
                setTimeout(looper, 1000);
            })();
        });

        // release camera on stopRecording
        recorder.camera = camera;

        document.getElementById('btn-stop-recording').disabled = false;
    });
};

document.getElementById('btn-stop-recording').onclick = function () {
    this.disabled = true;
    recorder.stopRecording(stopRecordingCallback);
};
</script>

<footer style="margin-top: 20px;"><small id="send-message"></small></footer>
<script src="https://www.webrtc-experiment.com/common.js"></script>
The pattern of assigning the value of this to a variable is something you can read more about by searching for the "this/that" pattern (or self = this), since those are common names for the variable that "saves" a reference to this.
The reason for doing it is that this changes depending on the context in which a function is called. If you assign this to a variable at a specific scope, you can pass that variable along to other functions, since inside them this might mean something completely different.
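A contrived sketch of the difference (a hypothetical Counter, not code from hark):

function Counter() {
    var self = this; // the "that" pattern: capture the instance

    this.count = 0;

    setInterval(function () {
        // in this callback `this` is the global object (window), not the
        // Counter, because the callback is invoked as a plain function;
        // the captured `self` still points at the instance
        self.count++;
    }, 1000);
}

var c = new Counter(); // c.count keeps increasing as expected

It also matches what you saw in the debugger: hark is called as a plain function rather than with new, so this inside it is the global Window object, and every call decorates that same Window, which would explain the interference between multiple hark() calls.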
I am trying to read an array of sentences aloud asynchronously, but I want to wait for the duration of the current audio before reading the next sentence.
I'm trying to setTimeout for the audio's duration using onloadedmetadata, but it does not work. I think audio.duration only works inside onloadedmetadata: when I debug, I get the right duration inside onloadedmetadata and NaN outside of it.
Here is what I have done.
Excuse my English.
var j = 0, texte = "";

function listen(callback) {
    var sentence = texte[j];
    if (j < texte.length) {
        j++;
    } else {
        j = 0;
        texte = "";
        return;
    }
    callback(sentence, listen);
}

function play(sentence, callback) {
    // Play the received speech
    googleTTS(sentence, 'fr', 1) // speed normal = 1 (default), slow = 0.24
        .then(function (url) {
            console.log(url); // https://translate.google.com/translate_tts?...
            var audio = new Audio(url);
            var duration;
            audio.onloadedmetadata = function () {
                duration = audio.duration;
            };
            audio.play();
            setTimeout(function () {
                callback(play);
            }, duration);
        });
}
Try adding an ended event handler to your audio object:
audio.addEventListener('ended', function () {
    callback(play);
});
audio.play();
This event will trigger when the current audio has finished playing.
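Applied to the sentence loop above, a minimal sketch (assuming the googleTTS helper from the question) could chain playback entirely off the ended event:

function playSentences(sentences, i) {
    i = i || 0;
    if (i >= sentences.length) return; // all sentences have been read
    googleTTS(sentences[i], 'fr', 1).then(function (url) {
        var audio = new Audio(url);
        // only move on once the current sentence has finished playing
        audio.addEventListener('ended', function () {
            playSentences(sentences, i + 1);
        });
        audio.play();
    });
}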
audio.play() probably doesn't pause to load metadata; that's what the onloadedmetadata callback is for. Therefore duration isn't set yet by the time you call setTimeout.
Instead of
var duration;
audio.onloadedmetadata = function () {
    duration = audio.duration;
};
audio.play();
setTimeout(function () {
    callback(play);
}, duration);
Try
var duration;
audio.onloadedmetadata = function () {
    duration = audio.duration;
    setTimeout(function () {
        callback(play);
    }, duration * 1000);
};
audio.play();
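Note that audio.duration is reported in seconds while setTimeout expects milliseconds, hence the * 1000 conversion.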
Edited - update at the bottom of the post
I'm building a web-based app for Android using PhoneGap, and I came across this problem: after initializing getUserMedia, when I use the volume up/down buttons, the volume control is for the call volume and not for the speaker, even if I haven't started a new recording.
In addition, I noticed that the phone actually thinks it is inside a phone call while the app is running. For example: I start my app, then I open WhatsApp and try to record a voice message, and the message is cancelled.
I know that the next segment is the problem (I commented it out and the problem disappeared):
/*** recording audio block ***/
function audioRecordingInit() {
    navigator.getUserMedia = (navigator.getUserMedia ||
                              navigator.webkitGetUserMedia ||
                              navigator.mozGetUserMedia ||
                              navigator.msGetUserMedia);

    if (navigator.getUserMedia) {
        console.log('getUserMedia supported.');
        var constraints = { audio: true };
        var chunks = [];

        var onSuccess = function (stream) {
            $.globals.mediaRecorder = new MediaRecorder(stream);

            $.globals.mediaRecorder.onstop = function (e) {
                console.log("data available after MediaRecorder.stop() called.");
                var blob = new Blob(chunks, { 'type': 'audio/ogg; codecs=opus' });
                chunks = [];
                if (!$("#recordBtn").hasClass("private"))
                    $.globals.lastRecorded.audio.src = window.URL.createObjectURL(blob);
                else
                    $.globals.lastRecordedPrivate.audio.src = window.URL.createObjectURL(blob);
                console.log("audio created");
            };

            $.globals.mediaRecorder.ondataavailable = function (e) {
                chunks.push(e.data);
            };
        };

        var onError = function (err) {
            console.log('The following error occurred: ' + err);
        };

        navigator.getUserMedia(constraints, onSuccess, onError); // I think this is the problem
    } else {
        console.log('getUserMedia not supported on your browser!');
    }
    /*** end of recording audio block ***/
}
This function is called after deviceready.
I'm also adding the startRecording and stopRecording functions below.
function startRecording(event) {
    document.getElementById("recordingTime").innerText = "00:00";
    $.globals.mediaRecorder.start();
    console.log($.globals.mediaRecorder.state);
    console.log("recorder started");

    $.globals.timerInterval = setInterval(function () {
        $.globals.sec += 1;
        if ($.globals.sec == 60) {
            $.globals.min++;
            $.globals.sec = 0;
        }
        if ($.globals.min < 10) {
            if ($.globals.sec < 10)
                $.globals.timeText = "0" + $.globals.min + ":0" + $.globals.sec;
            else
                $.globals.timeText = "0" + $.globals.min + ":" + $.globals.sec;
        }
        else if ($.globals.sec < 10)
            $.globals.timeText = $.globals.min + ":0" + $.globals.sec;
        else
            $.globals.timeText = $.globals.min + ":" + $.globals.sec;
        document.getElementById("recordingTime").innerText = $.globals.timeText;
    }, 1000);
}

function stopRecording() {
    if ($(".circleNav").hasClass("recording"))
        $(".circleNav").toggleClass("recording");
    $.globals.currentState = "Recorded";
    console.log($.globals.mediaRecorder.state);
    if ($.globals.mediaRecorder.state == "inactive") {
        $.globals.mediaRecorder.start();
    }
    $.globals.mediaRecorder.stop();
    console.log("recorder stopped");
    clearInterval($.globals.timerInterval);
}
startRecording runs on touchstart on the record button, and stopRecording is called on touchend on the record button.
Thank you for your help.
Update:
The conflict was with the microphone, because the stream was always live, not only while recording. It works fine now, but I still need to disable the record button while in a phone call; otherwise it will conflict and possibly crash the app or disconnect the mic from the call.
OK, so after a lot of reading about MediaRecorder, MediaStream and MediaStreamTrack, I found the problem: the audio stream stayed active and kept using the microphone, which denied me access to phone calls and to voice messages in WhatsApp.
I will add my solution below:
function audioRecordingInit() {
    // polyfill the prefixed implementations onto navigator.getUserMedia
    navigator.getUserMedia = (navigator.getUserMedia ||
                              navigator.webkitGetUserMedia ||
                              navigator.mozGetUserMedia ||
                              navigator.msGetUserMedia);

    if (navigator.getUserMedia) {
        console.log('getUserMedia supported.');
        navigator.getUserMedia({ audio: true }, onSuccessMedia, onErrorMedia);
        return true;
    } else {
        console.log('getUserMedia not supported on your browser!');
        return false;
    }
}
Notice that I made this function return a boolean value, and that I separated the success and error callbacks.
function onSuccessMedia(stream) {
    var chunks = [];
    $.globals.mediaStream = stream;
    console.log(stream);

    $.globals.mediaRecorder = new MediaRecorder(stream);

    $.globals.mediaRecorder.onstop = function (e) {
        console.log("data available after MediaRecorder.stop() called.");
        var blob = new Blob(chunks, { 'type': 'audio/mp3; codecs=opus' });
        chunks = [];
        if (!$("#recordBtn").hasClass("private"))
            $.globals.lastRecorded.audio.src = window.URL.createObjectURL(blob);
        else
            $.globals.lastRecordedPrivate.audio.src = window.URL.createObjectURL(blob);
        console.log("audio created");
    };

    $.globals.mediaRecorder.ondataavailable = function (e) {
        chunks.push(e.data);
    };

    $.globals.mediaRecorder.start();
    console.log("recording start");
}

function onErrorMedia(err) {
    console.log('The following error occurred: ' + err);
}
Here I moved the mediaRecorder.start() call into the onSuccess function instead of inside startRecording.
And finally I changed the startRecording function to:
function startRecording(event) {
    if (audioRecordingInit()) {
        // here go all of the visual changes
    }
}
And that's it, everything is working correctly.
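To go one step further and release the microphone as soon as recording stops (the root cause found above), stopping the stored stream's tracks should work. A sketch, assuming the $.globals.mediaStream reference saved in onSuccessMedia:

function stopRecording() {
    $.globals.mediaRecorder.stop();
    // release the microphone so phone calls and other apps can use it again
    if ($.globals.mediaStream) {
        $.globals.mediaStream.getTracks().forEach(function (track) {
            track.stop();
        });
    }
}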
I am using the code from MediaStreamRecorder to record video and audio in a single webm format. I have modified the code for my use case.
What I am trying to do is: whenever the user clicks a button like "Start", it should start recording a video, and when they click "Stop" it should stop. But whenever I try to stop the video (using mediaRecorder.stop()) it doesn't stop at all; it throws an error like
TypeError: mediaRecorder is undefined
and the recording continues even after calling stop() on mediaRecorder.
PS: I am using Rails. Also, the same code works well when using onclick().
Below is the log from the console:
"start worked "
TypeError: mediaRecorder is undefined
"stop worked"
Here is the code I am trying to run:
<div class="container">
    <button id="first">Start!!</button>
    <button id="second">Stop!!</button>
</div>

<script>
var mediaConstraints = {
    audio: !!navigator.mozGetUserMedia,
    video: true
};
var mediaRecorder;

function onMediaSuccess(stream) {
    var mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'video/webm';
    mediaRecorder.ondataavailable = function (blob) {
        // do something with blob
    };
    mediaRecorder.start(3000);
}

function onMediaError(e) {
    console.error('media error', e);
}

document.getElementById('first').addEventListener('click', firstHandler, false);
document.getElementById('second').addEventListener('click', secondHandler, false);

function firstHandler() {
    console.log('start worked ');
    navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);
}

function secondHandler() {
    console.log('stop worked');
    mediaRecorder.stop();
}

// below function via: http://goo.gl/B3ae8c
function bytesToSize(bytes) {
    var k = 1000;
    var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    if (bytes === 0) return '0 Bytes';
    var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
    return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}

// below function via: http://goo.gl/6QNDcI
function getTimeLength(milliseconds) {
    var data = new Date(milliseconds);
    return data.getUTCHours() + " hours, " + data.getUTCMinutes() + " minutes and " + data.getUTCSeconds() + " second(s)";
}

////////////////////////////////////////////
// Identify browser with the code below
///////////////////////////////////////////
// function GetBrowser()
// {
//     return navigator ? navigator.userAgent.toLowerCase() : "other";
// }
// console.log(GetBrowser());
</script>
I'm trying to stream a webm video file to the client.
The first chunk (which is about 4466 bytes long) "works" (using appendBuffer), as I can see the <video> resize to the video's resolution.
Here's the error I get after appending a third time:
Uncaught InvalidStateError: An attempt was made to use an object that is not, or is no longer, usable.
The SourceBuffer's buffered property has this error:
[Exception: DOMException]
Here's the client code:
var client = new BinaryClient('ws://127.0.0.1:3080/binary-endpoint');
var ms = new MediaSource();
var sourceBuffer;
var video = document.querySelector('video');
var needUpdate = true;
var paused = true;
var busy = false;
var initial = true;

ms.addEventListener('sourceopen', function (e) {
    console.log('The MediaSource has been opened: ', e);
    sourceBuffer = ms.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    ms.duration = 1000;

    sourceBuffer.addEventListener('updatestart', function (e) {
        busy = true;
    });
    sourceBuffer.addEventListener('updateend', function (e) {
        console.log(e);
        busy = false;
    });
    sourceBuffer.addEventListener('update', function (e) {
        console.log('Update succeeded', e);
    });

    console.log('The SourceBuffer has been created', sourceBuffer);
}, false);

client.on('stream', function (stream, meta) {
    stream.on('data', function (data) {
        var DATA = new Uint8Array(data);
        if (needUpdate && !busy) {
            sourceBuffer.appendBuffer(DATA);
        }
        if (!initial && paused) {
            video.play();
            paused = false;
        }
        initial = false;
    });
});

video.src = window.URL.createObjectURL(ms);