I'm trying to stream a webm video file to the client.
The first chunk (which is about 4466 bytes long) "works" (using appendBuffer), as I can see the <video> resize to the video's resolution.
Here's the error I get after appending a third time:
Uncaught InvalidStateError: An attempt was made to use an object that is not, or is no longer, usable.
The SourceBuffer's buffered property has this error:
[Exception: DOMException]
Here's the client code:
var client = new BinaryClient('ws://127.0.0.1:3080/binary-endpoint');
var ms = new MediaSource();
var sourceBuffer;
var video = document.querySelector('video');
var needUpdate = true;
var paused = true;
var busy = false;
var initial = true;
ms.addEventListener('sourceopen', function(e) {
console.log('The MediaSource has been opened: ', e)
sourceBuffer = ms.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
ms.duration = 1000;
sourceBuffer.addEventListener('updatestart', function(e) {
busy = true;
});
sourceBuffer.addEventListener('updateend', function(e) {
console.log(e);
busy = false;
});
sourceBuffer.addEventListener('update', function(e) {
console.log('Update succeeded', e)
});
console.log('The SourceBuffer has been created', sourceBuffer);
}, false);
client.on('stream', function(stream, meta){
stream.on('data', function(data) {
var DATA = new Uint8Array(data);
if (needUpdate && !busy) {
sourceBuffer.appendBuffer(DATA);
}
if (!initial && paused) {
video.play();
paused = false;
}
initial = false;
});
});
video.src = window.URL.createObjectURL(ms);
Related
I'm using this function to create a sound, which works well on desktop and Android, and works initially on iOS when I use a touchevent to start it. I need to later replace the sound with another sound file, however on iOS it doesn't start - I'm assuming because it needs another user interaction to play the sound.
This is a VR app in a headset so this kind of user interaction isn't possible. Is there another way of replacing the sound or another non-click user interaction I can use like movement?
I've seen this http://matt-harrison.com/perfect-web-audio-on-ios-devices-with-the-web-audio-api/
Which seems to have another solution, but I don't want to pre-load all of the files (they're reasonably big and there's 10 of them) which seems to be a requirement here - plus I use the pause function in the code I have. Are there any easy ways round this?
var AudioContext = AudioContext || webkitAudioContext, context = new AudioContext();
function createSound(filename) {
console.log('createSound()');
var url = cdnPrefix + '/' + filename;
var buffer;
context = new AudioContext();
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
// Decode asynchronously
request.onload = function() {
context.decodeAudioData(request.response, function(b) {
buffer = b;
play();
});
}
request.send();
var sourceNode = null,
startedAt = 0,
pausedAt = 0,
playing = false,
volume = context.createGain();
var play = function() {
if(playing || !buffer)
return;
var offset = pausedAt;
sourceNode = context.createBufferSource();
sourceNode.connect(context.destination);
sourceNode.connect(volume);
volume.gain.value = 1;
sourceNode.buffer = buffer;
sourceNode.start(0, offset);
sourceNode.onended = onEnded;
sourceNode.onstatechange = onStateChange;
sourceNode.onloaded = onLoaded;
//sourceNode.loop = true;
startedAt = context.currentTime - offset;
pausedAt = 0;
playing = true;
$(document).trigger("voiceoverPlay");
if(isPaused == true)
pause();
};
function onEnded(event){
$(document).trigger("voiceoverEnded");
play();
}
function onStateChange(event){
console.log('onStateChange',event);
}
function onLoaded(event){
console.log('onLoaded',event);
}
var pause = function() {
var elapsed = context.currentTime - startedAt;
stop();
pausedAt = elapsed;
$(document).trigger("voiceoverPause");
};
var stop = function() {
if (sourceNode) {
sourceNode.disconnect();
if(playing === true)
sourceNode.stop(0);
sourceNode = null;
}
pausedAt = 0;
startedAt = 0;
playing = false;
};
var getPlaying = function() {
return playing;
};
var getCurrentTime = function() {
if(pausedAt) {
return pausedAt;
}
if(startedAt) {
return context.currentTime - startedAt;
}
return 0;
};
var setCurrentTime = function(time) {
pausedAt = time;
};
var getDuration = function() {
return buffer.duration;
};
return {
getCurrentTime: getCurrentTime,
setCurrentTime: setCurrentTime,
getDuration: getDuration,
getPlaying: getPlaying,
play: play,
pause: pause,
stop: stop
};
}
You need a touch event for each sound.
I ended up using SoundJS which is much better.
Edited - Update in bottom of post
I'm building a web based app for android using phonegap,
and i came across this problem,
after the initialization of getusermedia when i use the Volume down/up button the volume control is for call and not for speaker,
even if i didn't start a new recording...
in addition i noticed that the phone actually thinks its inside a phone call while the app is running for example:
i start my app , then i open whatsapp and try to record a voice message the message is being canceled.
I know that the next segment is the problem (i commented it and there was no problem)
/***recording audio block***/
function audioRecordingInit() {
navigator.getUserMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
if (navigator.getUserMedia) {
console.log('getUserMedia supported.');
var constraints = {audio: true};
var chunks = [];
var onSuccess = function (stream) {
$.globals.mediaRecorder = new MediaRecorder(stream);
$.globals.mediaRecorder.onstop = function(e) {
console.log("data available after MediaRecorder.stop() called.");
var blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
chunks = [];
if(!$("#recordBtn").hasClass("private"))
$.globals.lastRecorded.audio.src = window.URL.createObjectURL(blob);
else
$.globals.lastRecordedPrivate.audio.src = window.URL.createObjectURL(blob);
console.log("audio created");
};
$.globals.mediaRecorder.ondataavailable = function(e) {
chunks.push(e.data);
}
};
var onError = function (err) {
console.log('The following error occured: ' + err);
};
navigator.getUserMedia(constraints, onSuccess, onError);// I think this is the problem
}
else{
console.log('getUserMedia not supported on your browser!');
}
/***end of recording audio block***/
}
this function is called after device ready
and im adding also the start recording and stop recording functions below
function startRecording(event) {
document.getElementById("recordingTime").innerText = "00:00";
$.globals.mediaRecorder.start();
console.log($.globals.mediaRecorder.state);
console.log("recorder started");
$.globals.timerInterval = setInterval(function () {
$.globals.sec += 1;
if ($.globals.sec == 60) {
$.globals.min++;
$.globals.sec = 0;
}
if ($.globals.min < 10) {
if ($.globals.sec < 10)
$.globals.timeText = "0" + $.globals.min + ":0" + $.globals.sec;
else
$.globals.timeText = "0" + $.globals.min + ":" + $.globals.sec;
}
else if ($.globals.sec < 10)
$.globals.timeText = min + ":0" + $.globals.sec;
else
$.globals.timeText = min + ":" + $.globals.sec;
document.getElementById("recordingTime").innerText = $.globals.timeText;
}, 1000);
}
function stopRecording() {
if($(".circleNav").hasClass("recording"))
$(".circleNav").toggleClass("recording");
$.globals.currentState="Recorded";
console.log($.globals.mediaRecorder.state);
if($.globals.mediaRecorder.state=="inactive"){
$.globals.mediaRecorder.start();
}
$.globals.mediaRecorder.stop();
console.log("recorder stopped");
clearInterval($.globals.timerInterval);
}
startRecording starts when touchstarts on record button
stopRecording is called when touchend on record button
thank you for your help
Update:
the conflict was with the microphone because the stream was always live and not only when recording.
now it works fine but still needs to make the record button disabled while in a phone call otherwise it will conflict and possibly crash the app or diconnect the mic from the all or maybe even disconnect it.
ok,
so after alot of reading about MediaRecorder,MediaStream and MediaStreamTrack.
I found the problem, the stream of audio stayed active and was using the microphone and in so denied me access to phone calls and voice messages in whatsapp.
I will add my solution below:
function audioRecordingInit() {
navigator.mediaDevices.getUserMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
if (navigator.getUserMedia) {
console.log('getUserMedia supported.');
navigator.getUserMedia({audio:true}, onSuccessMedia, onErrorMedia);
return true;
}
else{
console.log('getUserMedia not supported on your browser!');
return false;
}
}
notice that i made this function return a boolean value and i separated the success and error functions.
function onSuccessMedia(stream) {
var chunks = [];
$.globals.mediaStream=stream;
console.log(stream);
$.globals.mediaRecorder = new MediaRecorder(stream);
$.globals.mediaRecorder.onstop = function(e) {
console.log("data available after MediaRecorder.stop() called.");
var blob = new Blob(chunks, { 'type' : 'audio/mp3; codecs=opus' });
chunks = [];
if(!$("#recordBtn").hasClass("private"))
$.globals.lastRecorded.audio.src = window.URL.createObjectURL(blob);
else
$.globals.lastRecordedPrivate.audio.src = window.URL.createObjectURL(blob);
console.log("audio created");
};
$.globals.mediaRecorder.ondataavailable = function(e) {
chunks.push(e.data);
};
$.globals.mediaRecorder.start();
console.log("recording start");
}
function onErrorMedia(err) {
console.log('The following error occured: ' + err);
}
here i moved the mediaRecorder.start() to the onSuccess function instead inside of the start recording...
and finely I changed the start recording function to
function startRecording(event) {
if(audioRecordingInit()) {
//here is all of the visual changes
}
}
and that's it everything is working correctly
I am working on a project where I want to
getUserMedia -> MediaRecorder -> socketIO -> MediaSource appendbuffer
I got it to work, however after a few seconds it randomly stops.
I know about WebRTC, but in the project I am working on it's based on an environment which is a version of Chrome embedded that doesn't support WebRTC.
Server:
'use strict';
const io = require('socket.io')();
io.on('connection', (socket) => {
console.log('connection');
socket.on('stream', (data) => {
socket.emit('stream', data);
});
});
io.listen(3001);
Client:
//Codecs
var SOURCE_BUFFER_MIME = 'video/webm; codecs="vp8, vorbis"';
var RECORDER_MIME = 'video/webm; codecs="vp8"';
//Here buffers are stored when sourcebuffer is still 'updating'
var buffers = [];
//Video1: Directly from get user media
var $video1 = document.getElementById('video1');
//Video2: Using mediasource
var $video2 = document.getElementById('video2');
var mediaSource = new MediaSource();
var sourceBuffer = null;
$video2.src = window.URL.createObjectURL(mediaSource);
$video2.play();
mediaSource.addEventListener('sourceopen', function(e) {
sourceBuffer = mediaSource.addSourceBuffer(SOURCE_BUFFER_MIME);
});
//Start socketIO connection
socket = io.connect('http://localhost:3001/');
socket.on('connect', function() {
console.log('Connected');
socket.on('stream', function(data) {
if (sourceBuffer !== null) {
console.count('Received');
buffers.unshift(new Uint8Array(data));
append();
}
});
});
function append() {
if (buffers.length && mediaSource.readyState == "open" &&
(sourceBuffer !== null && !sourceBuffer.updating)) {
var buffer = buffers[0];
console.log(buffer.byteLength);
buffers.shift();
sourceBuffer.appendBuffer(buffer);
}
}
navigator.webkitGetUserMedia({audio: true, video: true},
function(stream) {
$video1.src = window.URL.createObjectURL(stream);
$video1.play();
//Extract data
recorder = new MediaRecorder(stream, { mimeType : RECORDER_MIME });
recorder.ondataavailable = function(e) {
// recorder.pause();
if (e.data && e.data.size > 0) {
socket.emit('stream', e.data);
}
};
recorder.start(500);
//recorder.start(5000); //this causes the pause right after the next batch is appended
},
function(err) {
console.log(err);
}
);
I am using the MediaStreamRecorder.js library for audio capture with Javascript. Almost everything is ok. The only problem I am finding is that when I click on Stop to stop recording the red recording icon is still up there, on the tab.
Anyone know how I remove this icon when you click Stop?
Example in jsFiddle:https://jsfiddle.net/davidsadan/wazb1jks
Here is the print of how it is when I click Stop:
ps: It only works with https, Sorry my english, im brazilian...
var mediaConstraints = {
audio: true
};
var mediaRecorder;
var blobURL;
function onMediaSuccess(stream) {
$(function(){
mediaRecorder = new MediaStreamRecorder(stream);
mediaRecorder.mimeType = 'audio/wav';
mediaRecorder.audioChannels = 1;
mediaRecorder.ondataavailable = function (blob) {
// POST/PUT "Blob" using FormData/XHR2
blobURL = URL.createObjectURL(blob);
$("#result").append('<audio controls src="' + blobURL + '"></audio><br>' + blobURL + '');
};
mediaRecorder.start(5000*5000);
setTimeout(function(){
mediaRecorder.stop();
}, 120 * 1000);//Se não clicar em parar, a gravação para automaticamente em 2 minutos.
});
}
function onMediaError(e) {
console.error('media error', e);
}
function onStop(){
mediaRecorder.stop();
mediaRecorder.stream.stop();
}
var interval;
function contadorIncremento(){
var count = 1;
interval = setInterval(function() {
if(count > 1)
$('.contador').html("setInterval: Ja passou "+ count++ +" segundos!");
else
$('.contador').html("setInterval: Ja passou "+ count++ +" segundo!");
}, 1000);
}
function stopContadorIncremento(){
clearTimeout(interval);
$('.contador').html("");
}
$(function(){
$(".play").on("click", function(){
navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);
contadorIncremento();
});
$(".stop").on("click", function(){
mediaRecorder.stop();
stopContadorIncremento();
});
$(".resume").on("click", function(){
mediaRecorder.resume();
});
$(".salvar").on("click", function(){
mediaRecorder.save();
});
});
<script src="https://webrtcexperiment-webrtc.netdna-ssl.com/MediaStreamRecorder.js"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
<div id="result"></div>
<button class="play">Start</button>
<button class="stop">Stop</button>
<button class="resume">Resume</button>
<button class="salvar">Salvar</button>
<div class="contador"></div>
You must aware that the red recording point is launched by the getUserMedia method. That method starts capturing the webcam or microphone as in your case (audio-only).
What happens with your code, is that you stops the recording feature, mediaRecorder.stop() but you are not stopping the capturing request.
So keep the reference to the track that you want to stop and stop it on demand.
var localStream;
// ...keep track onSuccess
function onMediaSuccess(stream) {
localStream = stream;
// ...
}
// ...close it when requested (TIP: add req status variables)
$(".stop").on("click", function(){
mediaRecorder.stop();
localStream.stop();
Hope this helps!
EDITED
Due a dreprecation of the stream.stop() function, what you need to do is to select the specific track and stop it.
There are three MediaStream deprecations in Chrome 45:
MediaStream.ended
MediaStream.label
MediaStream.stop()
In parallel are two additions:
MediaStream.active
MediaStreamTrack.stop()
So to stop it, first get these tracks (you should have only one).
// ...close it when requested (TIP: add req status variables)
$(".stop").on("click", function(){
mediaRecorder.stop();
// can also use getAudioTracks() or getVideoTracks()
var track = localStream.getTracks()[0]; // if only one media track
// ...
track.stop();
});
Check more info about this upgrade and related resources here: https://developers.google.com/web/updates/2015/07/mediastream-deprecations?hl=en#stop-ended-and-active
You can also do this..
var tracks = localStream.getTracks();
tracks.forEach(function(track){
track.stop();
});
var mediaConstraints = {
audio: true
};
var mediaRecorder;
var blobURL;
function onMediaSuccess(stream) {
$(function(){
mediaRecorder = new MediaStreamRecorder(stream);
mediaRecorder.mimeType = 'audio/wav';
mediaRecorder.audioChannels = 1;
mediaRecorder.ondataavailable = function (blob) {
// POST/PUT "Blob" using FormData/XHR2
blobURL = URL.createObjectURL(blob);
$("#result").append('<audio controls src="' + blobURL + '"></audio><br>' + blobURL + '');
};
mediaRecorder.start(5000*5000);
setTimeout(function(){
mediaRecorder.getTracks().forEach(track => track.stop());
}, 120 * 1000);//Se não clicar em parar, a gravação para automaticamente em 2 minutos.
});
}
function onMediaError(e) {
console.error('media error', e);
}
function onStop(){
mediaRecorder.getTracks().forEach(track => track.stop());
mediaRecorder.stream.getTracks().forEach(track => track.stop());
}
var interval;
function contadorIncremento(){
var count = 1;
interval = setInterval(function() {
if(count > 1)
$('.contador').html("setInterval: Ja passou "+ count++ +" segundos!");
else
$('.contador').html("setInterval: Ja passou "+ count++ +" segundo!");
}, 1000);
}
function stopContadorIncremento(){
clearTimeout(interval);
$('.contador').html("");
}
$(function(){
$(".play").on("click", function(){
navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);
contadorIncremento();
});
$(".stop").on("click", function(){
mediaRecorder.getTracks().forEach(track => track.stop());
stopContadorIncremento();
});
$(".resume").on("click", function(){
mediaRecorder.resume();
});
$(".salvar").on("click", function(){
mediaRecorder.save();
});
});
I am trying to work out with first demo MediaSource try out but I am unable to play video. Source I am using is a webm video "big-buck-bunny_trailer.webm". I keep getting "Uncaught InvalidStateError: Failed to execute 'endOfStream' on 'MediaSource': The 'updating' attribute is true on one or more of this MediaSource's SourceBuffers.". I have handled the sourcebuffer updating flag as well but still it doesn't seem to work.
window.URL = window.URL || window.webkitURL;
window.MediaSource = window.MediaSource || window.WebKitMediaSource;
if (!!!window.MediaSource) {
//handle fallback
alert('MediaSource API is not available');
}
var mediaSource = new MediaSource();
var sourceBuffer;
var audio;
var playRange = 0;
var queue = [];
var playing = false;
var contentLength = undefined;
function ajaxMediaGET(url, range, async, callback) {
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "arraybuffer";
xhr.setRequestHeader("Range", range);
xhr.addEventListener('load', onLoad, false);
xhr.send();
function onLoad() {
playRange += Number(xhr.getResponseHeader("Content-Length"));
if(!contentLength) {
var contentLengthString = xhr.getResponseHeader("Content-Range").split("/");
contentLength = Number(contentLengthString[1]);
}
//callback(new Uint8Array(xhr.response));
sourceBuffer.addEventListener("update", function() {
if(!sourceBuffer.updating && queue.length > 0) {// && mediaSource.readyState == "open"
sourceBuffer.appendBuffer(queue.shift());
sourceBuffer.removeEventListener("update");
}
}, false);
if (!sourceBuffer.updating) {
sourceBuffer.appendBuffer(new Uint8Array(xhr.response));
} else {
queue.push(new Uint8Array(xhr.response));
}
if(playRange < contentLength - 1) {
ajaxMediaGET("path-to-server", loadNextRange(playRange), true, undefined);
} else {
mediaSource.endOfStream();
}
}
}
function mediaSourceOpen(e) {
sourceBuffer = e.target.addSourceBuffer('video/webm;codecs="vp8,vorbis"');
ajaxMediaGET("path-to-server", loadNextRange(playRange), true, undefined);
/*if(!playing) {
audio.play();
playing = true;
}*/
}
function initPlayer() {
audio = document.querySelector('video');
audio.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', mediaSourceOpen, false);
mediaSource.addEventListener('webkitsourceopen', mediaSourceOpen, false);
}
initPlayer();
You need to wait until the sourceBuffer has completed updating. Replace mediaSource.endOfStream(); with
if(playRange < contentLength - 1) {
ajaxMediaGET("path-to-server", loadNextRange(playRange), true, undefined);
} else {
sourceBuffer.addEventListener('updateend', function(){
mediaSource.endOfStream();
});
}
Have a look at the docs and sample code for
MediaSource.endOfStream()