MediaSource appending to SourceBuffer does not work after the first time - javascript

I am currently trying to stream a .webm video file via socket.io to my client (using Chrome as the client).
Appending the first Uint8Array to the SourceBuffer works fine but appending further ones does not work and throws the following error:
Uncaught DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': The HTMLMediaElement.error attribute is not null.
My current code:
'use strict';

let socket = io.connect('http://localhost:1337');
let mediaSource = new MediaSource();
let video = document.getElementById("player");
let queue = [];
let sourceBuffer;

video.src = window.URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function() {
    sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');

    socket.on("video", function(data) {
        let uIntArray = new Uint8Array(data);

        if (!sourceBuffer.updating) {
            sourceBuffer.appendBuffer(uIntArray);
        } else {
            queue.push(data);
        }
    });
});
Server-side code (snippet):
io.on('connection', function(socket) {
    console.log("Client connected");

    let readStream = fs.createReadStream("bunny.webm");

    readStream.addListener('data', function(data) {
        socket.emit('video', data);
    });
});
I also removed the webkit checks since this will only run on Chromium browsers.

I think you have to free the buffer; see the remove() function:
http://w3c.github.io/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
Let me know if it helped.
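For reference, here is a sketch of how the queued chunks could be drained and old data evicted with remove(); the updateend handler and the 30-second retention window are illustrative assumptions, not part of the original snippets:

// Sketch only: drain the queue once the previous append finishes,
// and evict data that is already far behind the playhead.
sourceBuffer.addEventListener('updateend', function() {
    // Append the next queued chunk, if any.
    if (queue.length > 0 && !sourceBuffer.updating) {
        sourceBuffer.appendBuffer(new Uint8Array(queue.shift()));
        return;
    }
    // Free everything more than 30 seconds behind the current playback position.
    if (!sourceBuffer.updating && video.currentTime > 30) {
        sourceBuffer.remove(0, video.currentTime - 30);
    }
});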

Related

JavaScript: Use MediaRecorder to record streams from <video> but failed

I'm trying to record parts of a video from a <video> tag and save them for later use. I found this article: Recording a media element, which describes a method of first calling stream = video.captureStream(), then using new MediaRecorder(stream) to get a recorder.
I've tested some demos; MediaRecorder works fine if the stream comes from the user's device (such as a microphone). However, when it comes to a media element, my Firefox browser throws an exception: MediaRecorder.start: The MediaStream's isolation properties disallow access from MediaRecorder.
So, any idea how to deal with it?
Browser: Firefox
The page (including the JS file) is stored locally.
The src attribute of the <video> tag could be either a file from local storage or a URL from the Internet.
Code snippets:
let chunks = [];

let getCaptureStream = function () {
    let stream;
    const fps = 0;
    if (video.captureStream) {
        console.log("use captureStream");
        stream = video.captureStream(fps);
    } else if (video.mozCaptureStream) {
        console.log("use mozCaptureStream");
        stream = video.mozCaptureStream(fps);
    } else {
        console.error('Stream capture is not supported');
        stream = null;
    }
    return stream;
};

video.addEventListener('play', () => {
    let stream = getCaptureStream();
    const mediaRecorder = new MediaRecorder(stream);

    mediaRecorder.onstop = function() {
        const newVideo = document.createElement('video');
        newVideo.setAttribute('controls', '');
        newVideo.controls = true;

        const blob = new Blob(chunks, { 'type': 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"' });
        chunks = [];
        const videoURL = window.URL.createObjectURL(blob);
        newVideo.src = videoURL;

        document.body.appendChild(newVideo);
    };

    mediaRecorder.ondataavailable = function(e) {
        chunks.push(e.data);
    };

    stopButton.onclick = function() {
        mediaRecorder.stop();
    };

    mediaRecorder.start(); // This is the line that triggers the exception.
});
I found the solution myself.
When I turned to Chrome, it showed a CORS issue that prevented me from even playing the original video. So I guess it is the same security policy that prevents MediaRecorder from accessing the MediaStream. Therefore, I deployed the local files to a local server, following the instructions on this page.
After that, MediaRecorder started working. Hope this will help someone in need.
But still, the official documentation doesn't seem to say much about the isolation properties of media elements, so any further idea or explanation is welcome.
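For anyone in the same situation, a minimal static file server in Node.js is enough to serve the page over http:// instead of file://. This is only a rough sketch (the port and directory are arbitrary, and no MIME types are set):

// serve.js - minimal static server so the page is loaded via http:// rather than file://
const http = require('http');
const fs = require('fs');
const path = require('path');

http.createServer((req, res) => {
    // Map "/" to index.html; everything else is resolved relative to this directory.
    const file = path.join(__dirname, req.url === '/' ? 'index.html' : req.url);
    fs.readFile(file, (err, data) => {
        if (err) {
            res.writeHead(404);
            res.end('Not found');
            return;
        }
        res.writeHead(200);
        res.end(data);
    });
}).listen(8080, () => console.log('Serving on http://localhost:8080'));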

Stream live audio to Node.js server

I'm working on a project and I need to send an audio stream to a Node.js server. I'm able to capture microphone sound with this function:
function micCapture() {
    'use strict';

    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;

    var constraints = {
        audio: true,
        video: false
    };

    var video = document.querySelector('video');

    function successCallback(stream) {
        window.stream = stream; // stream available to console
        if (window.URL) {
            video.src = window.webkitURL.createObjectURL(stream);
        } else {
            video.src = stream;
        }
        //Send audio stream
        //server.send(stream);
    }

    function errorCallback(error) {
        console.log('navigator.getUserMedia error: ', error);
    }

    navigator.getUserMedia(constraints, successCallback, errorCallback);
}
As you can see, I'm able to capture audio and play it on the website.
Now I want to send that audio stream to a Node.js server and send it back to other clients, like a voice chat, but I don't want to use WebRTC because I need the stream on the server. How can I achieve this? Can I use socket.io-stream to do this? In the examples I saw, they recorded the audio and sent a file, but I need "live" audio.
I have recently done live audio upload from the browser to the server using socket.io. I'll answer here in case someone else needs it.
var stream;
var socket = io();
var bufferSize = 1024 * 16;
var audioContext = new AudioContext();

// createScriptProcessor is deprecated. Let me know if anyone finds an alternative.
var processor = audioContext.createScriptProcessor(bufferSize, 1, 1);
processor.connect(audioContext.destination);

navigator.mediaDevices.getUserMedia({ video: false, audio: true })
    .then(handleMicStream)
    .catch(err => {
        console.log('error from getUserMedia', err);
    });
handleMicStream will run when the user grants permission to use the microphone.
function handleMicStream(streamObj) {
    // keep the stream in a global variable
    stream = streamObj;
    input = audioContext.createMediaStreamSource(stream);
    input.connect(processor);
    processor.onaudioprocess = e => {
        microphoneProcess(e); // receives data from the microphone
    };
}

function microphoneProcess(e) {
    const left = e.inputBuffer.getChannelData(0); // get only one audio channel
    const left16 = convertFloat32ToInt16(left);   // skip this if you don't need it
    socket.emit('micBinaryStream', left16);       // send to the server via web socket
}
// Converts Float32 samples to 16-bit PCM, keeping every third sample
// (a crude decimation, e.g. 48 kHz down to 16 kHz).
function convertFloat32ToInt16(buffer) {
    let l = buffer.length;
    const buf = new Int16Array(l / 3);
    while (l--) {
        if (l % 3 === 0) {
            buf[l / 3] = buffer[l] * 0x7FFF; // scale to the signed 16-bit range
        }
    }
    return buf.buffer;
}
Have your socket.io server listen for micBinaryStream and you should get the data. I needed the data in BINARY16 format for a Google API; if you do not need that, you can skip the call to convertFloat32ToInt16().
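The server side is not shown in this answer; a minimal socket.io server that receives the chunks could look like the sketch below (the port and what you do with each chunk are assumptions for illustration):

// Sketch only: a socket.io server that receives the 16-bit audio chunks.
const http = require('http');
const { Server } = require('socket.io');

const httpServer = http.createServer();
const io = new Server(httpServer);

io.on('connection', (socket) => {
    socket.on('micBinaryStream', (chunk) => {
        // chunk is the raw 16-bit PCM data sent from the browser.
        // Forward it, write it to a file, or pipe it to a speech API here.
        console.log('received', chunk.byteLength, 'bytes of audio');
    });
});

httpServer.listen(3000);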
Important
When you need to stop listening, you MUST disconnect the processor and end the stream. Run the closeAll() function below.
function closeAll() {
    const tracks = stream ? stream.getTracks() : null;
    const track = tracks ? tracks[0] : null;

    if (track) {
        track.stop();
    }

    if (processor) {
        if (input) {
            try {
                input.disconnect(processor);
            } catch (error) {
                console.warn('Attempt to disconnect input failed.');
            }
        }
        processor.disconnect(audioContext.destination);
    }

    if (audioContext) {
        audioContext.close().then(() => {
            input = null;
            processor = null;
            audioContext = null;
        });
    }
}
It's an old question, I see. I'm doing the same thing (except my server doesn't run Node.js and is written in C#) and stumbled upon this.
I don't know if anyone is still interested, but I've elaborated a bit. The current alternative to the deprecated createScriptProcessor is the AudioWorklet interface.
From: https://webaudio.github.io/web-audio-api/#audioworklet
1.32.1. Concepts
The AudioWorklet object allows developers to supply scripts (such as JavaScript or WebAssembly code) to process audio on the rendering thread, supporting custom AudioNodes. This processing mechanism ensures synchronous execution of the script code with other built-in AudioNodes in the audio graph.
You cannot implement interfaces in JavaScript as far as I know, but you can extend a class derived from one.
And the one we need is: https://developer.mozilla.org/en-US/docs/Web/API/AudioWorkletProcessor
So I wrote a processor that just mirrors the input to the output and logs the values.
// custom-audio-processor.js
class CustomAudioProcessor extends AudioWorkletProcessor {
    process(inputs, outputs, parameters) {
        const input = inputs[0];
        const output = outputs[0];
        for (let channel = 0; channel < input.length; ++channel) {
            for (let i = 0; i < input[channel].length; ++i) {
                // Just copying all the data from input to output
                output[channel][i] = input[channel][i];
                // The next one will make the app crash but yeah, the values are there
                // console.log(output[channel][i]);
            }
        }
        return true; // keep the processor alive
    }
}

registerProcessor("custom-audio-processor", CustomAudioProcessor);
The processor must then be placed into the audio pipeline, after the microphone and before the speakers.
function record() {
    constraints = { audio: true };
    navigator.mediaDevices.getUserMedia(constraints)
        .then(function(stream) {
            audioCtx = new AudioContext();
            var source = audioCtx.createMediaStreamSource(stream);
            audioCtx.audioWorklet.addModule("custom-audio-processor.js").then(() => {
                customAudioProcessor = new AudioWorkletNode(audioCtx, "custom-audio-processor");
                source.connect(customAudioProcessor);
                customAudioProcessor.connect(audioCtx.destination);
                // audioCtx.destination.play(); // not needed: the destination node plays automatically
            });
        });
}
Works! Good luck! :)
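Since the goal in this thread is to get the audio to a Node.js server, the worklet can also forward its samples to the main thread over its MessagePort, and the main thread can then emit them over the socket. This is a sketch, not part of the original answer; the event name simply reuses micBinaryStream from the earlier answer:

// Inside the processor (custom-audio-processor.js), post each block to the main thread:
// this.port.postMessage(inputs[0][0]); // Float32Array of 128 samples

// On the main thread, after creating the node:
customAudioProcessor.port.onmessage = (e) => {
    socket.emit('micBinaryStream', e.data); // forward the Float32 samples to the server
};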

Playing RTP Video Stream with HTML5-Video-Tag

I am sending RTP video data from an Android phone to my computer. The RTP streaming works, proven with ffplay.
Now I'm trying to receive and display the stream inside Chrome (63.0.3239.84, Win10, 64-bit). I receive the RTP packets with Node.js and pass them to all connected users with the help of socket.io.
To display the video in an HTML5 <video> tag, I tried an approach similar to the example in the MediaSource API docs. This is what I've tried in order to append the video data on the fly:
var socket = io();
var ms = new MediaSource();
var mimeCodec = 'video/mp4; codecs="avc1.42C01E"';
var queue = [];
var video = document.querySelector('video');

// receiving the rtp packets from android, forwarded by node's socket.io
socket.on('video-fragment', function(fragment) {
    queue.push(fragment.slice(11)); // cutting off the rtp header to get plain NAL units
});

video.src = URL.createObjectURL(ms);

var timeout = 500;
var appendChunk = function() {
    if (queue.length > 0) {
        timeout = 500;
        var append = function() {
            var sb = ms.addSourceBuffer(mimeCodec);
            sb.appendBuffer(queue.shift());
            sb.addEventListener('updateend', function() {
                if (video.paused) {
                    console.log("try to start video");
                    video.play();
                }
                appendChunk();
            });
            ms.removeSourceBuffer(sb);
        };
        if (ms.readyState == "open") {
            append();
        } else {
            ms.addEventListener('sourceopen', function() {
                appendChunk();
            });
        }
    } else {
        setTimeout(appendChunk, timeout); // try again after some timeout...
        timeout *= 2;
    }
};

appendChunk(); // start recursive call

<video></video>
However, I don't get any errors from my video object. If I close the MediaSource at some point with ms.endOfStream() and try to start the video after that, I get a MediaError with code 4: DEMUXER_ERROR_COULD_NOT_OPEN.
Any suggestions? Am I at least on the right track? Or is this kind of video streaming not possible in Chrome, and is the only way of "live streaming" to use a playlist and video file chunks?
The browser only supports video in an MP4 (or WebM) container; it does not support the RTP protocol. The stream must be repackaged into a container format the browser supports.
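One common way to do that repackaging (a sketch, not part of the original answer) is to run ffmpeg on the server: feed it the RTP stream described by an SDP file and let it emit fragmented MP4 on stdout, which Node.js then forwards over socket.io. The SDP file name, the socket.io event name, and the presence of an io instance are assumptions:

// Sketch: remux incoming RTP (described by stream.sdp) to fragmented MP4
// and forward the chunks to connected socket.io clients.
const { spawn } = require('child_process');

const ffmpeg = spawn('ffmpeg', [
    '-protocol_whitelist', 'file,udp,rtp',
    '-i', 'stream.sdp',                      // SDP describing the incoming RTP stream
    '-c', 'copy',                            // no re-encoding, only repackaging
    '-f', 'mp4',
    '-movflags', 'frag_keyframe+empty_moov', // fragmented MP4 suitable for MSE
    'pipe:1'
]);

ffmpeg.stdout.on('data', (chunk) => {
    io.emit('video-fragment', chunk);        // clients append this to a SourceBuffer
});

ffmpeg.stderr.on('data', (d) => console.error(d.toString()));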

How to play huge WEBM file Locally

I am working with MediaRecorder to record video. I need to show a LOCAL preview of this video directly in the browser. My problem is long recordings and the huge videos they produce: I can't keep the MediaRecorder blobs in memory, because that would crash the browser on machines with little RAM. So I store each WebM blob from ondataavailable in IndexedDB.
After recording finishes, I request all stored data from IndexedDB with a getAll transaction, but then I have the same memory problem again. How can I play this array of blobs without loading all the data into memory? How can I buffer this data?
I'm trying to use the MediaSource API.
My method to get data from IndexedDB:
function getAllData(indexedDB) {
    return new Promise(function (resolve, reject) {
        var transaction = indexedDB.transaction(["blobs"], 'readonly');
        var dbResult = transaction.objectStore("blobs").getAll();

        dbResult.onerror = function (error) {
            reject('failed to read from db: ' + error);
        };

        dbResult.onsuccess = function (event) {
            resolve(event.target.result);
        };
    });
}
And this is how I try to play it via the MediaSource API:
getAllData().then(function (chunks) {
    var mediaSource = new MediaSource();
    var sourceBuffer;
    var player = document.createElement('video');
    var previewContainer = document.getElementById('preview_container');

    player.width = 300;
    player.height = 200;
    player.autoPlay = false;
    player.controls = true;
    player.src = URL.createObjectURL(mediaSource);

    mediaSource.addEventListener('sourceopen', function (e) {
        sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');

        (function readChunks(index) {
            var fileReader = new FileReader();
            index = index || 0;
            if (index >= chunks.length) {
                return;
            }
            fileReader.onload = function (e) {
                sourceBuffer.appendBuffer(new Uint8Array(e.target.result));
                readChunks(++index);
            };
            fileReader.readAsArrayBuffer(chunks[index]);
        })();
    }, false);

    previewContainer.innerHTML = "";
    previewContainer.appendChild(player);
    player.play();
}).catch(function (e) {
    console.log('error on get data from db: ' + e);
});
But I get the error "Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source."
Update:
I also tested this code in Firefox. It partially worked: after adding chunk 100 to the sourceBuffer, I got a QuotaExceededError.
So I have two main questions:
How can I fix the SourceBuffer error, and why does it happen?
Is this the best way to store and play huge .webm files locally?
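For reference, the usual MediaSource pattern is to append the next chunk only after the SourceBuffer fires updateend, rather than appending from each FileReader callback as fast as the chunks are read. A minimal sketch of that pattern, reusing the chunks array from getAll (the helper name is illustrative):

// Sketch only: append the next IndexedDB chunk after the previous append completes.
function appendSequentially(sourceBuffer, chunks) {
    var index = 0;

    function appendNext() {
        if (index >= chunks.length || sourceBuffer.updating) {
            return;
        }
        var fileReader = new FileReader();
        fileReader.onload = function (e) {
            sourceBuffer.appendBuffer(new Uint8Array(e.target.result));
            index++;
        };
        fileReader.readAsArrayBuffer(chunks[index]);
    }

    // 'updateend' fires when an appendBuffer (or remove) operation has finished.
    sourceBuffer.addEventListener('updateend', appendNext);
    appendNext();
}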

Streaming microphone through sockets using socket IO and Node JS

I am making an application where I want the user to use their mic (on their phone) and be able to talk to the others in the game lobby. However, this has proven to be more difficult than expected.
I am using Node.js with socket.io and socket.io-stream.
On the client I am using the Web Audio API to capture my microphone input (I am not really worried about this part all that much, because I am going to make this a native iOS app):
navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);

if (navigator.getUserMedia) {
    navigator.getUserMedia(
        // constraints
        {
            video: false,
            audio: true
        },
        function(localMediaStream) {
            var video = document.querySelector('audio');
            video.src = window.URL.createObjectURL(localMediaStream);
            lcm = localMediaStream;

            var audioContext = window.AudioContext;
            var context = new audioContext();
            var audioInput = context.createMediaStreamSource(localMediaStream);
            var bufferSize = 2048;
            // create a javascript node
            var recorder = context.createScriptProcessor(bufferSize, 1, 1);
            // specify the processing function
            recorder.onaudioprocess = recorderProcess;
            // connect stream to our recorder
            audioInput.connect(recorder);
            // connect our recorder to the previous destination
            recorder.connect(context.destination);
        },
        // errorCallback
        function(err) {
            console.log("The following error occurred: " + err);
            $("video").remove();
            alert("##");
        }
    );
} else {
    console.log("getUserMedia not supported");
}

function recorderProcess(e) {
    var left = e.inputBuffer.getChannelData(0);
    window.stream.write(convertFloat32ToInt16(left));
    //var f = $("#aud").attr("src");
    var src = window.URL.createObjectURL(lcm);
    ss(socket).emit('file', src, { size: src.size });
    ss.createBlobReadStream(src).pipe(window.stream);
    //ss.createReadStream(f).pipe(window.stream);
}

function convertFloat32ToInt16(buffer) {
    l = buffer.length;
    buf = new Int16Array(l);
    while (l--) {
        buf[l] = Math.min(1, buffer[l]) * 0x7FFF;
    }
    return buf.buffer;
}
});
ss(socket).on('back', function(stream, data) {
    //console.log(stream);
    var video = document.querySelector('audio');
    video.src = window.URL.createObjectURL(stream);
    console.log("getting mic data");
});
With this I can successfully listen to myself speak through the microphone. I am using the stream socket to create a blob to upload to my server:
index.ss(socket).on('file', function(stream, data) {
    console.log("getting stream");
    var filename = index.path.basename(data.name);
    //var myfs = index.fs.createWriteStream(filename);
    var fileWriter = new index.wav.FileWriter('demo.wav', {
        channels: 1,
        sampleRate: 48000,
        bitDepth: 16
    });
    var streams = index.ss.createStream();
    streams.pipe(fileWriter);
    index.ss(socket).emit('back', fileWriter, { size: fileWriter.size });
});
I cannot get the stream to write to a file, or even to a temporary buffer, and then stream it back to a client so I can play the audio in real time. After a while the server crashes, saying that the pipe is not writable.
Has anyone else encountered this?
By using the SFMediaStream library you can use socket.io and a Node.js server to live-stream your microphone from the browser. But this library still needs some improvement before it is ready for production.
For the presenter
var mySocket = io("/", {transports:['websocket']});
// Set latency to 100ms (Equal with streamer)
var presenterMedia = new ScarletsMediaPresenter({
audio:{
channelCount:1,
echoCancellation: false
}
}, 100);
// Every new client streamer must receive this header buffer data
presenterMedia.onRecordingReady = function(packet){
mySocket.emit('bufferHeader', packet);
}
// Send buffer to the server
presenterMedia.onBufferProcess = function(streamData){
mySocket.emit('stream', streamData);
}
presenterMedia.startRecording();
For the streamer
var mySocket = io("/", {transports:['websocket']});
// Set latency to 100ms (Equal with presenter)
var audioStreamer = new ScarletsAudioBufferStreamer(100);
audioStreamer.playStream();
// Buffer header must be received first
mySocket.on('bufferHeader', function(packet){
audioStreamer.setBufferHeader(packet);
});
// Receive buffer and play it
mySocket.on('stream', function(packet){
// audioStreamer.realtimeBufferPlay(packet);
audioStreamer.receiveBuffer(packet);
});
// Request buffer header
mySocket.emit('requestBufferHeader', '');
Or you can test it from your localhost with this example
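The answer does not include the server part; a minimal socket.io relay that forwards the presenter's packets to the streamers could look like this sketch (the port, the broadcast strategy, and the cached header variable are assumptions):

// Sketch only: relay bufferHeader and stream packets from the presenter to all streamers.
const io = require('socket.io')(3000);

let bufferHeader = null; // cache the latest header for late joiners

io.on('connection', (socket) => {
    socket.on('bufferHeader', (packet) => {
        bufferHeader = packet;
        socket.broadcast.emit('bufferHeader', packet);
    });

    socket.on('stream', (packet) => {
        socket.broadcast.emit('stream', packet);
    });

    socket.on('requestBufferHeader', () => {
        if (bufferHeader) {
            socket.emit('bufferHeader', bufferHeader);
        }
    });
});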
