WebRTC & PeerJS Calling Issues - javascript

In my function connect(id) I start a call using var call = this.peer.call(id, this.mediastream); and then attach
call.on('stream', function(stream)
{
    // `stream` is the MediaStream of the remote peer.
    // Here you'd add it to an HTML video/canvas element.
    peerVideo.srcObject = stream;
    console.log("Call stream has started! Client end!" + stream);
    output.innerHTML = "Streaming audio from other client...";
});
to get the audio from the peer. This 'stream' handler is never executed, so it seems the call is not returning a stream.
On the peer's side of things they can hear the caller just fine. I'm not sure what is wrong!
Here is the full code:
const video = document.getElementById("video");
const peerVideo = document.getElementById("peervideo");
const output = document.getElementById("output");
const peerText = document.getElementById("peerid");
const peerButton = document.getElementById("peersubmit");

var promise = null;
var mediastream = null;
var peer = null;
var myId = null;

async function init()
{
    //init the media devices
    initMediaDevices();
    //init peer
    initPeer();
    //Add timeout to ensure the peer.on method had time to get id from server
    setTimeout(() => { console.log("My ID is: " + this.myId); output.innerHTML = "My ID: " + formatString(this.myId)}, 2000);
}

function initMediaDevices()
{
    //Setup stream for usermedia
    try
    {
        this.promise = navigator.mediaDevices.getUserMedia({audio: true})
        promise.then((stream) =>
        {
            setStream(stream);
            //video.srcObject = stream;
            console.log(stream);
        })
        output.innerHTML = "Audio was established!";
    }
    catch(err)
    {
        console.log(err)
        output.innerHTML = "Audio Failed!"
    }
}

function initPeer()
{
    this.peer = new Peer();
    this.peer.on('open', function(id)
    {
        setID(id)
    });
    peer.on('call', function(call)
    {
        // Answer the call, providing our mediaStream
        call.answer(this.mediastream);
        console.log(call + "-------------------- got here peer call");
        output.innerHTML = "Connected to Peer";
        call.on('stream', function(stream)
        {
            // `stream` is the MediaStream of the remote peer.
            // Here you'd add it to an HTML video/canvas element.
            peerVideo.srcObject = stream;
            console.log("Call stream has started! Peer end");
            output.innerHTML = "Streaming audio from other client...";
        });
    });
}

function setID(id)
{
    this.myId = id;
}

function setStream(stream)
{
    this.mediastream = stream;
    console.log("Media Stream Set! " + this.mediastream);
}

function connect(id)
{
    var call = this.peer.call(id, this.mediastream);
    call.on('stream', function(stream)
    {
        // `stream` is the MediaStream of the remote peer.
        // Here you'd add it to an HTML video/canvas element.
        peerVideo.srcObject = stream;
        console.log("Call stream has started! Client end!" + stream);
        output.innerHTML = "Streaming audio from other client...";
    });
    console.log(call + "----------------------" + this.mediastream + " This is the person that connected");
}

init();

//Event listeners
peerButton.addEventListener("click", () =>
{
    let id = peerText.value;
    console.log("Button Pressed!")
    connect(id);
});

//unrelated
function formatString(string)
{
    var newString = "";
    for (var i = 0; i < string.length; i++)
    {
        var letter = string.charAt(i);
        if(isNaN(letter))
        {
            newString += letter.fontcolor("red");
        }
        else
        {
            newString += letter;
        }
    }
    return newString;
}
And the HTML:
<video id="video" autoplay>Video Stream no available</video>
<video id="peervideo" autoplay>Video Stream no available</video>
<h3 id="output"></h3>
<input type="text" id="peerid">
<input type="submit" value="Submit" id="peersubmit">

I had the same issue.
If you attach a debugger and set breakpoints around
var call = this.peer.call(id, this.mediastream);
call.on('stream', function(stream) { ... });
you will receive the stream. It sounds like you need a setTimeout to work around this behaviour, and I know that works... but it's bad practice.

Add a timeout to the connect function and it will work: your connect(id) is triggered before the other user has completed navigator.getUserMedia().

Actually, it's because you executed call.on before navigator.mediaDevices.getUserMedia had resolved. Try awaiting initMediaDevices() to make sure initPeer() executes afterwards.
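For example, a minimal sketch of that ordering (assuming initMediaDevices is rewritten to return the getUserMedia promise):
async function initMediaDevices()
{
    // Awaiting the promise guarantees `mediastream` is set before
    // initPeer() registers peer.on('call') or any call is placed.
    const stream = await navigator.mediaDevices.getUserMedia({audio: true});
    setStream(stream);
    output.innerHTML = "Audio was established!";
}

async function init()
{
    await initMediaDevices(); // wait for the microphone stream first
    initPeer();               // only then wire up PeerJS
}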

Related

Can you Trim an MP4 Video using only JavaScript and HTML? [duplicate]

I am trying to trim the length of a video within the browser, from either the beginning or the end. What I currently have is a MediaStream which is being recorded using the MediaRecorder API, which I use to produce a Blob at the end of the recording with its type set to 'video/mp4'. This works great and I am able to play the video back, however I would like a way to trim the video at either the start or the end.
Upon further investigation of the MediaStream API I came across the SourceBuffer object, which you can obtain from a MediaStreamTrack and use to remove a time slice, which is exactly what I want. However I am unsure of how to obtain the MediaStreamTrack from the video (blob), since the video.srcObject property returns null.
I am trying to trim the length of a video within the browser, from
either the beginning or the end.
You can use Array.prototype.slice() to remove one-second (1000 ms) segments, or another time range, from the array of Blobs that is populated at each MediaRecorder dataavailable event. For this to work, MediaRecorder.start() is called with the parameter 1000, so that each recorded Blob holds 1000 ms of recorded data.
The approach uses a modified version of https://github.com/samdutton/simpl/tree/gh-pages/mediarecorder, with <input type="number"> elements added to choose how many 1 s chunks of the recorded video to keep for both playback and download using .slice().
html
<video id="gum" autoplay muted controls></video>
<video id="recorded" autoplay controls></video>
<div>
<button id="record">Start Recording</button><label for="record"></label><br>
<span>Seconds of recorded video to play (min 1):</span><input min="1" type="number" disabled />
<button id="play" disabled>Play</button>
<span>Seconds of recorded video to download (min 1):</span><input min="1" type="number" disabled /><button id="download" disabled>Download</button>
</div>
javascript
'use strict';
/* globals MediaRecorder */
// This code is adapted from
// https://rawgit.com/Miguelao/demos/master/mediarecorder.html
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
var mediaRecorder;
var recordedBlobs;
var sourceBuffer;

var gumVideo = document.querySelector('video#gum');
var recordedVideo = document.querySelector('video#recorded');
var input = document.querySelectorAll("input[type=number]");

recordedVideo.ontimeupdate = function(e) {
    console.log("recorded video currentTime:", e.target.currentTime)
}
gumVideo.onprogress = function(e) {
    // console.log("getUserMedia video currentTime:", e.target.currentTime)
}

var recordButton = document.querySelector('button#record');
var playButton = document.querySelector('button#play');
var downloadButton = document.querySelector('button#download');
recordButton.onclick = toggleRecording;
playButton.onclick = play;
downloadButton.onclick = download;

var currentTimes = [];
recordButton.nextElementSibling.innerHTML = "recorded video "
    + currentTimes.length
    + "s";

// window.isSecureContext could be used for Chrome
var isSecureOrigin = location.protocol === 'https:' ||
    location.host === 'localhost';
if (!isSecureOrigin) {
    alert('getUserMedia() must be run from a secure origin: HTTPS or localhost.' +
        '\n\nChanging protocol to HTTPS');
    location.protocol = 'HTTPS';
}

// Use old-style gUM to avoid requirement to enable the
// Enable experimental Web Platform features flag in Chrome 49
navigator.getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia || navigator.mozGetUserMedia;

var constraints = {
    audio: true,
    video: true
};
navigator.getUserMedia(constraints, successCallback, errorCallback);

function successCallback(stream) {
    console.log('getUserMedia() got stream: ', stream);
    window.stream = stream;
    if (window.URL) {
        gumVideo.src = window.URL.createObjectURL(stream);
    } else {
        gumVideo.src = stream;
    }
}

function errorCallback(error) {
    console.log('navigator.getUserMedia error: ', error);
}
// navigator.mediaDevices.getUserMedia(constraints)
// .then(function(stream) {
// console.log('getUserMedia() got stream: ', stream);
// window.stream = stream; // make available to browser console
// if (window.URL) {
// gumVideo.src = window.URL.createObjectURL(stream);
// } else {
// gumVideo.src = stream;
// }
// }).catch(function(error) {
// console.log('navigator.getUserMedia error: ', error);
// });
function handleSourceOpen(event) {
    console.log('MediaSource opened');
    sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
    console.log('Source buffer: ', sourceBuffer);
}

function handleDataAvailable(event) {
    if (event.data && event.data.size > 0) {
        currentTimes.push(gumVideo.currentTime);
        recordedBlobs.push(event.data);
        recordButton.nextElementSibling.innerHTML = "recorded video "
            + recordedBlobs.length
            + "s";
    }
}

function handleStop(event) {
    console.log('Recorder stopped: ', event);
    console.log("recorded times from getUserMedia video:", currentTimes);
}

function toggleRecording() {
    if (recordButton.textContent === 'Start Recording') {
        startRecording();
    } else {
        stopRecording();
        recordButton.textContent = 'Start Recording';
        playButton.disabled = false;
        downloadButton.disabled = false;
    }
}
// The nested try blocks will be simplified when Chrome 47 moves to Stable
function startRecording() {
    var options = {
        mimeType: 'video/webm',
        bitsPerSecond: 100000
    };
    recordedBlobs = [];
    currentTimes = [];
    for (var i = 0; i < input.length; i++) {
        input[i].setAttribute("max", 1);
        input[i].setAttribute("disabled", "disabled");
    }
    playButton.disabled = true;
    downloadButton.disabled = true;
    try {
        mediaRecorder = new MediaRecorder(window.stream, options);
    } catch (e0) {
        console.log('Unable to create MediaRecorder with options Object: ', e0);
        try {
            options = {
                mimeType: 'video/webm,codecs=vp9',
                bitsPerSecond: 100000
            };
            mediaRecorder = new MediaRecorder(window.stream, options);
        } catch (e1) {
            console.log('Unable to create MediaRecorder with options Object: ', e1);
            try {
                options = 'video/vp8'; // Chrome 47
                mediaRecorder = new MediaRecorder(window.stream, options);
            } catch (e2) {
                alert('MediaRecorder is not supported by this browser.\n\n' +
                    'Try Firefox 29 or later, or Chrome 47 or later,'
                    + ' with Enable experimental Web Platform features enabled '
                    + ' from chrome://flags.');
                console.error('Exception while creating MediaRecorder:', e2);
                return;
            }
        }
    }
    console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
    recordButton.textContent = 'Stop Recording';
    playButton.disabled = true;
    downloadButton.disabled = true;
    mediaRecorder.onstop = handleStop;
    mediaRecorder.ondataavailable = handleDataAvailable;
    mediaRecorder.start(1000); // collect 1000ms of data
    console.log('MediaRecorder started', mediaRecorder);
}
function stopRecording() {
    mediaRecorder.stop();
    for (var i = 0; i < input.length; i++) {
        input[i].setAttribute("max", recordedBlobs.length);
        input[i].removeAttribute("disabled");
    }
    console.log('Recorded Blobs: ', recordedBlobs);
    recordedVideo.controls = true;
}

function play() {
    console.log(`playing ${input[0].value}s of getUserMedia video `
        + `recorded by MediaRecorder from time ranges`
        , currentTimes.slice(0, input[0].value));
    // keep the first `input[0].value` seconds of the recording,
    // trimming the remainder from the end, for playback
    var file = recordedBlobs.slice(0, input[0].value);
    var superBuffer = new Blob(file, {
        type: 'video/webm'
    });
    recordedVideo.src = window.URL.createObjectURL(superBuffer);
}

function download() {
    // keep the first `input[1].value` seconds of the recording,
    // trimming the remainder from the end, for download
    var file = recordedBlobs.slice(0, input[1].value);
    var blob = new Blob(file, {
        type: 'video/webm'
    });
    var url = window.URL.createObjectURL(blob);
    var a = document.createElement('a');
    a.style.display = 'none';
    a.href = url;
    a.download = 'test.webm';
    document.body.appendChild(a);
    a.click();
    setTimeout(function() {
        document.body.removeChild(a);
        window.URL.revokeObjectURL(url);
    }, 100);
}
plnkr https://plnkr.co/edit/LxuV5jMX0RZtDxOxT1qa?p=preview
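Distilled, the snippet relies on MediaRecorder.start(1000) making each recorded Blob hold roughly one second of media, so trimming from the end is plain array slicing. A minimal sketch using the names above:
// Keep only the first `seconds` seconds of the recording, trimming the rest
// from the end; the first Blob carries the initialization segment, so the
// result stays playable.
function trimmedRecording(recordedBlobs, seconds) {
    var kept = recordedBlobs.slice(0, seconds); // one Blob per 1000 ms
    return new Blob(kept, { type: 'video/webm' });
}
// e.g. recordedVideo.src = window.URL.createObjectURL(trimmedRecording(recordedBlobs, 5));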

How do i play an HLS stream when playlist.m3u8 file is constantly being updated?

I am using MediaRecorder to record chunks of my live video in webm format from a MediaStream, converting these chunks to .ts files on the server using ffmpeg, and then updating my playlist.m3u8 file with this code:
function generateM3u8Playlist(fileDataArr, playlistFp, isLive, cb) {
    var durations = fileDataArr.map(function(fd) {
        return fd.duration;
    });
    var maxT = maxOfArr(durations);
    var meta = [
        '#EXTM3U',
        '#EXT-X-VERSION:3',
        '#EXT-X-MEDIA-SEQUENCE:0',
        '#EXT-X-ALLOW-CACHE:YES',
        '#EXT-X-TARGETDURATION:' + Math.ceil(maxT),
    ];
    fileDataArr.forEach(function(fd) {
        meta.push('#EXTINF:' + fd.duration.toFixed(2) + ',');
        meta.push(fd.fileName2);
    });
    if (!isLive) {
        meta.push('#EXT-X-ENDLIST');
    }
    meta.push('');
    meta = meta.join('\n');
    fs.writeFile(playlistFp, meta, cb);
}
Here fileDataArr holds information for all the chunks that have been created.
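For illustration, if fileDataArr held two 20-second chunks and isLive were true, the function above would write a playlist along these lines (the segment file names here are made up):
#EXTM3U
#EXT-X-VERSION:3
#EXT-X-MEDIA-SEQUENCE:0
#EXT-X-ALLOW-CACHE:YES
#EXT-X-TARGETDURATION:20
#EXTINF:20.00,
00000.ts
#EXTINF:20.00,
00001.ts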
After that I use this code to create an HLS server:
var runStreamServer = (function(streamFolder) {
    var executed = false;
    return function(streamFolder) {
        if (!executed) {
            executed = true;
            var HLSServer = require('hls-server')
            var http = require('http')

            var server = http.createServer()
            var hls = new HLSServer(server, {
                path: '/stream', // Base URI to output HLS streams
                dir: 'C:\\Users\\Work\\Desktop\\live-stream\\webcam2hls\\videos\\' + streamFolder // Directory that input files are stored
            })
            console.log("We are going to stream from folder:" + streamFolder);
            server.listen(8000);
            console.log('Server Listening on Port 8000');
        }
    };
})();
The problem is that if I stop creating new chunks and then open the HLS server link http://localhost:8000/stream/playlist.m3u8, the video plays in VLC; but if I try to play it during the recording, it keeps loading the file and does not play. I want it to play while it is still creating new chunks and updating playlist.m3u8. The quirk in the generateM3u8Playlist function is that it only adds '#EXT-X-ENDLIST' to the playlist file after I have stopped recording.
The software is still in production so the code is a bit messy. Thank you for any answers.
The client side that generates blobs is as follows:
var mediaConstraints = {
    video: true,
    audio: true
};
navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);

function onMediaSuccess(stream) {
    console.log('will start capturing and sending ' + (DT / 1000) + 's videos when you press start');
    var mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'video/webm';
    mediaRecorder.ondataavailable = function(blob) {
        var count2 = zeroPad(count, 5);
        // here count2 just creates a blob number
        console.log('sending chunk ' + name + ' #' + count2 + '...');
        send('/chunk/' + name + '/' + count2 + (stopped ? '/finish' : ''), blob);
        ++count;
    };
}

// Here we have the send function which sends our blob to the server:
function send(url, blob) {
    var xhr = new XMLHttpRequest();
    xhr.open('POST', url, true);
    xhr.responseType = 'text/plain';
    xhr.setRequestHeader('Content-Type', 'video/webm');
    //xhr.setRequestHeader("Content-Length", blob.length);
    xhr.onload = function(e) {
        if (this.status === 200) {
            console.log(this.response);
        }
    };
    xhr.send(blob);
}
The code that receives the XHR request is as follows:
var parts = u.split('/');
var prefix = parts[2];
var num = parts[3];
var isFirst = false;
var isLast = !!parts[4];

if ((/^0+$/).test(num)) {
    var path = require('path');
    shell.mkdir(path.join(__dirname, 'videos', prefix));
    isFirst = true;
}

var fp = 'videos/' + prefix + '/' + num + '.webm';
var msg = 'got ' + fp;
console.log(msg);
console.log('isFirst:%s, isLast:%s', isFirst, isLast);

var stream = fs.createWriteStream(fp, { encoding: 'binary' });
/*stream.on('end', function() {
    respond(res, ['text/plain', msg]);
});*/

//req.setEncoding('binary');
req.pipe(stream);
req.on('end', function() {
    respond(res, ['text/plain', msg]);
    if (!LIVE) { return; }

    var duration = 20;
    var fd = {
        fileName: num + '.webm',
        filePath: fp,
        duration: duration
    };
    var fileDataArr;
    if (isFirst) {
        fileDataArr = [];
        fileDataArrs[prefix] = fileDataArr;
    } else {
        var fileDataArr = fileDataArrs[prefix];
    }
    try {
        fileDataArr.push(fd);
    } catch (err) {
        fileDataArr = [];
        console.log(err.message);
    }
    videoUtils.computeStartTimes(fileDataArr);
    videoUtils.webm2Mpegts(fd, function(err, mpegtsFp) {
        if (err) { return console.error(err); }
        console.log('created %s', mpegtsFp);

        var playlistFp = 'videos/' + prefix + '/playlist.m3u8';
        var fileDataArr2 = (isLast ? fileDataArr : lastN(fileDataArr, PREV_ITEMS_IN_LIVE));
        var action = (isFirst ? 'created' : (isLast ? 'finished' : 'updated'));

        videoUtils.generateM3u8Playlist(fileDataArr2, playlistFp, !isLast, function(err) {
            console.log('playlist %s %s', playlistFp, (err ? err.toString() : action));
        });
    });
    runStreamServer(prefix);
});
You don't show us how you use MediaRecorder to generate your "chunks" of data. Do you use its ondataavailable event for this purpose?
If so, please keep this in mind: you must concatenate all the chunks handed to you by ondataavailable to get a valid .webm (or .matroska) data stream.
You can't just store an arbitrary chunk of data in a media file and expect it to play. Even ffmpeg needs all your chunks streamed to it to generate valid output. That's because the first couple of chunks contain the mandatory .webm initialization segment, and the later chunks do not.
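In other words, the chunks are only meaningful when kept together, in order. A minimal sketch of the idea (the variable names are illustrative, not from the question's code):
// Collect every chunk from ondataavailable; only the full, in-order
// concatenation is a valid .webm stream.
var chunks = [];
mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        chunks.push(event.data);
    }
};
mediaRecorder.onstop = function() {
    // The first chunk holds the initialization segment, so concatenating
    // all of them yields a playable file; any single later chunk does not.
    var wholeFile = new Blob(chunks, { type: 'video/webm' });
    // Send `wholeFile` to the server, or stream the chunks in order to ffmpeg.
};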

uwsgi-websocket GET /websocket no PONG received in 3 seconds

I am trying to implement a websocket through uWSGI gevent, but every couple of requests I get a "no PONG received in 3 seconds" error.
Server Side code:
#ws.route('/websocket')
def audio(ws):
first_message = True
total_msg = ""
sample_rate = 0
total_data = []
while True:
msg = ws.receive()
# processing message here and sending response back
ws.send("response")
if __name__ == '__main__':
app.run(https='0.0.0.0:443,{},{}'.format(ssl_cert,ssl_key), port = 5002, gevent=1000)
Client side code:
ws = new WebSocket('wss://ec2-54-72-7-110.eu-west-1.compute.amazonaws.com/websocket');
//ws = new WebSocket('wss://ec2-54-72-7-110.eu-west-1.compute.amazonaws.com:5000/websocket');
ws.onopen = function(evt) {
console.log('Connected to websocket.');
alert("Recording started")
navigator.getUserMedia({audio: true, video: false}, initializeRecorder, function(e) {
console.log('No live audio input: ' + e);
});
}
function initializeRecorder(stream){
audio_context = new AudioContext;
sampleRate = audio_context.sampleRate;
ws.send("sample rate:" + sampleRate);
var audioInput = audio_context.createMediaStreamSource(stream);
console.log("Created media stream.");
var bufferSize = 4096;
// record only 1 channel
recorder = audio_context.createScriptProcessor(bufferSize, 1, 1);
// specify the processing function
recorder.onaudioprocess = recorderProcess;
// connect stream to our recorder
audioInput.connect(recorder);
// connect our recorder to the previous destination
recorder.connect(audio_context.destination);
}
function recorderProcess(e) {
var left = e.inputBuffer.getChannelData(0);
if (ws.readyState === WebSocket.OPEN) {
var view = new Int16Array(convertFloat32ToInt16(left))
console.log(view);
ws.send(view);
}
}
function close_WebSocket(){
console.log("done")
ws.send("done")
ws.onmessage = function(evt) {
console.log(evt.data)
ws.close()
}
audio_context.close();
}
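(convertFloat32ToInt16 is called above but not shown; a typical implementation of that helper, assuming input samples in the [-1, 1] range, would look like this:)
// Hypothetical helper: scale Float32 samples in [-1, 1] to 16-bit integers.
function convertFloat32ToInt16(buffer) {
    var l = buffer.length;
    var result = new Int16Array(l);
    while (l--) {
        result[l] = Math.min(1, buffer[l]) * 0x7FFF;
    }
    return result.buffer; // ArrayBuffer; the caller wraps it in an Int16Array
}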
I don't know what is wrong with it.

Unable to stream video over a websocket to Firefox

I have written some code to stream video over a websocket into a SourceBuffer, which works in Chrome and Edge.
However, when I run this in Firefox, the video never plays back; just a spinning wheel animation is displayed. When I check the <video> statistics, it reads HAVE_METADATA as the ready state and NETWORK_LOADING as the network state.
The code looks as follows:
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8"/>
</head>
<body>
    <video controls></video>
    <script>
        var mime = 'video/mp4; codecs="avc1.4D401E,mp4a.40.2"';
        var address = 'ws://localhost:54132'

        /* Media Source */
        var source = new MediaSource();
        var video = document.querySelector('video');
        video.src = URL.createObjectURL(source);
        source.addEventListener('sourceopen', sourceOpen);

        /* Buffer */
        var buffer;
        var socket;
        var queue = [];
        var offset = -1;
        var timescale;

        // When the media source opens:
        function sourceOpen() {
            buffer = source.addSourceBuffer(mime);
            buffer.addEventListener('updateend', processQueue);
            socket = new WebSocket(address);
            socket.binaryType = 'arraybuffer';
            socket.onmessage = onMessage;
        }

        // When more data is received.
        function onMessage(event) {
            queue.push(event.data);
            processQueue();
        }

        // Process queue if possible.
        function processQueue() {
            if ((queue.length == 0) || (buffer.updating)) {
                return;
            }
            var data = queue.shift();
            if (offset === -1) {
                var parsed = parseMP4(data);
                if (parsed.hasOwnProperty('moov')) {
                    timescale = parsed.moov.mvhd.timescale;
                } else if (parsed.hasOwnProperty('moof')) {
                    offset = 0 - (parsed.moof.traf[0].tfdt.baseMediaDecodeTime / this.timescale - 0.4);
                    buffer.timestampOffset = offset;
                }
            }
            // console.log('appending ' + data.byteLength + ' bytes');
            buffer.appendBuffer(data);
        }

        // Parse out the offset.
        function parseMP4(data) {
            // SNIP for brevity
        }
    </script>
</body>
</html>
Could not reproduce the <video> element not playing at Firefox 47.
Merged the approach at Mocking Websocket Message Events, which creates mock WebSocket events, with the bufferAll.html demo from Let's Make a Netflix: An Intro to Streaming Media on the Web for the MediaSource usage pattern.
Included a <progress> element and a progress event handler to notify the user of the media loading status.
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8"/>
</head>
<body>
    <progress min="0" value="0"></progress><br><label></label><br>
    <video controls></video>
    <script>
        // http://nickdesaulniers.github.io/netfix/demo/bufferAll.html
        // http://jsfiddle.net/adamboduch/JVfkt/
        // The global web socket.
        var sock, sourceBuffer;
        sock = new WebSocket( "ws://mock" );
        sock.onerror = function(e) {
            console.log("sock error", e)
        }
        // This is unchanging production code that doesn't know
        // we're mocking the web socket.
        sock.onmessage = function( e ) {
            console.log("socket message", e.data);
            sourceBuffer.appendBuffer(e.data);
        };

        var video = document.querySelector("video");
        var progress = document.querySelector("progress");
        var label = document.querySelector("label");
        var assetURL = "http://nickdesaulniers.github.io/netfix/"
            + "demo/frag_bunny.mp4";
        // Need to be specific for Blink regarding codecs
        // ./mp4info frag_bunny.mp4 | grep Codec
        var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';

        if ("MediaSource" in window
            && MediaSource.isTypeSupported(mimeCodec)) {
            var mediaSource = new MediaSource;
            //console.log(mediaSource.readyState); // closed
            video.src = URL.createObjectURL(mediaSource);
            mediaSource.addEventListener("sourceopen", sourceOpen);
        } else {
            console.error("Unsupported MIME type or codec: ", mimeCodec);
        }

        video.addEventListener("canplay", function() {
            alert("video canplay")
        })

        function sourceOpen (_) {
            //console.log(this.readyState); // open
            var mediaSource = this;
            sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
            fetchAB(assetURL, function (buf) {
                sourceBuffer.addEventListener("updateend", function (event) {
                    console.log("sourceBuffer updateend event;"
                        + "mediaSource.readyState:"
                        , mediaSource.readyState);
                    // mediaSource.endOfStream();
                    // video.play();
                    // console.log(mediaSource.readyState); // ended
                });
            });
        };

        // mock `WebSocket` message
        function fetchAB (url, cb) {
            var xhr = new XMLHttpRequest;
            xhr.open("get", url);
            var file = url.split("/").pop();
            xhr.responseType = "arraybuffer";
            xhr.onload = function () {
                // mock `WebSocket` message
                sock.dispatchEvent( new MessageEvent( "message", {
                    data: xhr.response
                }));
                console.log("video sent to sock", sock);
                cb();
            };
            xhr.onprogress = function(e) {
                progress.max = e.total;
                progress.value = e.loaded;
                label.innerHTML = "loading " + file + " ...<br>"
                    + e.loaded + " of "
                    + e.total + " bytes loaded";
            }
            xhr.send();
        };
    </script>
</body>
</html>
plnkr http://plnkr.co/edit/RCIqDXTB2BL3lec9bhfz

Overlapping parts while buffering mp3

I am trying to stream an MP3 file from a Node.js server using BinaryJS - http://binaryjs.com/
But when I decode the buffers on the client side, they seem to overlap, meaning that a new chunk of data starts playing a few milliseconds before the previous one has ended, causing the audio to lag.
Is there any way to make the client wait until the current buffer has finished before starting the next one?
Server:
var BinaryServer = require('binaryjs').BinaryServer;
var fs = require('fs');

var server = BinaryServer({port: 9000});

server.on('connection', function(client) {
    var file = fs.createReadStream(__dirname + '/Song.mp3', {
        'flags': 'r',
        'bufferSize': 4 * 1024
    });
    client.send(file);
});
Client:
var client = new BinaryClient('ws://localhost:9000');
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();

client.on('stream', function (stream, meta) {
    var parts = [];
    var last = 0;
    stream.on('data', function (data) {
        var source = context.createBufferSource();
        context.decodeAudioData(data, function (buf) {
            source.buffer = buf;
            source.connect(context.destination);
            source.loop = false;
            source.start(last);
            last += buf.duration;
            source.onended = function() {
                console.log('Your audio has finished playing');
            };
        },
        function (e) {
            console.log("Error with decoding audio data" + e.err);
        });
        parts.push(data);
    });
    stream.on('end', function () {
        console.log(parts);
    });
});
Not sure about this, but instead of initializing last to 0, you might want to initialize it to context.currentTime.
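A sketch of that change applied to the client code above (everything else stays the same):
client.on('stream', function (stream, meta) {
    // Schedule against the audio clock: if `last` starts at 0, every
    // source.start(last) is already in the past, so each buffer plays
    // immediately and overlaps the tail of the previous one.
    var last = context.currentTime;
    stream.on('data', function (data) {
        context.decodeAudioData(data, function (buf) {
            var source = context.createBufferSource();
            source.buffer = buf;
            source.connect(context.destination);
            source.start(last);   // begins exactly when the previous buffer ends
            last += buf.duration;
        });
    });
});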
