I have written some code to stream video over a WebSocket to a SourceBuffer, which works in Chrome and Edge.
However, when I run this in Firefox, the video never plays back; only a spinning-wheel animation is displayed. When I check the <video> statistics, it reads HAVE_METADATA as the ready state and NETWORK_LOADING as the network state.
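For reference, one way to confirm those states from script, rather than from the built-in statistics panel, is to compare against the HTMLMediaElement constants; a small diagnostic sketch:
// Diagnostic sketch: log the ready/network state of the <video> element.
// Per the HTML spec, HAVE_METADATA === 1 and NETWORK_LOADING === 2.
var v = document.querySelector('video');
console.log('readyState:',
v.readyState === HTMLMediaElement.HAVE_METADATA ? 'HAVE_METADATA' : v.readyState);
console.log('networkState:',
v.networkState === HTMLMediaElement.NETWORK_LOADING ? 'NETWORK_LOADING' : v.networkState);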
The code looks as follows:
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
</head>
<body>
<video controls></video>
<script>
var mime = 'video/mp4; codecs="avc1.4D401E,mp4a.40.2"';
var address = 'ws://localhost:54132'
/* Media Source */
var source = new MediaSource();
var video = document.querySelector('video');
video.src = URL.createObjectURL(source);
source.addEventListener('sourceopen', sourceOpen);
/* Buffer */
var buffer;
var socket;
var queue = [];
var offset = -1;
var timescale;
// When the media source opens:
function sourceOpen() {
buffer = source.addSourceBuffer(mime);
buffer.addEventListener('updateend', processQueue);
socket = new WebSocket(address);
socket.binaryType = 'arraybuffer';
socket.onmessage = onMessage;
}
// When more data is received.
function onMessage(event) {
queue.push(event.data);
processQueue();
}
// Process queue if possible.
function processQueue() {
if ((queue.length == 0) || (buffer.updating)) {
return;
}
var data = queue.shift();
if (offset === -1) {
var parsed = parseMP4(data);
if (parsed.hasOwnProperty('moov')) {
timescale = parsed.moov.mvhd.timescale;
} else if (parsed.hasOwnProperty('moof')) {
offset = 0 - (parsed.moof.traf[0].tfdt.baseMediaDecodeTime / timescale - 0.4); // use the timescale captured from the moov box
buffer.timestampOffset = offset;
}
}
// console.log('appending ' + data.byteLength + ' bytes');
buffer.appendBuffer(data);
}
// Parse out the offset.
function parseMP4(data) {
// SNIP for brevity
}
</script>
</body>
</html>
Could not reproduce the <video> element not playing at Firefox 47.
The approach below merges Mocking Websocket Message Events, to create mock WebSocket events, with the bufferAll.html demo from Let's Make a Netflix: An Intro to Streaming Media on the Web, for the MediaSource usage pattern.
A <progress> element and progress event are included to notify the user of the media loading status.
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
</head>
<body>
<progress min="0" value="0"></progress><br><label></label><br>
<video controls></video>
<script>
// http://nickdesaulniers.github.io/netfix/demo/bufferAll.html
// http://jsfiddle.net/adamboduch/JVfkt/
// The global web socket.
var sock, sourceBuffer;
sock = new WebSocket( "ws://mock" );
sock.onerror = function(e) {
console.log("sock error", e)
}
// This is unchanging production code that doesn't know
// we're mocking the web socket.
sock.onmessage = function( e ) {
console.log("socket message", e.data);
sourceBuffer.appendBuffer(e.data);
};
var video = document.querySelector("video");
var progress = document.querySelector("progress");
var label = document.querySelector("label");
var assetURL = "http://nickdesaulniers.github.io/netfix/"
+ "demo/frag_bunny.mp4";
// Need to be specific for Blink regarding codecs
// ./mp4info frag_bunny.mp4 | grep Codec
var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
if ("MediaSource" in window
&& MediaSource.isTypeSupported(mimeCodec)) {
var mediaSource = new MediaSource;
//console.log(mediaSource.readyState); // closed
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener("sourceopen", sourceOpen);
} else {
console.error("Unsupported MIME type or codec: ", mimeCodec);
}
video.addEventListener("canplay", function() {
alert("video canplay")
})
function sourceOpen (_) {
//console.log(this.readyState); // open
var mediaSource = this;
sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
fetchAB(assetURL, function (buf) {
sourceBuffer.addEventListener("updateend", function (event) {
console.log("sourceBuffer updateend event;"
+ "mediaSource.readyState:"
, mediaSource.readyState);
// mediaSource.endOfStream();
// video.play();
// console.log(mediaSource.readyState); // ended
});
});
};
// mock `WebSocket` message
function fetchAB (url, cb) {
var xhr = new XMLHttpRequest;
xhr.open("get", url);
var file = url.split("/").pop();
xhr.responseType = "arraybuffer";
xhr.onload = function () {
// mock `WebSocket` message
sock.dispatchEvent( new MessageEvent( "message", {
data: xhr.response
}));
console.log("video sent to sock", sock);
cb();
};
xhr.onprogress = function(e) {
progress.max = e.total;
progress.value = e.loaded;
label.innerHTML = "loading " + file + " ...<br>"
+ e.loaded + " of "
+ e.total + " bytes loaded";
}
xhr.send();
};
</script>
</body>
</html>
plnkr http://plnkr.co/edit/RCIqDXTB2BL3lec9bhfz
I am trying to trim the length of a video within the browser, from either the beginning or the end. What I currently have is a MediaStream which is being recorded using the MediaRecorder API, which I use to produce a Blob at the end of the recording with its type set to 'video/mp4'. This works great and I am able to play the video back; however, I would like a way to trim the video at either the start or the end.
Upon further investigation of the MediaStream API I came across the SourceBuffer object, which you can obtain from a MediaStreamTrack and use to remove a time slice, which is exactly what I want. However, I am unsure how to obtain the MediaStreamTrack from the video (blob), since the video.srcObject property returns null.
I am trying to trim the length of a video within the browser, from
either the beginning or the end.
You can use Array.prototype.slice() to remove segments of one second (1000 ms), or another time range, from the end of an array of Blobs pushed at each MediaRecorder dataavailable event, where MediaRecorder.start() is called with parameter 1000 so that each recorded Blob contains 1000 ms of recorded data.
The approach utilizes a modified version of https://github.com/samdutton/simpl/tree/gh-pages/mediarecorder, with <input type="number"> elements added to remove chunks of 1 s from the recorded video, for both playback and download, using .slice().
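As a minimal sketch of the core idea (assuming recordedBlobs holds one 1000 ms Blob per element, i.e. MediaRecorder.start(1000) was used while recording), keeping the first n entries trims everything after n seconds:
// Keep the first n one-second chunks; everything after is dropped.
function trimToSeconds(recordedBlobs, n) {
var kept = recordedBlobs.slice(0, n);
// Reassemble the kept chunks into a single playable Blob.
return new Blob(kept, { type: 'video/webm' });
}
// e.g. recordedVideo.src = URL.createObjectURL(trimToSeconds(recordedBlobs, 5));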
html
<video id="gum" autoplay muted controls></video>
<video id="recorded" autoplay controls></video>
<div>
<button id="record">Start Recording</button><label for="record"></label><br>
<span>Seconds of recorded video to play (min 1):</span><input min="1" type="number" disabled />
<button id="play" disabled>Play</button>
<span>Seconds of recorded video to download (min 1):</span><input min="1" type="number" disabled /><button id="download" disabled>Download</button>
</div>
javascript
'use strict';
/* globals MediaRecorder */
// This code is adapted from
// https://rawgit.com/Miguelao/demos/master/mediarecorder.html
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
var mediaRecorder;
var recordedBlobs;
var sourceBuffer;
var gumVideo = document.querySelector('video#gum');
var recordedVideo = document.querySelector('video#recorded');
var input = document.querySelectorAll("input[type=number]");
recordedVideo.ontimeupdate = function(e) {
console.log("recorded video currentTime:", e.target.currentTime)
}
gumVideo.onprogress = function(e) {
// console.log("getUserMedia video currentTime:", e.target.currentTime)
}
var recordButton = document.querySelector('button#record');
var playButton = document.querySelector('button#play');
var downloadButton = document.querySelector('button#download');
recordButton.onclick = toggleRecording;
playButton.onclick = play;
downloadButton.onclick = download;
var currentTimes = [];
recordButton.nextElementSibling.innerHTML = "recorded video "
+ currentTimes.length
+ "s";
// window.isSecureContext could be used for Chrome
var isSecureOrigin = location.protocol === 'https:' ||
location.host === 'localhost';
if (!isSecureOrigin) {
alert('getUserMedia() must be run from a secure origin: HTTPS or localhost.' +
'\n\nChanging protocol to HTTPS');
location.protocol = 'HTTPS';
}
// Use old-style gUM to avoid requirement to enable the
// Enable experimental Web Platform features flag in Chrome 49
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
var constraints = {
audio: true,
video: true
};
navigator.getUserMedia(constraints, successCallback, errorCallback);
function successCallback(stream) {
console.log('getUserMedia() got stream: ', stream);
window.stream = stream;
if (window.URL) {
gumVideo.src = window.URL.createObjectURL(stream);
} else {
gumVideo.src = stream;
}
}
function errorCallback(error) {
console.log('navigator.getUserMedia error: ', error);
}
// navigator.mediaDevices.getUserMedia(constraints)
// .then(function(stream) {
// console.log('getUserMedia() got stream: ', stream);
// window.stream = stream; // make available to browser console
// if (window.URL) {
// gumVideo.src = window.URL.createObjectURL(stream);
// } else {
// gumVideo.src = stream;
// }
// }).catch(function(error) {
// console.log('navigator.getUserMedia error: ', error);
// });
function handleSourceOpen(event) {
console.log('MediaSource opened');
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
console.log('Source buffer: ', sourceBuffer);
}
function handleDataAvailable(event) {
if (event.data && event.data.size > 0) {
currentTimes.push(gumVideo.currentTime);
recordedBlobs.push(event.data);
recordButton.nextElementSibling.innerHTML = "recorded video "
+ recordedBlobs.length
+ "s";
}
}
function handleStop(event) {
console.log('Recorder stopped: ', event);
console.log("recorded times from getUserMedia video:", currentTimes);
}
function toggleRecording() {
if (recordButton.textContent === 'Start Recording') {
startRecording();
} else {
stopRecording();
recordButton.textContent = 'Start Recording';
playButton.disabled = false;
downloadButton.disabled = false;
}
}
// The nested try blocks will be simplified when Chrome 47 moves to Stable
function startRecording() {
var options = {
mimeType: 'video/webm',
bitsPerSecond: 100000
};
recordedBlobs = [];
currentTimes = [];
for (var i = 0; i < input.length; i++) {
input[i].setAttribute("max", 1);
input[i].setAttribute("disabled", "disabled");
}
playButton.disabled = true;
downloadButton.disabled = true;
try {
mediaRecorder = new MediaRecorder(window.stream, options);
} catch (e0) {
console.log('Unable to create MediaRecorder with options Object: ', e0);
try {
options = {
mimeType: 'video/webm;codecs=vp9',
bitsPerSecond: 100000
};
mediaRecorder = new MediaRecorder(window.stream, options);
} catch (e1) {
console.log('Unable to create MediaRecorder with options Object: ', e1);
try {
options = 'video/vp8'; // Chrome 47
mediaRecorder = new MediaRecorder(window.stream, options);
} catch (e2) {
alert('MediaRecorder is not supported by this browser.\n\n' +
'Try Firefox 29 or later, or Chrome 47 or later,'
+ ' with Enable experimental Web Platform features enabled '
+ ' from chrome://flags.');
console.error('Exception while creating MediaRecorder:', e2);
return;
}
}
}
console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
recordButton.textContent = 'Stop Recording';
playButton.disabled = true;
downloadButton.disabled = true;
mediaRecorder.onstop = handleStop;
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.start(1000); // collect 1000ms of data
console.log('MediaRecorder started', mediaRecorder);
}
function stopRecording() {
mediaRecorder.stop();
for (var i = 0; i < input.length; i++) {
input[i].setAttribute("max", recordedBlobs.length);
input[i].removeAttribute("disabled");
}
console.log('Recorded Blobs: ', recordedBlobs);
recordedVideo.controls = true;
}
function play() {
console.log(`playing ${input[0].value}s of getUserMedia video `
+ `recorded by MediaRecorder from time ranges`
, currentTimes.slice(0, input[0].value));
// keep the first `input[0].value` seconds of recorded video,
// trimming the remainder from the end, for playback
var file = recordedBlobs.slice(0, input[0].value);
var superBuffer = new Blob(file, {
type: 'video/webm'
});
recordedVideo.src = window.URL.createObjectURL(superBuffer);
}
function download() {
// keep the first `input[1].value` seconds of recorded video,
// trimming the remainder from the end, for download
var file = recordedBlobs.slice(0, input[1].value);
var blob = new Blob(file, {
type: 'video/webm'
});
var url = window.URL.createObjectURL(blob);
var a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = 'test.webm';
document.body.appendChild(a);
a.click();
setTimeout(function() {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 100);
}
plnkr https://plnkr.co/edit/LxuV5jMX0RZtDxOxT1qa?p=preview
The specifics are these: I use Java to read an audio file on the back end, then transfer it to the front end of the web page through a WebSocket. On the web page, I want to play the data received over the WebSocket through an Audio() object. I know the srcObject property is used for playback, but I don't know how to implement it.
Below is the code of my web page:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
</head>
<body>
<button onclick="send()">play</button>
</body>
</html>
<script>
var websocket = null;
// Check whether the browser supports WebSocket
if ('WebSocket' in window) {
websocket = new WebSocket("ws://localhost:8080/ws");
websocket.binaryType = "arraybuffer";
}
else {
alert('The current browser does not support websocket');
}
function send() {
websocket.send("play");
}
//
websocket.onmessage = function (event) {
var data = event.data;
// console.log(new Float32Array(data));
// play(data);
};
websocket.onopen = function () {
console.log("onopen...");
};
websocket.onclose = function () {
console.log("onclose...");
};
websocket.onerror = function () {
console.log("onerror...");
};
window.onbeforeunload = function () {
closeWebSocket();
};
function closeWebSocket() {
websocket.close();
}
var context;
var audio = new Audio();
var promise;
var flag = false;
var b = false;
var scriptProcessorNode;
var gainNode;
var streamAudioDestinationNode;
// data: arrayBuffer
function play(data) {
if (context === undefined) {
context = new AudioContext();
}
var buffer = context.createBuffer(2, 48000 * 2, 48000);
buffer.copyToChannel(new Float32Array(data), 1, 1);
var bufferSourceNode = context.createBufferSource();
bufferSourceNode.buffer = buffer;
if (!b) {
scriptProcessorNode = context.createScriptProcessor();
gainNode = context.createGain();
streamAudioDestinationNode = context.createMediaStreamDestination();
b = true;
}
bufferSourceNode.connect(scriptProcessorNode);
scriptProcessorNode.connect(gainNode);
gainNode.connect(streamAudioDestinationNode);
audio.srcObject = streamAudioDestinationNode.stream;
// audio.load();
if (!flag) {
promise = audio.play();
console.log("method play");
flag = true;
}
}
</script>
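One possible direction, sketched under the assumption that the server sends raw mono 32-bit float PCM at 48 kHz (both are assumptions, inferred from the Float32Array and 48000 values above): play each chunk through an AudioBufferSourceNode directly, with no Audio() or srcObject involved.
// Sketch only: assumes each WebSocket message is raw mono
// 32-bit float PCM sampled at 48 kHz.
var ctx = new AudioContext();
function playChunk(arrayBuffer) {
var samples = new Float32Array(arrayBuffer);
var buf = ctx.createBuffer(1, samples.length, 48000);
buf.copyToChannel(samples, 0, 0);
var src = ctx.createBufferSource();
src.buffer = buf;
src.connect(ctx.destination);
src.start();
}
// e.g. websocket.onmessage = function (event) { playChunk(event.data); };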
I am building an application for ASP.Net MVC 5, in which I need to record a live video and audio stream from the webcam/microphone. I have decided on a web app based solution, RecordRTC. I have used the demo from, Muaz-Khan's github as a starting point, and modified it for my specifications.
The problem I am having is that I am unable to determine how to reduce the size of the webm that is being created. Right now it is about 1.7 MB per second; my goal is 4-10 MB per minute.
Also, the file that is created does not contain duration metadata, meaning I cannot use the seek bar to go to a specific time; i.e. it says 3 seconds have elapsed and 0 remain, yet keeps playing.
Any help would be appreciated.
NOTE: I am aware that support for the below code will only work in Firefox. I will simply wait for Chrome to add support, and therefore have removed the workaround Muaz Khan has in the demo.
<script>
// Script 1
document.createElement('article');
</script>
<script>
//script 2
function PostBlob(blob, fileType, fileName) {
// FormData
var formData = new FormData();
formData.append(fileType + '-filename', fileName);
formData.append(fileType + '-blob', blob);
// progress-bar
var hr = document.createElement('hr');
container.appendChild(hr);
var strong = document.createElement('strong');
strong.id = 'percentage';
strong.innerHTML = fileType + ' upload progress: ';
container.appendChild(strong);
var progress = document.createElement('progress');
container.appendChild(progress);
// POST the Blob using XHR2
xhr('/Video/PostRecordedAudioVideo', formData, progress, percentage, function(fName) {
container.appendChild(document.createElement('hr'));
var mediaElement = document.createElement(fileType);
var source = document.createElement('source');
source.src = location.href + 'Files/' + fName.replace(/"/g, '');
if (fileType == 'video') source.type = 'video/webm; codecs="vp8, vorbis"';
if (fileType == 'audio') source.type = 'audio/ogg';
mediaElement.appendChild(source);
mediaElement.controls = true;
container.appendChild(mediaElement);
mediaElement.play();
progress.parentNode.removeChild(progress);
strong.parentNode.removeChild(strong);
hr.parentNode.removeChild(hr);
});
}
var record = document.getElementById('record');
var stop = document.getElementById('stop');
var audio = document.querySelector('audio');
var recordVideo = document.getElementById('record-video');
var preview = document.getElementById('preview');
var container = document.getElementById('container');
var recordAudio;
record.onclick = function() {
record.disabled = true;
navigator.getUserMedia({
audio: {
mandatory: {
googEchoCancellation: false,
googAutoGainControl: false,
googNoiseSuppression: false,
googHighpassFilter: false,
sampleRate: true
},
optional: []
},
video: true
}, function(stream) {
preview.src = window.URL.createObjectURL(stream);
preview.play();
//var legalBufferValues = [256, 512, 1024, 2048, 4096, 8192, 16384];
// sample-rates in at least the range 22050 to 96000.
recordAudio = RecordRTC(stream, {
onAudioProcessStarted: function() {}
});
recordAudio.startRecording();
stop.disabled = false;
}, function(error) {
alert(JSON.stringify(error, null, '\t'));
});
};
var fileName;
stop.onclick = function() {
record.disabled = false;
stop.disabled = true;
preview.src = '';
fileName = Math.round(Math.random() * 99999999) + 99999999 + ".webm";
recordAudio.stopRecording(function(url) {
preview.src = url;
// `blob` was undefined here; RecordRTC exposes the recorded Blob via getBlob()
PostBlob(recordAudio.getBlob(), 'video', fileName);
});
};
function xhr(url, data, progress, percentage, callback) {
var request = new XMLHttpRequest();
request.onreadystatechange = function() {
if (request.readyState == 4 && request.status == 200) {
callback(request.responseText);
}
};
if (url.indexOf('/Video/DeleteFile') == -1) {
request.upload.onloadstart = function() {
percentage.innerHTML = 'Upload started...';
};
request.upload.onprogress = function(event) {
progress.max = event.total;
progress.value = event.loaded;
percentage.innerHTML = 'Upload Progress ' + Math.round(event.loaded / event.total * 100) + "%";
};
request.upload.onload = function() {
percentage.innerHTML = 'Saved!';
};
}
request.open('POST', url);
request.send(data);
};
</script>
@{ ViewBag.Title = "Video Record"; }
<!-- Script 1 goes here -->
<!-- script used for audio/video/gif recording -->
@Scripts.Render("~/bundles/RTCRecording")
<article>
<section class="experiment">
<p style="text-align:center;">
<video id="preview" controls style="border: 1px solid rgb(15, 158, 238);"></video>
</p>
<hr />
<button id="record">Record</button>
<button id="stop" disabled>Stop</button>
<div id="container" style="padding:1em 2em;"></div>
</section>
<!-- Script 2 goes here -->
</article>
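As an aside on the size issue: 4-10 MB per minute works out to roughly 0.5-1.3 Mbps, far below the ~13.6 Mbps implied by 1.7 MB per second, so the main lever is the recorder's bitrate. A hedged sketch using plain MediaRecorder options (whether RecordRTC forwards these is an assumption to verify):
// Sketch: target roughly 1 Mbps video + 128 kbps audio, about 8.5 MB/minute.
// videoBitsPerSecond and audioBitsPerSecond are MediaRecorder spec options.
var recorder = new MediaRecorder(stream, {
mimeType: 'video/webm',
videoBitsPerSecond: 1000000,
audioBitsPerSecond: 128000
});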
Hello Web Audio Developers,
I am receiving "decodeAudioData error null" in Chrome
and
"decodeAudioData error undefined" in Firebug.
Firebug also says
"The buffer passed to decodeAudioData contains an unknown content type."
Is there something wrong with this code or
is there something else that needs to be worked out with the
"Web Audio API"?
<!DOCTYPE html>
<html lang="en-US">
<head>
<meta charset="utf-8">
<title>Together 2 </title>
</head>
<body>
<script type="text/javascript">
window.onload = init;
var context;
var bufferLoader;
function init() {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
context = new AudioContext();
bufferLoader = new BufferLoader(context,
[ '../web-audio/path/chrono.mp3' ],
finishedLoading
);
bufferLoader.load();
}
function finishedLoading(bufferList) {
var source1 = context.createBufferSource();
source1.buffer = bufferList[0];
source1.connect(context.destination);
source1.start(0);
}
function BufferLoader(context, urlList, callback) {
this.context = context;
this.urlList = urlList;
this.onload = callback;
this.bufferList = new Array();
this.loadCount = 0;
}
BufferLoader.prototype.loadBuffer = function(url, index) { // Load buffer asynchronously
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
var loader = this;
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.context.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer;
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList);
},
function(error) {
console.error('decodeAudioData error', error);
}
);
}
request.onerror = function() {
alert('BufferLoader: XHR error');
}
request.send();
}
BufferLoader.prototype.load = function() {
for (var i = 0; i < this.urlList.length; ++i)
this.loadBuffer(this.urlList[i], i);
}
</script>
</body>
</html>
I had the same problem and it was down to the type of codec used on the wav file itself. Only PCM is supported whereas the files that were throwing me errors were MS-ADPCM. You can use QuickTime or VLC to get the codec information and convert them to PCM if necessary.
I'd imagine that it is a similar issue with your MP3.
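If you want to check programmatically before calling decodeAudioData, the format tag of a canonical WAV header sits at byte offset 20 (1 = PCM, 2 = MS-ADPCM, 3 = IEEE float); a sketch, assuming the standard RIFF layout with the fmt chunk starting at byte 12:
// Sketch: read the WAVE format tag from a canonical RIFF header.
// Non-canonical files may place the fmt chunk elsewhere.
function wavFormatTag(arrayBuffer) {
var view = new DataView(arrayBuffer);
return view.getUint16(20, true); // little-endian: 1 = PCM, 2 = MS-ADPCM
}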
Trying to learn the Web Audio API, but I get an Uncaught ReferenceError for the BufferLoader class. I'm on Chrome and it's up to date. Shouldn't this class work with no problems?
<html>
<head>
<script type=text/javascript>
window.onload = init;
var context;
var bufferLoader;
function init(){
context = new webkitAudioContext();
bufferLoader = new BufferLoader(
context,
[
'https://dl.dropboxusercontent.com/u/1957768/kdFFO3.wav',
'https://dl.dropboxusercontent.com/u/1957768/geniuse%20meodies.wav',
],
finishedLoading
);
bufferLoader.load();
}
function finishedLoading(bufferList){
//make two sources and play them
var source1 = context.createBufferSource();
var source2 = context.createBufferSource();
source1.buffer = bufferList[0];
source2.buffer = bufferList[1];
source1.connect(context.destination);
source2.connect(context.destination);
source1.start(0);
source2.start(0);
}
</script>
</head>
<body>
</body>
</html>
The BufferLoader "class" is a custom function created to abstract the use of the Web Audio API. It's not a built-in feature, and must be included in your page in order to be used; there is nothing special about Chrome having this. Here's an example of where it is explained: http://www.html5rocks.com/en/tutorials/webaudio/intro/#toc-abstract
To use, include this code before it is used:
function BufferLoader(context, urlList, callback) {
this.context = context;
this.urlList = urlList;
this.onload = callback;
this.bufferList = new Array();
this.loadCount = 0;
}
BufferLoader.prototype.loadBuffer = function(url, index) {
// Load buffer asynchronously
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
var loader = this;
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.context.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer;
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList);
},
function(error) {
console.error('decodeAudioData error', error);
}
);
}
request.onerror = function() {
alert('BufferLoader: XHR error');
}
request.send();
}
BufferLoader.prototype.load = function() {
for (var i = 0; i < this.urlList.length; ++i)
this.loadBuffer(this.urlList[i], i);
}