Receiving "decodeAudioData error null" in Chrome - javascript

Hello Web Audio Developers,
I am receiving "decodeAudioData error null" in Chrome and "decodeAudioData error undefined" in Firebug. Firebug also says "The buffer passed to decodeAudioData contains an unknown content type."
Is there something wrong with this code, or is there something else that needs to be worked out with the Web Audio API?
<!DOCTYPE html>
<html lang="en-US">
  <head>
    <meta charset="utf-8">
    <title>Together 2</title>
  </head>
  <body>
    <script type="text/javascript">
      window.onload = init;

      var context;
      var bufferLoader;

      function init() {
        window.AudioContext = window.AudioContext || window.webkitAudioContext;
        context = new AudioContext();
        bufferLoader = new BufferLoader(
          context,
          ['../web-audio/path/chrono.mp3'],
          finishedLoading
        );
        bufferLoader.load();
      }

      function finishedLoading(bufferList) {
        var source1 = context.createBufferSource();
        source1.buffer = bufferList[0];
        source1.connect(context.destination);
        source1.start(0);
      }

      function BufferLoader(context, urlList, callback) {
        this.context = context;
        this.urlList = urlList;
        this.onload = callback;
        this.bufferList = new Array();
        this.loadCount = 0;
      }

      BufferLoader.prototype.loadBuffer = function(url, index) {
        // Load buffer asynchronously
        var request = new XMLHttpRequest();
        request.open("GET", url, true);
        request.responseType = "arraybuffer";

        var loader = this;

        request.onload = function() {
          // Asynchronously decode the audio file data in request.response
          loader.context.decodeAudioData(
            request.response,
            function(buffer) {
              if (!buffer) {
                alert('error decoding file data: ' + url);
                return;
              }
              loader.bufferList[index] = buffer;
              if (++loader.loadCount == loader.urlList.length)
                loader.onload(loader.bufferList);
            },
            function(error) {
              console.error('decodeAudioData error', error);
            }
          );
        }

        request.onerror = function() {
          alert('BufferLoader: XHR error');
        }

        request.send();
      }

      BufferLoader.prototype.load = function() {
        for (var i = 0; i < this.urlList.length; ++i)
          this.loadBuffer(this.urlList[i], i);
      }
    </script>
  </body>
</html>

I had the same problem and it was down to the type of codec used on the wav file itself. Only PCM is supported whereas the files that were throwing me errors were MS-ADPCM. You can use QuickTime or VLC to get the codec information and convert them to PCM if necessary.
I'd imagine that it is a similar issue with your MP3.
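As a quick way to see the underlying failure in current browsers, the promise-based form of decodeAudioData reports the decode error directly. A minimal sketch (not from the thread), reusing the question's '../web-audio/path/chrono.mp3' path:

// Fetch the file and try to decode it, logging the exact failure.
var ctx = new (window.AudioContext || window.webkitAudioContext)();

fetch('../web-audio/path/chrono.mp3')
  .then(function(response) { return response.arrayBuffer(); })
  .then(function(arrayBuffer) { return ctx.decodeAudioData(arrayBuffer); })
  .then(function(audioBuffer) {
    console.log('decoded OK:', audioBuffer.duration, 'seconds');
  })
  .catch(function(err) {
    // An EncodingError here usually means the container/codec is not supported
    // (e.g. an ADPCM WAV), or the server returned an error page instead of audio.
    console.error('decode failed:', err);
  });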

Related

How to use JavaScript to play audio stream using srcObject property of Audio object?

The specifics: I use Java to read an audio file on the back end and transfer it to the front end through a WebSocket. On the web page, I want to play the data received over the WebSocket through an Audio() object. I know the srcObject property is used for playback, but I don't know how to implement it.
Below is the code of my web page:
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8">
    <title>Title</title>
  </head>
  <body>
    <button onclick="send()">play</button>
  </body>
</html>
<script>
  var websocket = null;

  // Check whether the browser supports WebSocket
  if ('WebSocket' in window) {
    websocket = new WebSocket("ws://localhost:8080/ws");
    websocket.binaryType = "arraybuffer";
  } else {
    alert('The current browser does not support websocket');
  }

  function send() {
    websocket.send("play");
  }

  websocket.onmessage = function (event) {
    var data = event.data;
    // console.log(new Float32Array(data));
    // play(data);
  };
  websocket.onopen = function () {
    console.log("onopen...");
  };
  websocket.onclose = function () {
    console.log("onclose...");
  };
  websocket.onerror = function () {
    console.log("onerror...");
  };

  window.onbeforeunload = function () {
    closeWebSocket();
  };

  function closeWebSocket() {
    websocket.close();
  }

  var context;
  var audio = new Audio();
  var promise;
  var flag = false;
  var b = false;
  var scriptProcessorNode;
  var gainNode;
  var streamAudioDestinationNode;

  // data: arrayBuffer
  function play(data) {
    if (context === undefined) {
      context = new AudioContext();
    }
    var buffer = context.createBuffer(2, 48000 * 2, 48000);
    buffer.copyToChannel(new Float32Array(data), 1, 1);
    var bufferSourceNode = context.createBufferSource();
    bufferSourceNode.buffer = buffer;
    if (!b) {
      scriptProcessorNode = context.createScriptProcessor();
      gainNode = context.createGain();
      streamAudioDestinationNode = context.createMediaStreamDestination();
      b = true;
    }
    bufferSourceNode.connect(scriptProcessorNode);
    scriptProcessorNode.connect(gainNode);
    gainNode.connect(streamAudioDestinationNode);
    audio.srcObject = streamAudioDestinationNode.stream;
    // audio.load();
    if (!flag) {
      promise = audio.play();
      console.log("method play");
      flag = true;
    }
  }
</script>
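For reference, a minimal sketch (not from the thread) of the srcObject wiring this code is aiming for, assuming the socket delivers raw Float32 PCM at 48 kHz as the createBuffer call above suggests. Note that an AudioBufferSourceNode produces no sound until start() is called:

// Sketch: play Float32 PCM chunks through a MediaStreamAudioDestinationNode
// whose stream is assigned to an Audio element via srcObject.
var ctx = new AudioContext();
var dest = ctx.createMediaStreamDestination();
var audioEl = new Audio();
audioEl.srcObject = dest.stream;   // the element plays whatever reaches the destination node
audioEl.play();

function playChunk(arrayBuffer) {
  var samples = new Float32Array(arrayBuffer);     // assumes mono Float32 samples
  var buf = ctx.createBuffer(1, samples.length, 48000);
  buf.copyToChannel(samples, 0);                   // channel 0, offset 0
  var src = ctx.createBufferSource();
  src.buffer = buf;
  src.connect(dest);
  src.start();                                     // without start() nothing is produced
}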

Chrome crashes when using XHR to send a large file

I use XMLHttpRequest to send POST data to Node.js (an Express API). The file is about 200 MB. When I send it, Chrome crashes, but Firefox doesn't. Does Chrome have a file size limit?
And how can I upload a large file via JavaScript?
This is my code for sending the XHR request:
// create http request to the AI Engine API
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.responseType = 'arraybuffer';
xhr.timeout = 3600000;

xhr.onload = function() {
  // response message
  if (this.status === 200) {
    var errorArea = document.getElementById('error-area');
    errorArea.style.display = 'none';
    zip.files = {};
    // unzip file
    zip.loadAsync(this.response)
      .then(function(zip) {
        // will be called, even if content is corrupted
        for (const file in zip.files) {
          if (zip.files.hasOwnProperty(file)) {
            const fileObject = zip.files[file];
            fileObject.async('arraybuffer').then(function(fileData) {
              var fileSave = new Blob([new Uint8Array(fileData)]);
              // Save file
              saveAs(fileSave, fileObject.name);
            })
          }
        }
      }, function(e) {
        var errorArea = document.getElementById('error-area');
        errorArea.style.display = 'block';
        errorArea.innerHTML =
          '<strong>Error</strong> Cannot unzip returned file.';
      });
    // get response stream data
  } else {
    var errorArea = document.getElementById('error-area');
    errorArea.style.display = 'block';
    errorArea.innerHTML =
      '<strong>Error</strong> Cannot analyze file: ' + this.statusText;
  }
};

// Handle XHR errors, show message
xhr.onerror = function(err) {
  var errorArea = document.getElementById('error-area');
  errorArea.style.display = 'block';
  errorArea.innerHTML =
    '<strong>Error</strong> Cannot analyze file: ' + this.statusText;
};

// Handle XHR timeout, show message
xhr.ontimeout = function() {
  var errorArea = document.getElementById('error-area');
  errorArea.style.display = 'block';
  errorArea.innerHTML =
    '<strong>Error</strong> Cannot analyze file: Request time out';
};

// Add custom headers
xhr.setRequestHeader('Content-type', 'application/octet-stream');
xhr.setRequestHeader('file-name', Date.now().toString() + '.zip');
xhr.setRequestHeader('file-length', data.byteLength);

// Send arraybuffer to server
xhr.send(data);
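A general note, not from this thread: crashes with files of this size are often caused by building the whole payload as an ArrayBuffer in JavaScript first. XMLHttpRequest.send() also accepts a File or Blob directly, which lets the browser stream it from disk. A rough sketch, where url is assumed to be the same endpoint variable used above and the file input element is hypothetical:

// Sketch: upload the File/Blob itself instead of a 200 MB ArrayBuffer.
var fileInput = document.getElementById('file-input');   // hypothetical <input type="file">
var file = fileInput.files[0];

var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.setRequestHeader('Content-type', 'application/octet-stream');
xhr.setRequestHeader('file-name', Date.now().toString() + '.zip');
xhr.setRequestHeader('file-length', file.size);
xhr.upload.onprogress = function(e) {
  console.log('uploaded ' + e.loaded + ' of ' + e.total + ' bytes');
};
xhr.onload = function() {
  console.log('upload finished with status ' + this.status);
};
xhr.send(file);   // send() accepts a Blob/File directly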

Unable to stream video over a websocket to Firefox

I have written some code to stream video over a WebSocket into a SourceBuffer, which works in Chrome and Edge.
However, when I run this in Firefox, the video never plays back; just a spinning wheel animation is displayed. When I check the <video> statistics, it reads HAVE_METADATA as the ready state and NETWORK_LOADING as the network state.
The code looks as follows:
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8"/>
  </head>
  <body>
    <video controls></video>
    <script>
      var mime = 'video/mp4; codecs="avc1.4D401E,mp4a.40.2"';
      var address = 'ws://localhost:54132';

      /* Media Source */
      var source = new MediaSource();
      var video = document.querySelector('video');
      video.src = URL.createObjectURL(source);
      source.addEventListener('sourceopen', sourceOpen);

      /* Buffer */
      var buffer;
      var socket;
      var queue = [];
      var offset = -1;
      var timescale;

      // When the media source opens:
      function sourceOpen() {
        buffer = source.addSourceBuffer(mime);
        buffer.addEventListener('updateend', processQueue);
        socket = new WebSocket(address);
        socket.binaryType = 'arraybuffer';
        socket.onmessage = onMessage;
      }

      // When more data is received.
      function onMessage(event) {
        queue.push(event.data);
        processQueue();
      }

      // Process queue if possible.
      function processQueue() {
        if ((queue.length == 0) || (buffer.updating)) {
          return;
        }
        var data = queue.shift();
        if (offset === -1) {
          var parsed = parseMP4(data);
          if (parsed.hasOwnProperty('moov')) {
            timescale = parsed.moov.mvhd.timescale;
          } else if (parsed.hasOwnProperty('moof')) {
            offset = 0 - (parsed.moof.traf[0].tfdt.baseMediaDecodeTime / this.timescale - 0.4);
            buffer.timestampOffset = offset;
          }
        }
        // console.log('appending ' + data.byteLength + ' bytes');
        buffer.appendBuffer(data);
      }

      // Parse out the offset.
      function parseMP4(data) {
        // SNIP for brevity
      }
    </script>
  </body>
</html>
Could not reproduce the <video> element not playing at Firefox 47.
Merged the approach from "Mocking Websocket Message Events" (to create mock WebSocket events) with the bufferAll.html demo from "Let's Make a Netflix: An Intro to Streaming Media on the Web" (for the MediaSource usage pattern).
Included a <progress> element and the progress event to notify the user of the media loading status.
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8"/>
  </head>
  <body>
    <progress min="0" value="0"></progress><br><label></label><br>
    <video controls></video>
    <script>
      // http://nickdesaulniers.github.io/netfix/demo/bufferAll.html
      // http://jsfiddle.net/adamboduch/JVfkt/
      // The global web socket.
      var sock, sourceBuffer;
      sock = new WebSocket("ws://mock");
      sock.onerror = function(e) {
        console.log("sock error", e)
      }
      // This is unchanging production code that doesn't know
      // we're mocking the web socket.
      sock.onmessage = function(e) {
        console.log("socket message", e.data);
        sourceBuffer.appendBuffer(e.data);
      };

      var video = document.querySelector("video");
      var progress = document.querySelector("progress");
      var label = document.querySelector("label");
      var assetURL = "http://nickdesaulniers.github.io/netfix/"
                     + "demo/frag_bunny.mp4";
      // Need to be specific for Blink regarding codecs
      // ./mp4info frag_bunny.mp4 | grep Codec
      var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';

      if ("MediaSource" in window
          && MediaSource.isTypeSupported(mimeCodec)) {
        var mediaSource = new MediaSource;
        // console.log(mediaSource.readyState); // closed
        video.src = URL.createObjectURL(mediaSource);
        mediaSource.addEventListener("sourceopen", sourceOpen);
      } else {
        console.error("Unsupported MIME type or codec: ", mimeCodec);
      }

      video.addEventListener("canplay", function() {
        alert("video canplay")
      })

      function sourceOpen(_) {
        // console.log(this.readyState); // open
        var mediaSource = this;
        sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
        fetchAB(assetURL, function(buf) {
          sourceBuffer.addEventListener("updateend", function(event) {
            console.log("sourceBuffer updateend event;"
                        + "mediaSource.readyState:"
                        , mediaSource.readyState);
            // mediaSource.endOfStream();
            // video.play();
            // console.log(mediaSource.readyState); // ended
          });
        });
      };

      // mock `WebSocket` message
      function fetchAB(url, cb) {
        var xhr = new XMLHttpRequest;
        xhr.open("get", url);
        var file = url.split("/").pop();
        xhr.responseType = "arraybuffer";
        xhr.onload = function() {
          // mock `WebSocket` message
          sock.dispatchEvent(new MessageEvent("message", {
            data: xhr.response
          }));
          console.log("video sent to sock", sock);
          cb();
        };
        xhr.onprogress = function(e) {
          progress.max = e.total;
          progress.value = e.loaded;
          label.innerHTML = "loading " + file + " ...<br>"
                            + e.loaded + " of "
                            + e.total + " bytes loaded";
        }
        xhr.send();
      };
    </script>
  </body>
</html>
plnkr http://plnkr.co/edit/RCIqDXTB2BL3lec9bhfz

Blob Url obtained from URL.createObjectUrl(mediastream) returns HTTP 404

I'm trying to get a MediaStream object into a blob, but I have some problems.
This is the part of my script for the UserMedia component:
var mediastream = undefined;
var blob = undefined;
var video = document.getElementById('test');
This is my script where I'm trying my test:
var mediastream = undefined;
var blobUrl = undefined;
var video = document.getElementById('test');

function successCallback(stream) {
  mediastream = stream;
  blobUrl = URL.createObjectURL(mediastream);

  var xhr = new XMLHttpRequest();
  xhr.open('GET', blobUrl, true);
  xhr.responseType = 'arraybuffer';
  xhr.withCredentials = true;
  xhr.onreadystatechange = function () {
    console.log('xhr.readyState=' + xhr.readyState);
    if (xhr.readyState !== 4) {
      return;
    } else {
      var myBlob = this.response;
    }
  };
  xhr.send();   // the posted snippet was cut off here; the request is sent
}

function errorCallback(stream) {
  console.log('error');
}
Setting the blobUrl as the video.src works without any problem:
video.src = blobUrl; <- WORKS
But if I request the blob URL directly (like this one: blob:http%3A//localhost%3A8080/b1ffa5b7-c0cc-4acf-8890-f5d0f08de9cb ), I obtain HTTP status 404.
For sure there is something wrong with my implementation.
Any suggestions?
Thank you!
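Not from this thread, but worth noting: a MediaStream is live media rather than a stored resource, so fetching its object URL with XHR is expected to fail (for simply displaying it, assigning the stream to video.srcObject is the usual route today). If an actual Blob is needed, one common approach is to record the stream with MediaRecorder; a rough sketch under that assumption:

// Sketch: capture the MediaStream into a real Blob with MediaRecorder.
var recordedChunks = [];
var recorder = new MediaRecorder(mediastream);   // 'mediastream' from successCallback

recorder.ondataavailable = function (e) {
  if (e.data && e.data.size > 0) {
    recordedChunks.push(e.data);
  }
};

recorder.onstop = function () {
  var blob = new Blob(recordedChunks, { type: recorder.mimeType });
  var url = URL.createObjectURL(blob);   // this URL points at stored data, unlike the stream URL
  console.log('recorded blob:', blob.size, 'bytes at', url);
};

recorder.start();
// later: recorder.stop();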

Uncaught reference error BufferLoader is not defined

Trying to learn the Audio API, but I get an uncaught reference error for the BufferLoader class. I'm on Chrome and it's up to date. Shouldn't this class work without problems?
<html>
  <head>
    <script type="text/javascript">
      window.onload = init;
      var context;
      var bufferLoader;

      function init() {
        context = new webkitAudioContext();
        bufferLoader = new BufferLoader(
          context,
          [
            ' https://dl.dropboxusercontent.com/u/1957768/kdFFO3.wav',
            ' https://dl.dropboxusercontent.com/u/1957768/geniuse%20meodies.wav',
          ],
          finishedLoading
        );
        bufferLoader.load();
      }

      function finishedLoading(bufferList) {
        // make two sources and play them
        var source1 = context.createBufferSource();
        var source2 = context.createBufferSource();
        source1.buffer = bufferList[0];
        source2.buffer = bufferList[1];
        source1.connect(context.destination);
        source2.connect(context.destination);
        source1.start(0);
        source2.start(0);
      }
    </script>
  </head>
  <body>
  </body>
</html>
The BufferLoader "class" is a custom function created to abstract the use of the Web Audio API. It's not a built-in feature, and must be included in your page in order to be used; there is nothing special about Chrome having this. Here's an example of where it is explained: http://www.html5rocks.com/en/tutorials/webaudio/intro/#toc-abstract
To use, include this code before it is used:
function BufferLoader(context, urlList, callback) {
  this.context = context;
  this.urlList = urlList;
  this.onload = callback;
  this.bufferList = new Array();
  this.loadCount = 0;
}

BufferLoader.prototype.loadBuffer = function(url, index) {
  // Load buffer asynchronously
  var request = new XMLHttpRequest();
  request.open("GET", url, true);
  request.responseType = "arraybuffer";

  var loader = this;

  request.onload = function() {
    // Asynchronously decode the audio file data in request.response
    loader.context.decodeAudioData(
      request.response,
      function(buffer) {
        if (!buffer) {
          alert('error decoding file data: ' + url);
          return;
        }
        loader.bufferList[index] = buffer;
        if (++loader.loadCount == loader.urlList.length)
          loader.onload(loader.bufferList);
      },
      function(error) {
        console.error('decodeAudioData error', error);
      }
    );
  }

  request.onerror = function() {
    alert('BufferLoader: XHR error');
  }

  request.send();
}

BufferLoader.prototype.load = function() {
  for (var i = 0; i < this.urlList.length; ++i)
    this.loadBuffer(this.urlList[i], i);
}
