What I'm trying to do is play an audio file while it is still downloading. The problem I'm facing is that the audio player only starts playback after the download finishes. Here is my code:
The audio tag:
<audio controls preload="all" muted="muted" > </audio>
This is my JS:
var audio = document.querySelector('audio');
var assetURL = 'url/audios/file';
var token = 'Bearer token';
var mimeCodec = 'audio/wav';

var mediaSource = new MediaSource();
audio.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);

function sourceOpen(_) {
    const playPromise = audio.play();
    console.log(this.readyState);
    var mediaSource = this;
    var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
    fetchAB(assetURL, function (buf) {
        sourceBuffer.addEventListener('updateend', function (_) {
            mediaSource.endOfStream();
            audio.play();
            console.log(mediaSource.readyState); // ended
        });
        sourceBuffer.appendBuffer(buf);
    });
}

function fetchAB(url, cb) {
    console.log(url);
    var xhr = new XMLHttpRequest();
    xhr.open('get', url);
    xhr.setRequestHeader('Authorization', token);
    xhr.responseType = 'arraybuffer';
    xhr.onload = function () {
        cb(xhr.response);
    };
    xhr.send();
}
I'm not really sure how to do this; any help would be appreciated.
You cannot partially play an audio file using the Web Audio API. You should create an <audio> element and set its src attribute to load the file; that approach will let you stream a file.
const audio = document.createElement('audio');
audio.src = 'file.wav';
audio.play();
However, your example shows you're setting headers when loading the file. That is not going to work with the above approach, so you should get rid of that (although there seems to be a way to circumvent it: Send custom HTTP request header with HTML5 audio tag).
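If you do need the header, here is a minimal sketch of that workaround, reusing assetURL and token from the question. Note that it waits for the full download, so you lose the streaming behaviour:

// Sketch: load the file with the Authorization header yourself,
// then play it from a blob: URL.
fetch(assetURL, { headers: { Authorization: token } })
    .then(function (response) { return response.blob(); })
    .then(function (blob) {
        var audio = new Audio();
        audio.src = URL.createObjectURL(blob);
        return audio.play();
    })
    .catch(function (err) {
        console.error('Could not load or play audio:', err);
    });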
I think you can simplify your code to this:
var audio = new Audio('file.wav');
var promise = audio.play();

// Older browsers may not return a promise
if (promise) {
    promise.catch(function () {
        // Couldn't play audio for some reason
    });
}
The built-in Audio class supports buffering, and playback will start as soon as possible, without waiting for the whole file to be downloaded.
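If streaming during download is essential and the header can't be dropped, another option (a sketch, not a drop-in fix) is to keep MediaSource but append chunks as they arrive instead of appending one big buffer. This assumes the container is one MediaSource actually accepts (check MediaSource.isTypeSupported; plain audio/wav generally is not) and reuses assetURL, token and mimeCodec from the question:

var audio = document.querySelector('audio');
var mediaSource = new MediaSource();
audio.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function () {
    var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);

    fetch(assetURL, { headers: { Authorization: token } }).then(function (response) {
        var reader = response.body.getReader();

        function pump() {
            return reader.read().then(function (result) {
                if (result.done) {
                    // Close the stream once the last append has settled.
                    if (sourceBuffer.updating) {
                        sourceBuffer.addEventListener('updateend', function () {
                            mediaSource.endOfStream();
                        }, { once: true });
                    } else {
                        mediaSource.endOfStream();
                    }
                    return;
                }
                // Append this chunk, wait for the buffer to settle, repeat.
                sourceBuffer.appendBuffer(result.value);
                return new Promise(function (resolve) {
                    sourceBuffer.addEventListener('updateend', resolve, { once: true });
                }).then(pump);
            });
        }

        pump();
        audio.play(); // may still be blocked by autoplay policies
    });
});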
I have video files hosted on a CDN, and the files are encrypted, so I need to decrypt them before playing them in the browser. But the web video tag has no interface for modifying the media stream.
So I want to run a proxy on the client side with JavaScript that intercepts the media stream request and decrypts the stream before feeding it to the video tag.
Is it possible?
Following math-chen's answer, I have tried the code below, but when I play it, the video keeps spinning and never renders a frame, as in the image below.
I am using a very small unencrypted video file, out.mp4, so it can be loaded in one go.
<html>
<video id="video" controls src="out.mp4">
</video>
<script>
    const video = document.querySelector('#video');
    const mediaSource = new MediaSource();
    video.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener('sourceopen', sourceOpen);

    function sourceOpen() {
        var mime = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
        var sourceBuffer = mediaSource.addSourceBuffer(mime);
        fetchBuffer('out.mp4', buffer => {
            sourceBuffer.appendBuffer(buffer);
        });
    }

    function fetchBuffer(url, callback) {
        var xhr = new XMLHttpRequest();
        xhr.open('get', url);
        xhr.responseType = 'arraybuffer';
        xhr.onload = function () {
            callback(xhr.response);
        };
        xhr.send();
    }
</script>
</html>
It does not need a proxy.
const video = document.querySelector('#video');
const mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);

// you can implement your logic in sourceOpen
function sourceOpen() {
    // mime is the MIME type of the video
    const sourceBuffer = mediaSource.addSourceBuffer(mime);
    fetch(videoUrl).then(function (response) {
        // decrypt here
        return response.arrayBuffer();
    }).then(buffer => {
        sourceBuffer.appendBuffer(buffer);
    });
}
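As for the "decrypt here" placeholder, here is a sketch of what it could look like with SubtleCrypto, assuming the file is AES-GCM encrypted and the client already holds the raw key and IV (rawKey and iv below are placeholder names). SubtleCrypto works on whole ArrayBuffers, so this decrypts the entire file before appending; chunked playback would need per-segment encryption:

function decryptBuffer(encrypted, rawKey, iv) {
    return crypto.subtle
        .importKey('raw', rawKey, { name: 'AES-GCM' }, false, ['decrypt'])
        .then(function (key) {
            return crypto.subtle.decrypt({ name: 'AES-GCM', iv: iv }, key, encrypted);
        });
}

fetch(videoUrl)
    .then(function (response) { return response.arrayBuffer(); })
    .then(function (encrypted) { return decryptBuffer(encrypted, rawKey, iv); })
    .then(function (buffer) { sourceBuffer.appendBuffer(buffer); });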
I am obtaining 1000 ms audio chunks from the client using getUserMedia(), converting them into 1-second audio blobs, and sending them to the server (and on to the rest of the clients) using socket.io until the user stops recording.
On the server I re-emit the received blob to the other clients, where the blob's URL is obtained using createObjectURL() and then set as the src of an <audio> tag on the frontend. Every 1000 ms the clients get a new blob, obtain its URL, and feed it to the <audio> tag.
However, the transition from the previous URL to the new one has a visible delay, which makes the audio patchy rather than smooth and consistent.
On the client side, this is how I record the audio blobs:
var chunks = [];
var mediaRecorder = new MediaRecorder(stream, { bitsPerSecond: 32000 });

mediaRecorder.ondataavailable = function (e) {
    chunks.push(e.data);
};

mediaRecorder.start();
setInterval(() => {
    mediaRecorder.stop();
    mediaRecorder.start();
}, 1000);

mediaRecorder.onstop = function (e) {
    var blob = new Blob(chunks, { 'type': 'audio/ogg; codecs=opus' });
    chunks = [];
    socket.emit('audio-blob', blob);
};
On the server I just re-emit the blob to clients,
socket.to(String(socket.room)).broadcast.emit('audio-blob', blob);
and on the receiving clients, the blob is played like this,
socket.on('audio-blob', blob => {
    var newblob = new File([blob], "filename");
    var audioURL = window.URL.createObjectURL(newblob);
    window.audio = new Audio();
    window.audio.src = audioURL;
    window.audio.play();
});
How do I make the audio consistent and reduce the noticeable lag during the transition?
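One way to avoid the gap (a sketch, not verified against this exact setup): record with mediaRecorder.start(1000), i.e. the timeslice argument, instead of the stop()/start() loop, so all chunks belong to one continuous stream, and on the receiving clients append every chunk into a single MediaSource rather than creating a fresh Audio element per blob:

// Receiving side: one persistent audio element fed by a MediaSource.
// Assumes the sender emits timesliced chunks of a single recording;
// 'audio/webm; codecs=opus' is an assumption, match your recorder.
var audio = new Audio();
var mediaSource = new MediaSource();
var queue = [];
var sourceBuffer = null;

audio.src = URL.createObjectURL(mediaSource);
audio.play(); // may require a prior user gesture under autoplay policies

mediaSource.addEventListener('sourceopen', function () {
    sourceBuffer = mediaSource.addSourceBuffer('audio/webm; codecs=opus');
    sourceBuffer.addEventListener('updateend', appendNext);
});

function appendNext() {
    if (sourceBuffer && !sourceBuffer.updating && queue.length) {
        sourceBuffer.appendBuffer(queue.shift());
    }
}

socket.on('audio-blob', function (blob) {
    blob.arrayBuffer().then(function (buf) {
        queue.push(buf);
        appendNext();
    });
});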
My code:
// Create an AudioContext instance for this sound
var audioContext = new (window.AudioContext || window.webkitAudioContext)();
var maxChannelCount = audioContext.destination.maxChannelCount;
var gainNode = audioContext.createGain();
audioContext.destination.channelCount = maxChannelCount;

var merger = audioContext.createChannelMerger(maxChannelCount);
merger.connect(audioContext.destination);
gainNode.gain.value = 0.1; // 10%
gainNode.connect(audioContext.destination);

// Create a buffer source for the incoming sound content
var source = audioContext.createBufferSource();

// Create the XHR which will grab the audio contents
var request = new XMLHttpRequest();
// Set the audio file src here
request.open('GET', 'phonemes/bad-bouyed/bad.mp3', true);
// Setting the responseType to arraybuffer sets up the audio decoding
request.responseType = 'arraybuffer';

request.onload = function () {
    // Decode the audio once the request is complete
    audioContext.decodeAudioData(request.response, function (buffer) {
        source.buffer = buffer;
        // Connect the audio to source (Can I also set the gain within this declaration?)
        source.connect(merger, 0, 10);
        // Simple setting for the buffer
        source.loop = false;
        // Play the sound!
        source.start(0);
    }, function (e) {
        console.log('Audio error! ', e);
    });
};

// Send the request which kicks off the load
request.send();
The above code loads an mp3 file and plays it using the Web Audio API. This works great, although I'm wondering whether it is possible to set the gain of the source AND also set the output channel using my existing code. You can see above that I have already created a gain node and connected it to the audio context's destination. What is the syntax for specifying both? Do I declare the connections in the order I wish for them to be executed?
For example:
source.connect(merger, 0, 10);
source.connect(gainNode);
Any help on this would be great.
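For reference, a sketch using the names from the code above: Web Audio nodes form a graph, so both the gain and the output channel can be expressed by chaining connections. The order of the connect() calls doesn't matter; only the resulting topology does.

source.connect(gainNode);                 // source feeds the gain node
gainNode.connect(merger, 0, 10);          // gain output 0 into merger input 10
merger.connect(audioContext.destination); // merger feeds the speakers

With this chain in place, the direct gainNode.connect(audioContext.destination) from the original code would be unnecessary.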
I'm trying to follow a tutorial online by piecing together the examples. I feel like this should be playing the mp3 file. I'm using the Chrome browser and it's up to date. I don't get any errors on the console. I'm not sure what I need to change or add to make this work.
<script type="text/javascript">
    // creating an audio context
    window.addEventListener('load', init);

    function init()
    {
        try
        {
            window.AudioContext = window.AudioContext || window.webkitAudioContext;
            var context = new AudioContext();
        }
        catch(e)
        {
            alert("Your browser doesn't support Web Audio API");
        }
        loadSound();
        playSound();
    }

    // loading sound into the created audio context
    function loadSound()
    {
        // set the audio file's URL
        var audioURL = 'audio files/song.mp3';
        // creating a new request
        var request = new XMLhttpRequest();
        request.open("GET", audioURL, true);
        request.responseType = 'arraybuffer';
        request.onLoad funtion(){
            // take the audio from http request and decode it in an audio buffer
            var audioBuffer = null;
            context.decodeAudioData(request.response, function(buffer){ audioBuffer = buffer; });
        }
        request.send();
    }, onError);

    // playing the audio file
    function playSound(buffer) {
        // creating source node
        var source = audioContext.createBufferSource();
        // passing in file
        source.buffer = audioBuffer;
        // start playing
        source.start(0);
    }
</script>
</head>
</html>
You are using an async XMLHttpRequest (note that it should be spelled with a capital "H"), so most probably the playSound function is called before request.onLoad (note: missing =) completes.
Try to trace the execution of your script using console.log or similar to find bugs like this, and use the JavaScript console to catch syntax errors.
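In isolation, those two fixes look like this:

var request = new XMLHttpRequest(); // capital "H" in XMLHttpRequest
request.onload = function () {      // lowercase "onload", assigned with "="
    // the response is only available here, so decode and play inside
};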
I got this thing fixed :) I made use of the audio tag along with the Web Audio API. Here's the code:
var audio = new Audio();
audio.src = 'audio files/song.mp3';
audio.controls = true;
audio.autoplay = true;
document.body.appendChild(audio);

var context = new webkitAudioContext();
var analyser = context.createAnalyser();

window.addEventListener('load', function (e) {
    // Our <audio> element will be the audio source.
    var source = context.createMediaElementSource(audio);
    source.connect(analyser);
    analyser.connect(context.destination);
}, false);
Thanks for trying to help :))
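One caveat about the snippet above: webkitAudioContext is the old WebKit-prefixed constructor, and current browsers expose the unprefixed name, so a more portable construction is:

// fall back to the prefixed name only when the standard one is missing
var context = new (window.AudioContext || window.webkitAudioContext)();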
Is your audioURL correct?
Why is there a space in audio files/song.mp3?
============Edit============
<script>
    // creating an audio context
    var context;
    var audioBuffer;
    window.addEventListener('load', init);

    function init()
    {
        try
        {
            window.AudioContext = window.AudioContext || window.webkitAudioContext;
            context = new AudioContext();
        }
        catch(e)
        {
            alert("Your browser doesn't support Web Audio API");
        }
        loadSound();
        // playSound(); // comment here
    }

    // loading sound into the created audio context
    function loadSound()
    {
        // set the audio file's URL
        var audioURL = 'AllofMe.mp3';
        // creating a new request
        var request = new XMLHttpRequest();
        request.open("GET", audioURL, true);
        request.responseType = 'arraybuffer';
        request.onload = function(){
            // take the audio from http request and decode it in an audio buffer
            context.decodeAudioData(request.response, function(buffer){
                audioBuffer = buffer;
                console.log(audioBuffer);
                if(audioBuffer){ // check here
                    playSound();
                }
            });
        };
        request.send();
    }

    // playing the audio file
    function playSound() {
        // creating source node
        var source = context.createBufferSource();
        // passing in file
        source.buffer = audioBuffer;
        // start playing
        source.connect(context.destination); // added
        source.start(0);
        console.log('playing');
    }
</script>
I was looking for a solution to play mp3 on a mobile device and found this page. I made the provided example work with help from here. The working example is below:
var context;
var saved;

try {
    context = new (window.AudioContext || window.webkitAudioContext)();
} catch (e) {
    console.log("Your browser doesn't support Web Audio API");
}

if (saved) {
    playSound(saved);
} else {
    loadSound();
}

// loading sound into the created audio context
function loadSound() {
    // set the audio file's URL
    var audioURL = '/path/to/file.mp3';
    // creating a new request
    var request = new XMLHttpRequest();
    request.open('GET', audioURL, true);
    request.responseType = 'arraybuffer';
    request.onload = function () {
        // take the audio from the http request and decode it into an audio buffer
        context.decodeAudioData(request.response, function (buffer) {
            // save the buffer, so it isn't loaded again
            saved = buffer;
            // play sound
            playSound(buffer);
        });
    };
    request.send();
}

// playing the audio file
function playSound(buffer) {
    // creating source node
    var source = context.createBufferSource();
    // passing in data
    source.buffer = buffer;
    // routing the source to the speakers so it can be heard
    source.connect(context.destination);
    // start playing
    source.start(0);
}
It looks like playing files works fine on an Android device, but not on iOS. For that you have to follow this guide: How to: Web Audio on iOS (but use the touchend event and replace the noteOn method with start).
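A sketch of that unlock pattern, assuming context is the AudioContext created above: trigger audio from inside a user-gesture handler. On current WebKit, resume() is enough; very old iOS versions needed an empty buffer played from the handler instead.

document.addEventListener('touchend', function unlock() {
    if (context.state === 'suspended') {
        context.resume();
    }
    document.removeEventListener('touchend', unlock);
});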
I'd like to dynamically generate a bitstream in JavaScript that is, for example, a large OGG video.
Is it possible to tell the browser to ask a JavaScript function for the bitstream instead of making an HTTP GET request to some location?
The only way I have found to feed data to the video tag involves data: URLs. But that requires the whole video to be encoded in the document.
This is a bad solution for large videos, which would normally be streamed. AFAIK you can't dynamically add more data to data: URLs.
Does anyone know if this is possible somehow?
I don't know if it is possible with JavaScript, but you can probably do something like that with a Java or JavaScript (?) player, like Cortado.
http://www.flumotion.net/cortado/
If your video is encoded for streaming, it will be downloaded progressively by whatever browser is requesting it. That's just how it works. You will need both OGG and MP4 for FF/Chrome/IE9.
http://www.mediacollege.com/video/streaming/http.html
"encoded on the document" doesn't make any sense. The video is encoded by your encoder and can further have settings to optimize for streaming (that is it encodes the first XX seconds at a lower bitrate to get started faster).
This should now be possible with the MediaSource API.
Here is an example from the link above:
var video = document.querySelector('video');
var assetURL = 'frag_bunny.mp4';
// Need to be specific for Blink regarding codecs
// ./mp4info frag_bunny.mp4 | grep Codec
var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';

if ('MediaSource' in window && MediaSource.isTypeSupported(mimeCodec)) {
    var mediaSource = new MediaSource();
    //console.log(mediaSource.readyState); // closed
    video.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener('sourceopen', sourceOpen);
} else {
    console.error('Unsupported MIME type or codec: ', mimeCodec);
}

function sourceOpen(_) {
    //console.log(this.readyState); // open
    var mediaSource = this;
    var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
    fetchAB(assetURL, function (buf) {
        sourceBuffer.addEventListener('updateend', function (_) {
            mediaSource.endOfStream();
            video.play();
            //console.log(mediaSource.readyState); // ended
        });
        sourceBuffer.appendBuffer(buf);
    });
}

function fetchAB(url, cb) {
    console.log(url);
    var xhr = new XMLHttpRequest();
    xhr.open('get', url);
    xhr.responseType = 'arraybuffer';
    xhr.onload = function () {
        cb(xhr.response);
    };
    xhr.send();
}
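To connect this back to the question of generating the bitstream in JavaScript rather than fetching it: you can append programmatically produced segments the same way. Here is a sketch reusing video and mimeCodec from the example above, where nextChunk() is a hypothetical generator function; it must return segments in a format MediaSource actually accepts (generally WebM or fragmented MP4, not OGG), or null when the stream ends.

var mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function () {
    var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
    // Each finished append fires 'updateend', which pulls the next chunk.
    sourceBuffer.addEventListener('updateend', pump);
    pump();
    video.play(); // may require a user gesture under autoplay policies

    function pump() {
        if (sourceBuffer.updating) return;
        var chunk = nextChunk(); // hypothetical generator function
        if (chunk === null) {
            mediaSource.endOfStream();
        } else {
            sourceBuffer.appendBuffer(chunk);
        }
    }
});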