Detecting WebAudio sound end - javascript

I'm using JavaScript to build a radio and I'm trying to give it different functionalities, but I need to be able to detect the end of a song to make it work. Is there any way to do this?
I've tried different methods I've found online, like .ended and such, but I don't think those work without using the HTML audio tag. So I tried to make an audio tag that uses the same data source as my JS radio, get the file length, stop my sourceNode at the end time and create a new one — but I keep getting null returned as the data, so that doesn't work either.
I want to do something like:
// Goal: when the current track ends, ask radio.php for the next file's URL,
// download it as an ArrayBuffer, decode it, and start playback.
// FIX: the original snippet never closed the handler function — the final
// `};` was missing, so the code did not parse; it is added here.
// NOTE(review): `onended` fires on an AudioBufferSourceNode, not on the
// AudioContext — attach this handler to sourceNode (see setupAudioNodes).
context.onended = function() {
  $.ajax({
    url: '../scripts/radio.php',
    data: {
      attr1: 'value1'
    },
    // `data` is assumed to be the URL of the next audio file returned by
    // the PHP endpoint — TODO confirm against radio.php.
    success: function(data) {
      console.log(data);
      fileChosen = true;
      setupAudioNodes();
      var request = new XMLHttpRequest();
      request.addEventListener("progress", updateProgress);
      request.addEventListener("load", transferComplete);
      request.addEventListener("error", transferFailed);
      request.addEventListener("abort", transferCanceled);
      request.open('GET', data, true);
      request.responseType = 'arraybuffer';
      // When loaded decode the data
      request.onload = function() {
        $("#title").html("Title");
        $("#album").html("Goes");
        $("#artist").html("Here");
        onWindowResize();
        $("#title, #artist, #album").css("visibility", "visible");
        // decode the data
        context.decodeAudioData(request.response, function(buffer) {
          // when the audio is decoded play the sound
          sourceNode.buffer = buffer;
          sourceNode.start(0);
          $("#freq, body").addClass("animateHue");
        //on error
        }, function(e) {
          console.log(e);
        });
      };
      request.send();
    }
  });
};
I want this to run at the end of a song and play the next file. It would work if I could get the end time of the currently playing song.

To fix the above issue I added the .ended event inside the function that the source was set up:
function setupAudioNodes() {
  // Wire up the node graph: buffer source -> analyser (for the
  // visualiser) and buffer source -> destination (so it is audible).
  analyser = context.createAnalyser();
  sourceNode = context.createBufferSource();
  sourceNode.connect(analyser);
  sourceNode.connect(context.destination);

  // Start the visualisation update loop.
  rafID = window.requestAnimationFrame(updateVisualization);

  // 'ended' fires on the source node itself, so the handler must be
  // (re)attached every time a fresh node is created here.
  var handleEnded = function() {
    sourceNode.stop(0);
    playFirst();
  };
  sourceNode.onended = handleEnded;
}

Related

Chunking a large video file and appending to BLOB for Continuous playback

I tried the MediaSource API, but it's not working correctly because the latest browsers do not support some of its functions, for example webkitMediaSourceURL.
https://www.html5rocks.com/en/tutorials/streaming/multimedia/
I'm looking for an open-source JavaScript solution.
I tried another example, but there is a problem with seamless playback: a gap between the two chunks is noticeable.
// Play two pre-fetched WebM chunks back to back: start chunk 1
// immediately, then queue chunk 2 to start when chunk 1 ends.
var player = document.querySelector('video');

// Start playing the first chunk and kick off the fetch of the second.
function playFirstChunk(chunk1) {
  player.src = URL.createObjectURL(chunk1);
  player.play();
  return getBlob('webm-chunk2');
}

// Register a one-shot 'ended' handler that swaps in the second chunk.
function queueSecondChunk(chunk2) {
  var url = URL.createObjectURL(chunk2);
  player.addEventListener('ended', function handler() {
    player.removeEventListener('ended', handler);
    player.src = url;
    player.load();
    player.play();
  }, false);
}

getBlob('webm-chunk1')
  .then(playFirstChunk)
  .then(queueSecondChunk);

Why is my JavaScript Code not playing Music?

I want my buttons (Home, About, Blog) on my website to play some sound or music when someone clicks on them. I have tried and tried, but can't find the error in the JavaScript code.
<body onload="htmlIsReady()">
<script type="text/javascript">
// Question code as posted — the answer below this snippet explains the
// bugs: the Audio object is built from a placeholder string instead of
// the received URL, `event.data.data` is wrong, and `audio.play.play()`
// is not a function.
var audio;
var audioLink;
window.onmessage = function(event) {
if (event.data.data) {
//Passes the audio url to the new variable
audioLink = event.data.data;
//Created a new audio object with the received audio url
// NOTE(review): audioLink is never used here — a literal placeholder
// string is passed instead of the received URL.
audio = new Audio('URL of AUDIO-FILE');
}
};
function htmlIsReady() {
window.parent.postMessage("html_is_ready", "*");
}
//Function that plays the audio
function playAudio() {
// BUG (discussed in the answer): should be audio.play()
audio.play.play();
}
</script>
// Button that executes the function that plays the audio everytime it's
clicked
<button class="button" onclick="playAudio()">Play</button>
</body>
You're using audio.play.play(), which should be: audio.play(). However, you also haven't provided any actual proper URL/relative filepath as the audio source and at the moment your audio variable is never actually assigned as a new Audio.
It's not too clear why you're using window.onmessage to obtain the url of the audio file but it won't work no matter what because event.data.data is simply wrong. To obtain the data of a posted message you have to use event.data.
Furthermore here you're assigning the data to a variable called audioLink
audioLink = event.data.data;
but don't actually use it for the instantiation of your HtmlAudioElement audio.
Lastly, audio.play.play(); is wrong. To start playback you simply use the play() method on the audio object - audio.play().
Here's a working example:
var audio;
var audioLink;
function playAudio() {
audio.play();
}
window.onmessage = function(event) {
if (event.data) {
audioLink = event.data;
audio = new Audio(audioLink);
}
};
window.postMessage("https://www.w3schools.com/tags/horse.ogg", "*");
<button class="button" onclick="playAudio()">Play</button>

safari ios audio- refusing to play with error

The javascript error is: Unhandled Promise Rejection: NotAllowedError: The request is not allowed by the user agent or the platform in the current context, possibly because the user denied permission.
My setup works across other browsers, desktop and mobile.
The way it works is:
have a flag first_audio_played = false;
add a touch event listener that plays some audio, and sets first_audio_played = true; (then removes the touch listener)
all subsequent audio checks if(first_audio_played) some_other_audio.play();
this way, only the first audio played requires direct user input. after that, all audio is free to be triggered by in-game events, timing, etc...
this appears to be the "rule" for audio across most browsers. is the iOS "rule" that every audio needs to be triggered by user input? or is there some other step I'm missing?
For my JavaScript game, sounds stopped working on iOS recently. They all have readyState=4, but only the sound I played on tap works; the others won't play. Maybe you could play all the sounds on the first tap. But the solution that works for me for now is to load all the sounds from AJAX ArrayBuffers and use decodeAudioData(). Then, once you've played one sound from a user tap (on an element other than the body), they all play whenever needed.
Here is my working code where the second way of doing it is on bottom. When I tap to play sound2, sound1 starts working also.
<html>
<body>
<!-- Tapping either div supplies the user gesture iOS requires before
     audio may play. -->
<div id=all style='font-size:160%;background:#DDD' onclick="log('clicked');playSound(myAudio)">
Sound1 should be playing every couple seconds.
<br />Tap here to play sound1.
</div>
<div id=debug style='font-size:120%;' onclick="playSound(myAudio2)">
Tap here to play the sound2.
</div>
<script>
// Two loading strategies for the same two sounds. The if(0) branch is the
// original HTMLAudioElement approach (kept for comparison, disabled); the
// else branch fetches raw bytes over XHR and decodes them with the Web
// Audio API, which keeps working on iOS once one sound has been played
// from a user tap.
var url = "http://curtastic.com/drum.wav"
var url2 = "http://curtastic.com/gold.wav"
var myAudio, myAudio2
// Disabled branch: plain Audio-element playback (stopped working on iOS).
if(0)
{
var playSound = function(sound)
{
log("playSound() readyState="+sound.readyState)
log("gold readyState="+myAudio2.readyState)
sound.play()
}
var loadSound = function(url, callback)
{
var audio = new Audio(url)
audio.addEventListener('canplaythrough', function()
{
log('canplaythrough');
if(callback)
callback()
}, false)
audio.load()
// Fire the callback immediately if the file is already buffered.
if(audio.readyState > 3)
{
log('audio.readyState > 3');
if(callback)
callback()
}
return audio
}
myAudio = loadSound(url, startInterval)
myAudio2 = loadSound(url2)
}
else
{
// Web Audio branch: `sound` here is a decoded AudioBuffer, not an element.
var playSound = function(sound)
{
log("playSound()")
var source = audioContext.createBufferSource()
if(source)
{
source.buffer = sound
// Older WebKit implementations expose noteOn() instead of start().
if(!source.start)
source.start = source.noteOn
if(source.start)
{
var gain = audioContext.createGain()
source.connect(gain)
gain.connect(audioContext.destination)
source.start()
}
}
}
// Download `url` as an ArrayBuffer and pass the decoded buffer to callback.
var loadSound = function(url, callback)
{
log("start loading sound "+url)
var ajax = new XMLHttpRequest()
ajax.open("GET", url, true)
ajax.responseType = "arraybuffer"
ajax.onload = function()
{
audioContext.decodeAudioData(
ajax.response,
function(buffer)
{
log("loaded sound "+url)
log(buffer)
callback(buffer)
},
function(error)
{
log(error)
}
)
}
ajax.send()
}
// The webkit prefix covers older Safari.
var AudioContext = window.AudioContext || window.webkitAudioContext
var audioContext = new AudioContext()
loadSound(url, function(r) {myAudio = r; startInterval()})
loadSound(url2, function(r) {myAudio2 = r})
}
// Once sound1 has loaded, attempt to play it every 2 seconds.
function startInterval()
{
log("startInterval()")
setInterval(function()
{
playSound(myAudio)
}, 2000)
}
// Log to the console and to the on-page #debug div.
function log(m)
{
console.log(m)
debug.innerHTML += m+"<br />"
}
</script>
</body>
</html>
You can use either [WKWebViewConfiguration setMediaTypesRequiringUserActionForPlayback:WKAudiovisualMediaTypeNone] or [UIWebView setMediaPlaybackRequiresUserAction:NO] depending on your WebView class (or Swift equivalent).

Does the "preload" attribute of <audio> affect the window.onload event time?

I have assumed (possibly incorrectly?), due to the asynchronous nature of HTML, that the timeline of page loading is as follows:
... etc loading html into DOM
encounter <audio> tag
preload is specified as "auto"; trigger buffering
continue loading html into DOM... etc
fire window.onload event callbacks
asynchronously some time later: audio resource is found, begin buffering, or server error is returned; fire readystatechange event callbacks
Whereas what I'm hoping for is that the preload attribute with a value of "auto" will delay the window.onload event from firing or delay tag-to-DOM processing until the audio resource is found and has had begun buffering, or a server error is returned and loading is canceled.
I can't imagine withholding window.onload for an audio resource, but then I have seen page processing come to a halt for flash resource loading or tracking script loading in the past.
TLDR: What is the exact timeline of window.onload with regard to resource loading--specifically the audio tag?
window.onload event appears to be called before the media src is fully loaded. Using approaches described at How do you check if a HTML5 audio element is loaded? ; and including .webkitAudioDecodedByteCount
<!DOCTYPE html>
<html>
<head>
<script>
// Demonstrates that window.onload fires before the audio src is fully
// loaded, by logging readyState / decoded byte count at onload and at
// each media event.
window.addEventListener("load", function() {
var media = document.querySelector("audio");
console.log("window onload event"
, media.webkitAudioDecodedByteCount
, media.readyState)
})
// NOTE(review): the handlers below take no parameter and rely on the
// global `event` object (WebKit behaviour); webkitAudioDecodedByteCount
// is likewise non-standard — confirm on the target browsers.
function myOnCanPlayFunction() {
console.log("Can play", event.target.webkitAudioDecodedByteCount
, event.target.seekable.start(0)
, event.target.seekable.end(0));
}
function myOnCanPlayThroughFunction() {
console.log("Can play through", event.target.webkitAudioDecodedByteCount
, event.target.seekable.start(0)
, event.target.seekable.end(0));
}
function myOnLoadedData() {
console.log("Loaded data", event.target.webkitAudioDecodedByteCount
, event.target.seekable.start(0)
, event.target.seekable.end(0));
}
</script>
</head>
<body>
<audio oncanplay="myOnCanPlayFunction()"
oncanplaythrough="myOnCanPlayThroughFunction()"
onloadeddata="myOnLoadedData()"
src="/path/to/audio/file"
preload autoplay buffered controls></audio>
</body>
</html>
plnkr version 1 http://plnkr.co/edit/zIIDDLZeVU7NHdfAtFka?p=preview
An alternative approach using XMLHttpRequest , onended event of AudioContext; Promise; recursion to request, play array of files in sequence. See AudioContext.decodeAudioData()
<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet" href="style.css">
<script>
// Plays every URL in `sources` in order: each file is requested as an
// ArrayBuffer, decoded, and played; the source node's onended resolves
// the promise so the next track can be requested recursively.
var sources = ["/path/to/audio/src/1"
, "/path/to/audio/src/2"];
var src = sources.slice(0); // copy original array
// Fetch, decode and start one URL; the returned promise settles only
// when playback of that file has ended.
// NOTE(review): a fresh AudioContext is created per track — browsers cap
// the number of live contexts; confirm this for long playlists.
function getAudio(url) {
return new Promise(function(resolve, reject) {
var audioCtx = new(window.AudioContext || window.webkitAudioContext)();
var source = audioCtx.createBufferSource();
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
request.onload = function() {
var audioData = request.response;
audioCtx.decodeAudioData(audioData).then(function(decodedData) {
source.buffer = decodedData;
source.connect(audioCtx.destination);
console.log(source, decodedData);
// do stuff when current audio has ended
source.onended = function() {
console.log("onended:", url);
// Resolve with the remaining queue, or a final marker string.
if (src.length)
resolve(src)
else resolve("complete")
}
source.start(0);
});
}
request.send();
})
}
// Recursively consume `src` until every track has been played.
var audio = (function tracks(s) {
return getAudio(s.shift())
.then(function(data) {
if (Array.isArray(data) && data.length) return tracks(data)
else return data
})
}(src));
// do stuff when all `src` have been requested, played, ended
audio.then(function(msg) {
console.log(msg)
})
</script>
</head>
<body>
</body>
</html>
plnkr version 2 http://plnkr.co/edit/zIIDDLZeVU7NHdfAtFka?p=preview

RecordRTC video upload AmazonS3 Timeout error

I am currently developing a component that allows you to make webcam videos and upload them directly to amazon s3. For that purpose I user RecordRTC library and Amazon S3 storage. I have discovered a strange issue, and I am not sure whether it has to do with RecordRTC blobs or with amazon configuration. When the file size is over 1MB, the Amazon server hangs and after 20 seconds returns a timeout error. Could anyone help me figure this out? Here is my code for the recorder component (p() is the same as console.log()):
// Pick whichever (legacy, vendor-prefixed) getUserMedia variant the
// browser provides.
navigator.record_function = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
if (navigator.record_function) {
navigator.record_function(videoObj, function (stream) {
// Show a live camera preview while recording.
video.src = window.URL.createObjectURL(stream);
video.play();
$("#stop_recording").click(function () {
// NOTE(review): stream.stop() is the deprecated MediaStream API;
// modern browsers stop the individual tracks instead — confirm targets.
stream.stop();
});
// init recorders
audio_recorder = RecordRTC(stream, {type: "audio", bufferSize: 16384});
video_recorder = RecordRTC(stream, videoOptions);
}, errBack);
}
// Begin recording audio and video on the same button press.
$("#start_recording").click(function () {
// record the audio and video
video_recorder.startRecording();
audio_recorder.startRecording();
});
And for the uploader component:
// Build the multipart form for a browser-to-S3 POST upload; the field
// set (key, policy, signature, acl, ...) must match the signed policy
// document the server returned in `msg`.
// Populate the POST parameters.
fd.append('key', "users/" + Main.current_user.id + "/test.webm");
fd.append('AWSAccessKeyId', msg.access_key_id);
fd.append('acl', 'private');
fd.append('policy', msg.policy);
fd.append('signature', msg.signature);
fd.append('Content-Type', '$Content-Type');
fd.append('x-amz-server-side-encryption', 'AES256');
fd.append('success_action_status', '201');
fd.append('name', 'test.webm');
fd.append('Filename', 'test.webm');
// S3 ignores form fields that follow the file, so the file goes last.
fd.append("file", file);
xhr.open('POST', 'https://' + msg.bucket + '.s3.amazonaws.com', true);
// Log upload progress and state changes for debugging (p == console.log).
xhr.upload.addEventListener('progress', function (e) {
p(e);
}, false);
xhr.onreadystatechange = function () {
p(xhr.readyState);
};
xhr.send(fd);
// On stop: finalize both recordings, log the blobs, upload the video.
$("#stop_recording").click(function () {
// stop recorders
audio_recorder.stopRecording(function () {
var audio_blob = audio_recorder.getBlob();
p(audio_blob);
// NOTE(review): the audio upload is left commented out in the post.
// VideoUploader.upload_user_audio(audio_blob);
}
);
video_recorder.stopRecording(function () {
var video_blob = video_recorder.getBlob();
p(video_blob);
VideoUploader.upload_user_video(video_blob);
});
});
The error message on timeout is:
Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
I appreciate any help I can get, I am really lost here.
Thanks in advance.
I have managed to find a pretty... strange solution for this issue. It appears the problem comes from the way RecordRTC saves the blob object, which makes it impossible to upload in Firefox 35 on Mac. I could not find the code in RecordRTC that causes the problem — the Blob seems to be generated correctly — but the workaround that worked for me was re-encoding the Blob into a new Blob via FileReader.
// Workaround: round-trip the RecordRTC blob through a FileReader and
// rebuild it as a fresh Blob before uploading; the rebuilt Blob uploads
// correctly where the original one caused the S3 upload to hang.
video_recorder.stopRecording(function () {
  var originalBlob = video_recorder.getBlob();
  var reader = new FileReader();
  reader.onload = function (ev) {
    // Rebuild the blob from the raw bytes and hand it to the uploader.
    var rebuiltBlob = new Blob([ev.target.result], {type: "video/webm"});
    VideoUploader.upload_user_video(rebuiltBlob)
  };
  reader.readAsArrayBuffer(originalBlob);
});
As for why that happens, I have no idea, but other projects that use the same technology are affected as well http://videobooth.herokuapp.com/ (the same exact problem in the same exact browser, upload hangs). Maybe this information could be useful for people working on RecordRTC, as I think this workaround could be made into a patch.
Solution is tested on Mac Firefox 35 and Mac Chrome 40.

Categories