I have a use case where we do not want a user who is taking a test to hit the sleep timeout during the test, which is 3 hours long. I was looking at NoSleep.js, but it has a CPU overuse problem and was not working on Windows.
I was wondering how test-taking apps do this, and also how YouTube achieves it.
Any help on this is really appreciated.
The Wake Lock API arrives in Chrome 79. However, none of the other browsers support it as of now.
https://developers.google.com/web/updates/2019/12/nic79#wake-lock
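Where the API is available, a minimal sketch of requesting a screen wake lock looks roughly like this (feature-detect first; the lock is released automatically when the tab is hidden):
let wakeLock = null
async function requestWakeLock() {
  // Only attempt this in browsers that expose the Wake Lock API
  if ('wakeLock' in navigator) {
    try {
      wakeLock = await navigator.wakeLock.request('screen')
      wakeLock.addEventListener('release', () => console.log('Wake lock released'))
    } catch (err) {
      console.error(err)
    }
  }
}
// Re-acquire the lock when the page becomes visible again
document.addEventListener('visibilitychange', () => {
  if (wakeLock !== null && document.visibilityState === 'visible') {
    requestWakeLock()
  }
})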
YouTube handles this by simply playing video. When you play a video with sound in the browser, the browser requests a wake lock automatically so as not to interrupt the user.
It isn't clear from your question what your specific requirements are, but if playing a video is possible, try that.
So, after dealing with the pain of this problem for a whole afternoon, I'm posting my solution. I'm using modules, so delete the export statements if you're not. My goal was to minimize the effect on battery, so I play an empty video file (10 x 10 px, one frame long, with an empty audio track) every 10 seconds. I play it for the first time when the user first clicks, so the browser will allow me to play it. There are still side effects, though: in Safari the audio icon will blink, and on the iPhone lock screen the page will be shown as the last one playing video/audio.
let noSleep = null
let video = null

// --- Public ---

export function activate() {
  if (!noSleep) noSleep = new NoSleep()
}

export function deactivate() {
  if (noSleep) {
    noSleep.stop()
    noSleep = null
  }
}

// --- Private ---

class NoSleep {
  constructor() {
    this._noSleep = true
    this._keepAwake()
  }

  stop() {
    this._noSleep = false
  }

  async _keepAwake() {
    while (this._noSleep) {
      if (video) video.play()
      document.body.innerHTML += "<br>Played"
      await wait(10000)
    }
  }
}

window.addEventListener("click", function enable() {
  window.removeEventListener("click", enable)

  // Initialize video element
  video = document.createElement("video")
  video.setAttribute("playsinline", "")

  // Add mp4 source
  let source = document.createElement("source")
  source.src = mp4Src
  source.type = "video/mp4"
  video.append(source)

  // Add webm source
  source = document.createElement("source")
  source.src = webmSrc
  source.type = "video/webm"
  video.append(source)

  // Play it as a result of user interaction
  video.play()
  document.body.innerHTML = "Activated"
  activate()
})

function wait(milliseconds) {
  return new Promise(resolve => setTimeout(resolve, milliseconds))
}
const mp4Src = "data:video/mp4;base64,AAAAHGZ0eXBpc29tAAACAGlzb21pc28ybXA0MQAAAyBtb292AAAAbG12aGQAAAAAAAAAAAAAAAAAAAPoAAAAGwABAAABAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAACSnRyYWsAAABcdGtoZAAAAAMAAAAAAAAAAAAAAAEAAAAAAAAAGwAAAAAAAAAAAAAAAQEAAAAAAQAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAACRlZHRzAAAAHGVsc3QAAAAAAAAAAQAAABoAAAgAAAEAAAAAAcJtZGlhAAAAIG1kaGQAAAAAAAAAAAAAAAAAAKxEAAAEgFXEAAAAAAAxaGRscgAAAAAAAAAAc291bgAAAAAAAAAAAAAAAENvcmUgTWVkaWEgQXVkaW8AAAABaW1pbmYAAAAQc21oZAAAAAAAAAAAAAAAJGRpbmYAAAAcZHJlZgAAAAAAAAABAAAADHVybCAAAAABAAABLXN0YmwAAAB7c3RzZAAAAAAAAAABAAAAa21wNGEAAAAAAAAAAQAAAAAAAAAAAAIAEAAAAACsRAAAAAAAM2VzZHMAAAAAA4CAgCIAAQAEgICAFEAVAAAAAAJ/9wACf/cFgICAAhIQBoCAgAECAAAAFGJ0cnQAAAAAAAJ/9wACf/cAAAAgc3R0cwAAAAAAAAACAAAAAwAABAAAAAABAAAAgAAAABxzdHNjAAAAAAAAAAEAAAABAAAABAAAAAEAAAAkc3RzegAAAAAAAAAAAAAABAAAAXMAAAF0AAABcwAAAXQAAAAUc3RjbwAAAAAAAAABAAADTAAAABpzZ3BkAQAAAHJvbGwAAAACAAAAAf//AAAAHHNiZ3AAAAAAcm9sbAAAAAEAAAAEAAAAAQAAAGJ1ZHRhAAAAWm1ldGEAAAAAAAAAIWhkbHIAAAAAAAAAAG1kaXJhcHBsAAAAAAAAAAAAAAAALWlsc3QAAAAlqXRvbwAAAB1kYXRhAAAAAQAAAABMYXZmNTguNzYuMTAwAAAACGZyZWUAAAXWbWRhdCERRQAUUAFG//EKWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaXemCFLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd6aIUtLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS8IRFFABRQAUb/8QpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpd6YIUtLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLwhEUUAFFABRv/xClpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl3pohS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLw="
const webmSrc = "data:video/webm;base64,GkXfo59ChoEBQveBAULygQRC84EIQoKEd2VibUKHgQRChYECGFOAZwEAAAAAAANXEU2bdLpNu4tTq4QVSalmU6yBoU27i1OrhBZUrmtTrIHYTbuMU6uEElTDZ1OsggE/TbuMU6uEHFO7a1OsggNB7AEAAAAAAABZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAVSalmsirXsYMPQkBNgI1MYXZmNTguNzYuMTAwV0GNTGF2ZjU4Ljc2LjEwMESJiEBBAAAAAAAAFlSua+KuAQAAAAAAAFnXgQFzxYjHh4Jmxpm3C5yBACK1nIN1bmSGhkFfT1BVU1aqg2MuoFa7hATEtACDgQLhkZ+BArWIQOdwAAAAAABiZIEYY6KTT3B1c0hlYWQBAjgBgLsAAAAAABJUw2dB03NzAQAAAAAAAQ5jwIBnyAEAAAAAAAAVRaOLTUFKT1JfQlJBTkREh4RxdCAgZ8gBAAAAAAAAFEWjjU1JTk9SX1ZFUlNJT05Eh4EwZ8gBAAAAAAAAG0WjkUNPTVBBVElCTEVfQlJBTkRTRIeEcXQgIGfIAQAAAAAAABlFo4hUSU1FQ09ERUSHizAwOjAwOjAwOjAwZ8gBAAAAAAAAKkWjn0NPTS5BUFBMRS5RVUlDS1RJTUUuRElTUExBWU5BTUVEh4VlbXB0eWfIAQAAAAAAACRFo5lDT00uQVBQTEUuUVVJQ0tUSU1FLlRJVExFRIeFZW1wdHlnyAEAAAAAAAAaRaOHRU5DT0RFUkSHjUxhdmY1OC43Ni4xMDBzcwEAAAAAAACxY8CLY8WIx4eCZsaZtwtnyAEAAAAAAAAiRaOMSEFORExFUl9OQU1FRIeQQ29yZSBNZWRpYSBBdWRpb2fIAQAAAAAAABtFo4lWRU5ET1JfSUREh4xbMF1bMF1bMF1bMF1nyAEAAAAAAAAjRaOHRU5DT0RFUkSHlkxhdmM1OC4xMzQuMTAwIGxpYm9wdXNnyKJFo4hEVVJBVElPTkSHlDAwOjAwOjAwLjAzNDAwMDAwMAAAH0O2daTngQCjh4EAAID8//6gAQAAAAAAAA+hh4EAFQD8//51ooNwiJgcU7trkbuPs4EAt4r3gQHxggMY8IED"
The implementation is made so that multiple calls to activate()/deactivate() have no effect (which was beneficial in my use case). I create a NoSleep object to make sure that it will actually deactivate (deactivation may take up to 10 seconds). Without it, quick successive activations and deactivations could result in multiple wake cycles running at the same time without ever quitting.
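As a usage sketch (the lifecycle hooks and module path here are placeholders, not part of the code above):
import { activate, deactivate } from './nosleep.js' // wherever you keep the module above

function onTestStart() {
  activate()   // calling it again later is a no-op
}

function onTestFinish() {
  deactivate() // the wake loop exits within ~10 seconds
}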
Play looped VIDEO or AUDIO on your page
You can use this quick example to add a fake looped video to your page and prevent a mobile device from sleeping:
// Create the root video element
var video = document.createElement('video');
video.setAttribute('loop', '');
// Add some styles if needed
video.setAttribute('style', 'position: fixed;');
// A helper to add sources to video
function addSourceToVideo(element, type, dataURI) {
  var source = document.createElement('source');
  source.src = dataURI;
  source.type = 'video/' + type;
  element.appendChild(source);
}
// A helper to concat base64
var base64 = function(mimeType, base64) {
  return 'data:' + mimeType + ';base64,' + base64;
};
// Add fake sources
addSourceToVideo(video,'webm', base64('video/webm', 'GkXfo0AgQoaBAUL3gQFC8oEEQvOBCEKCQAR3ZWJtQoeBAkKFgQIYU4BnQI0VSalmQCgq17FAAw9CQE2AQAZ3aGFtbXlXQUAGd2hhbW15RIlACECPQAAAAAAAFlSua0AxrkAu14EBY8WBAZyBACK1nEADdW5khkAFVl9WUDglhohAA1ZQOIOBAeBABrCBCLqBCB9DtnVAIueBAKNAHIEAAIAwAQCdASoIAAgAAUAmJaQAA3AA/vz0AAA='));
addSourceToVideo(video, 'mp4', base64('video/mp4', 'AAAAHGZ0eXBpc29tAAACAGlzb21pc28ybXA0MQAAAAhmcmVlAAAAG21kYXQAAAGzABAHAAABthADAowdbb9/AAAC6W1vb3YAAABsbXZoZAAAAAB8JbCAfCWwgAAAA+gAAAAAAAEAAAEAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAIVdHJhawAAAFx0a2hkAAAAD3wlsIB8JbCAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAAIAAAACAAAAAABsW1kaWEAAAAgbWRoZAAAAAB8JbCAfCWwgAAAA+gAAAAAVcQAAAAAAC1oZGxyAAAAAAAAAAB2aWRlAAAAAAAAAAAAAAAAVmlkZW9IYW5kbGVyAAAAAVxtaW5mAAAAFHZtaGQAAAABAAAAAAAAAAAAAAAkZGluZgAAABxkcmVmAAAAAAAAAAEAAAAMdXJsIAAAAAEAAAEcc3RibAAAALhzdHNkAAAAAAAAAAEAAACobXA0dgAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAIAAgASAAAAEgAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABj//wAAAFJlc2RzAAAAAANEAAEABDwgEQAAAAADDUAAAAAABS0AAAGwAQAAAbWJEwAAAQAAAAEgAMSNiB9FAEQBFGMAAAGyTGF2YzUyLjg3LjQGAQIAAAAYc3R0cwAAAAAAAAABAAAAAQAAAAAAAAAcc3RzYwAAAAAAAAABAAAAAQAAAAEAAAABAAAAFHN0c3oAAAAAAAAAEwAAAAEAAAAUc3RjbwAAAAAAAAABAAAALAAAAGB1ZHRhAAAAWG1ldGEAAAAAAAAAIWhkbHIAAAAAAAAAAG1kaXJhcHBsAAAAAAAAAAAAAAAAK2lsc3QAAAAjqXRvbwAAABtkYXRhAAAAAQAAAABMYXZmNTIuNzguMw=='));
// Append the video to where ever you need
document.body.appendChild(video);
// Start playing video after any user interaction.
// NOTE: Running video.play() handler without a user action may be blocked by browser.
var playFn = function() {
  video.play();
  document.body.removeEventListener('touchend', playFn);
};
document.body.addEventListener('touchend', playFn);
Related
I'm making an iOS app to wrap my JavaScript game. In Mobile Safari it works fine because, after I play a sound in ontouchstart, I'm allowed to play any audio whenever I want, and I can set its volume too.
Problem 1: In a WKWebView I can only play the specific sounds that I played in ontouchstart, not the rest. So I'm playing every single audio file in my game on the first tap, which sounds really bad. Otherwise, on audio.play() in the JavaScript I get:
Unhandled Promise Rejection: NotAllowedError: The request is not allowed by the user agent or the platform in the current context, possibly because the user denied permission.
Problem 2: I also can't lower the volume of sounds in a WKWebView. If I set myAudio.volume = .5, it's instantly 1 again, which means the user has to actually hear the sounds in order for them to reach readyState = 4.
Any good solution or hack? Right now I'm playing every single sound on the first tap, and everything is full volume.
<html>
<body style='background:#DDD;height:333px'>
  <div id=debug style='font-size:20px' onclick="playSound(myAudio)">
    Debug<br />
    Tap here to play the sound.<br />
  </div>
  <script>
    var context = new webkitAudioContext()
    var myAudio = new Audio("sound/upgrade.wav")
    myAudio.addEventListener('canplaythrough', function() { log('canplaythrough1') }, false)
    var myAudio2 = new Audio("sound/dragon.wav")
    myAudio2.addEventListener('canplaythrough', function() { log('canplaythrough2') }, false)

    function playSound(sound) {
      log("playSound() readyState=" + sound.readyState)
      try {
        sound.play()
        context.resume().then(() => {
          log("Context resumed")
          sound.play()
        })
      } catch (e) {
        log(e)
      }
    }

    function log(m) {
      console.log(m)
      debug.innerHTML += m + "<br />"
    }

    window.ontouchstart = function() {
      log("ontouchstart()")
      playSound(myAudio)
      setTimeout(function() {
        playSound(myAudio2)
      }, 1111)
    }
  </script>
</body>
</html>
And ViewController.swift
import UIKit
import WebKit

class ViewController: UIViewController {

    private var myWebView3: WKWebView!

    override func viewDidLoad() {
        super.viewDidLoad()

        self.myWebView3 = WKWebView(frame: .zero)
        self.myWebView3.configuration.preferences.setValue(true, forKey: "allowFileAccessFromFileURLs")
        self.myWebView3.configuration.mediaTypesRequiringUserActionForPlayback = []
        self.myWebView3.configuration.allowsInlineMediaPlayback = true

        let url = Bundle.main.url(forResource: "index6", withExtension: "html", subdirectory: "/")!
        self.myWebView3.loadFileURL(url, allowingReadAccessTo: url)
        let request = URLRequest(url: url)
        self.myWebView3.load(request)
        self.view.addSubview(self.myWebView3)
    }

    override func viewDidLayoutSubviews() {
        self.myWebView3.frame = self.view.bounds
    }
}
I'm using Xcode 10.1 and an iPhone SE running iOS 12.1.1.
I have also tried on an iPad with iOS 10 and get the same error.
I have also tried context.decodeAudioData() / context.createBufferSource() and get the same error.
Re: problem 1, you need to create and configure the configuration object before you create the web view. The line
self.myWebView3.configuration.mediaTypesRequiringUserActionForPlayback = []
will fail silently. Just create a new WKWebViewConfiguration object, set its properties, and then create the web view:
webView = WKWebView(frame: .zero, configuration: webConfiguration)
In the WKWebView's delegate, after the web view has finished loading, execute JavaScript that adds event listeners for the play, pause, and ended events on the audio and video tags. Then check the playback state as needed.
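For example, the injected script could look roughly like this (the "playbackState" message handler name is an assumption; you would register it on the WKUserContentController first and inject this from the delegate's didFinish callback):
// Injected via evaluateJavaScript after the page finishes loading.
// Assumes a script message handler registered under the name "playbackState".
document.querySelectorAll('audio, video').forEach(function (el) {
  ['play', 'pause', 'ended'].forEach(function (eventName) {
    el.addEventListener(eventName, function () {
      window.webkit.messageHandlers.playbackState.postMessage({
        event: eventName,
        src: el.currentSrc,
        paused: el.paused
      });
    });
  });
});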
I am trying to preload audio files via JavaScript and the HTML Audio API.
Safari on iOS (aka safari mobile) only allows .load() on "user interaction". Now I am wondering what that means exactly.
For example: I trigger the code described in the top answer of Preload multiple audio files:
var audioFiles = [
  "http://www.teanglann.ie/CanC/nua.mp3",
  "http://www.teanglann.ie/CanC/ag.mp3",
  "http://www.teanglann.ie/CanC/dul.mp3",
  "http://www.teanglann.ie/CanC/freisin.mp3"
];

function preloadAudio(url) {
  var audio = new Audio();
  // once this file loads, it will call loadedAudio()
  // the file will be kept by the browser as cache
  audio.addEventListener('canplaythrough', loadedAudio, false);
  audio.src = url;
}

var loaded = 0;
function loadedAudio() {
  // this will be called every time an audio file is loaded
  // we keep track of the loaded files vs the requested files
  loaded++;
  if (loaded == audioFiles.length) {
    // all have loaded
    init();
  }
}

var player = document.getElementById('player');
function play(index) {
  player.src = audioFiles[index];
  player.play();
}

function init() {
  // do your stuff here, audio has been loaded
  // for example, play all files one after the other
  var i = 0;
  // once the player ends, play the next one
  player.onended = function() {
    i++;
    if (i >= audioFiles.length) {
      // end
      return;
    }
    play(i);
  };
  // play the first file
  play(i);
}

// we start preloading all the audio files
for (var i in audioFiles) {
  preloadAudio(audioFiles[i]);
}
<audio id="player"></audio>
on a button click, but still have issues with files not being cached...
Does the for loop break the user interaction? How many steps/function calls can I do in the background before a load() is no longer considered a "user interaction"?
I am a total JavaScript amateur. I want to create a section where people can switch between two audio tracks while a video runs along with them. I want to show a before and after version of the audio.
Basically, the video should run with the "before" version of the audio initially. Then there should be a button to switch the audio to the "after" version, and go back and forth.
I tried the "audioTracks" method, but the browser compatibility is not good with it. What is the best possible way to achieve it?
Thanks in advance.
You just use two HTML Audio elements and then sync their times:
var audio1 = new Audio();
var audio2 = new Audio();
audio1.src = 'pathToSource';
audio2.src = 'pathToSource2';

var video = document.getElementById('myvideo');
video.muted = true; // mute video cuz we have audio already

function switchAudio(one, two, vid) {
  if (one.paused) {
    two.pause();
    one.currentTime = vid.currentTime;
    one.play();
  } else {
    one.pause();
    two.currentTime = vid.currentTime;
    two.play();
  }
}
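To wire it up, a minimal example (the button id here is hypothetical):
// Assumes a <button id="switchBtn"> exists somewhere on the page
document.getElementById('switchBtn').addEventListener('click', function () {
  switchAudio(audio1, audio2, video);
});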
That did it :) I made some changes though. The audio seems to be working great. I could play and switch it. But the video was not starting up with the button. So I changed the code to this:
function switchAudio(one, two, vid) {
  if (one.paused) {
    two.pause();
    vid.play();
    one.play();
    one.currentTime = vid.currentTime;
  } else {
    vid.pause();
    one.pause();
    vid.play();
    two.play();
    two.currentTime = vid.currentTime;
  }
}
After this the video is playing along with the audio. But when I switch the audio, their timing does not match. The audio seems to be playing just a little bit late.
I am making a web app that can be open for a long time. I don't want to load audio at load time (when the HTML gets downloaded and parsed) to make the first load as fast as possible and to spare precious resources for mobile users. Audio is disabled by default.
Putting the audio in CSS or using preload is not appropriate here because I don't want to load it at load time with the rest.
I am searching for the ideal method to load audio at run time (after a checkbox has been checked, which can be 20 minutes after opening the app), given a list of audio elements.
The list is already in a variable allSounds. I have the following audio in a webpage (there are more):
<audio preload="none">
<source src="sound1.mp3">
</audio>
I want to keep the same HTML, because on a second visit I can easily change it to the following (this works fine with my server-side HTML generation):
<audio preload="auto">
<source src="sound1.mp3">
</audio>
and it works.
Once the option is turned on, I want to load the sounds, but not play them immediately. I know that .play() loads the sound, but I want to avoid the delay between pressing a button and the associated feedback sound.
In my app it is better not to play a sound at all than to play it late.
I made this event handler to load the sounds (it works), but the Chrome console says the download was cancelled and then restarted, and I don't know exactly what I am doing wrong.
Is this the correct way to force-load sounds? What are the other ways, ideally without changing the HTML?
let loadSounds = function () {
  allSounds.forEach(function (sound) {
    sound.preload = "auto";
    sound.load();
  });
  loadSounds = function () {}; // Execute once
};
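As an illustration of how it gets triggered (the checkbox id is hypothetical), the handler is wired to the sound option roughly like this:
// Assumes a checkbox with id="enableSound" and the soundEnabled flag used below
document.getElementById('enableSound').addEventListener('change', function (event) {
  soundEnabled = event.target.checked;
  if (soundEnabled) {
    loadSounds(); // starts fetching once; later calls do nothing
  }
});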
Here is the playSound function, though it is not very important for the question:
const playSound = function (sound) {
  /* only plays ready sounds to avoid delays between fetching */
  if (!soundEnabled) {
    return;
  }
  if (sound.readyState < sound.HAVE_ENOUGH_DATA) {
    return;
  }
  sound.play();
};
Side question: Should there be a preload="full" in the HTML spec?
See also:
Preload mp3 file in queue to avoid any delay in playing the next file in queue
how we can Play Audio with text highlight word by word in angularjs
To cache the audio you will need to Base64-encode your MP3 files and start the Base64-encoded MP3 file with data:audio/mpeg;base64,
Then you can preload/cache the file with CSS, using something like:
body::after {
  content: url(myfile.mp3);
  display: none;
}
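If you need to build the data URI at runtime rather than embedding it, a rough sketch (the file name is just a placeholder) is to fetch the MP3 and convert the resulting Blob with FileReader:
// "myfile.mp3" is a placeholder path
fetch('myfile.mp3')
  .then(function (response) { return response.blob(); })
  .then(function (blob) {
    return new Promise(function (resolve, reject) {
      var reader = new FileReader();
      reader.onloadend = function () { resolve(reader.result); }; // "data:audio/mpeg;base64,..."
      reader.onerror = reject;
      reader.readAsDataURL(blob);
    });
  })
  .then(function (dataUri) {
    var audio = new Audio(dataUri); // keep a reference and play it later
  });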
I think I would just use the preload functionality without involving an audio tag at all...
Example:
var link = document.createElement('link')
link.rel = 'preload'
link.href = 'sound1.mp3'
link.as = 'audio'
link.onload = function() {
  // Done loading the mp3
}
document.head.appendChild(link)
I'm quite sure that I've found a solution for you. As far as I'm concerned, your sounds are additional functionality and are not required for everybody. In that case I would propose loading the sounds with pure JavaScript after the user has clicked the unmute button.
A simple sketch of the solution:
window.addEventListener('load', function(event) {
  var audioloaded = false;
  var audioobject = null;

  // Load audio on first click
  document.getElementById('unmute').addEventListener('click', function(event) {
    if (!audioloaded) { // Load audio on first click
      audioobject = document.createElement("audio");
      audioobject.preload = "auto"; // Load now!!!
      var source = document.createElement("source");
      source.src = "sound1.mp3"; // Append src
      audioobject.appendChild(source);
      audioobject.load(); // Just for sure, old browsers fallback
      audioloaded = true; // Globally remember that audio is loaded
    }
    // Other mute / unmute stuff here that you already got... ;)
  });

  // Play sound on click
  document.getElementById('playsound').addEventListener('click', function(event) {
    audioobject.play();
  });
});
Of course, the button should have id="unmute" and, for simplicity, the body id="body" and the play-sound button id="playsound". You can modify that of course to suit your needs. After that, when someone clicks unmute, the audio object will be generated and dynamically loaded.
I didn't try this solution, so there may be some little mistakes (I hope not!), but I hope it gives you an idea (a sketch) of how this can be accomplished using pure JavaScript.
Don't be afraid that this is pure JavaScript without HTML. This is additional functionality, and JavaScript is the best way to implement it.
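For completeness, the markup this sketch expects could be as simple as the following (only the ids matter):
<button id="unmute">Unmute</button>
<button id="playsound">Play sound</button>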
You can use a Blob URL representation of the file
let urls = [
  "https://upload.wikimedia.org/wikipedia/commons/b/be/Hidden_Tribe_-_Didgeridoo_1_Live.ogg",
  "https://upload.wikimedia.org/wikipedia/commons/6/6e/Micronesia_National_Anthem.ogg"
];

let audioNodes, mediaBlobs, blobUrls;

const request = url => fetch(url).then(response => response.blob())
  .catch(err => { throw err });

const handleResponse = response => {
  mediaBlobs = response;
  blobUrls = mediaBlobs.map(blob => URL.createObjectURL(blob));
  audioNodes = blobUrls.map(blobURL => new Audio(blobURL));
}

const handleMediaSelection = () => {
  const select = document.createElement("select");
  document.body.appendChild(select);
  const label = new Option("Select audio to play");
  select.appendChild(label);
  select.onchange = () => {
    audioNodes.forEach(audio => {
      audio.pause();
      audio.currentTime = 0;
    });
    audioNodes[select.value].play();
  }
  select.onclick = () => {
    const media = audioNodes.find(audio => audio.currentTime > 0);
    if (media) {
      media.pause();
      media.currentTime = 0;
      select.selectedIndex = 0;
    }
  }
  mediaBlobs.forEach((blob, index) => {
    let option = new Option(
      new URL(urls[index]).pathname.split("/").pop(),
      index
    );
    option.onclick = () => console.log()
    select.appendChild(option);
  })
}

const handleError = err => {
  console.error(err);
}

Promise.all(urls.map(request))
  .then(handleResponse)
  .then(handleMediaSelection)
  .catch(handleError);
So I'm currently building an app using the NativeScript multiplatform framework. I've got the two plugins in question working by themselves, but when both of them run at the same time and I want to pause both tracks (video and audio, with separate track files), only one of them stops while the other continues. Why is that?
To start off, my NativeScript audio player code is:
var videoViewModel = new Observable();
videoViewModel.audioPlayer = new TNSPlayer();

let audioParams = {
  audioFile: selectedVideoEntity.audioPath_,
  loop: true,
  completeCallback: function() {},
  errorCallback: function(error) { console.log("failed with: ", error); }
};

let audioPlayerCreatedCallback = () => {
  videoViewModel.audioPlayer.getAudioTrackDuration().then((duration) => {
    // iOS: duration is in seconds
    // Android: duration is in milliseconds
    console.log(`song duration:`, duration);
  });
}

// todo: change to something more meaningful:
// (new RegExp('^' + "https\:\/\/|http\:\/\/", 'i')).test(selectedVideoEntity.audioPath)
if (selectedVideoEntity.audioPath.startsWith("http")) {
  console.log("getting from url");
  videoViewModel.audioPlayer.playFromUrl(audioParams).then(audioPlayerCreatedCallback);
} else {
  videoViewModel.audioPlayer.playFromFile(audioParams).then(audioPlayerCreatedCallback);
}
When this is set up, I move on to making the video player. This is implemented in XML.
<Page xmlns="http://schemas.nativescript.org/tns.xsd" xmlns:VideoPlayer="nativescript-videoplayer"
      loaded="loaded" class="page" navigatingFrom="onNavigatingFrom" actionBarHidden="true">
  <StackLayout id="layout">
    <VideoPlayer:Video id="videoPlayer" controls="true" loop="false"
                       finished="{{ videoFinished }}"
                       src="{{ videoPlayerViewModel.videoPath }}" observeCurrentTime="true" muted="true" />
    <VideoPlayer:Video id="rewardVideoPlayer" controls="true" loop="false"
                       finished="{{ rewardVideoFinished }}" />
  </StackLayout>
</Page>
And then I grab the object from the associated page file
let videoPlayer = page.getViewById("videoPlayer");
I expect the video to have its source and everything else set at this point.
Now is where the issues begin. What I want is to pause the video and the audio at the same time, so my approach is to pause the audio and, right afterwards, the video player, like this:
videoViewModel.audioPlayer.pause();
videoViewModel.videoPlayer.pause();
At this point only one of them stops (mostly the audio) and the video continues on. Any ideas why?
Links to the plugins:
https://github.com/bradmartin/nativescript-audio
https://github.com/bradmartin/nativescript-videoplayer
TO CLARIFY THE SITUATION
This only pauses one of the tracks and then resumes the one that was paused, so overall it has no effect:
videoViewModel.audioPlayer.pause();
videoViewModel.videoPlayer.pause();
// Wait a few seconds
videoViewModel.audioPlayer.resume();
videoViewModel.videoPlayer.play();
This pauses the video and resumes it flawlessly:
videoViewModel.videoPlayer.pause();
// Wait a few seconds
videoViewModel.videoPlayer.play();
Same goes for audio
videoViewModel.audioPlayer.pause();
// Wait a few seconds
videoViewModel.audioPlayer.resume();
The explicit code that tries to pause and resume is given here. This code is in an exports.loaded function in one of my NativeScript page .js files.
cycleVideoPlayer.on(gestures.GestureTypes.swipe, function(eventdata) {
  console.log("Swipe detected, with direction: " + eventdata.direction);
  if (!paused) {
    (function() {
      vm.audioPlayer.pause();
      vm.cycleVideoPlayerViewModel.pause(); //This doesnt work alongside audiopause.. why?
      paused = true;
      console.log("Paused");
    });
  } else {
    (function() {
      vm.audioPlayer.resume();
      vm.cycleVideoPlayerViewModel.play();
      paused = false;
      console.log("Resumed");
    });
  }
});
If anybody has an idea or needs clarification on the matter, please comment :)