I am trying to build a sound player based on the Web Audio API. I have an issue with Safari that looks to me like a serious Safari bug, so I don't know whether anything can be done about it.
Basically I need to stream an mp3 file into the Web Audio API graph, which goes via createMediaElementSource. The problem in Safari (9.1.1) is that every time you call play() on the underlying audio element, the initial audio buffer is corrupted, producing a brief "hiccup". Depending on the complexity of the actual Web Audio graph, these dropouts can last up to several hundred milliseconds. Firefox and Chrome don't have this problem.
The following illustrates the problem. Click "Load" to set the audio element's src. Wait. Then click Play, Stop, RTZ (return-to-zero) repeatedly -- you should hear an undistorted sine tone, but you can actually hear that the initial buffer on each play is corrupted.
JSFiddle: http://jsfiddle.net/4zgf33os/6/
HTML:
<div id="load" class="button">Load</div>
<div id="play" class="button">Play</div>
<div id="stop" class="button">Stop</div>
<div id="rtz" class="button">RTZ</div>
CSS:
div.button {
position: relative;
display: inline;
width: 100px;
height: 24px;
margin: 4px;
padding: 4px;
background: black;
color: white;
cursor: pointer;
}
JavaScript:
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var elem = document.createElement("audio");
elem.setAttribute("crossorigin", "anonymous");
var context = new window.AudioContext();
var node = context.createMediaElementSource(elem);
node.connect(context.destination);
elem.preload = "auto";
elem.addEventListener("error", function(e) {
alert("Error!");
});
var load = function() {
elem.src = "https://ia800209.us.archive.org/12/items/SineWaveStudiosSineWaveTestFile/sinetest.mp3";
};
var rtz = function() {
elem.pause();
elem.currentTime = 0.0;
};
var play = function() {
elem.play();
};
var stop = function() {
elem.pause();
};
$("#load").click(load);
$("#rtz" ).click(rtz );
$("#play").click(play);
$("#stop").click(stop);
Related
I have a simple setup with two buttons and a <video> element. I am using Shaka Player to play an adaptive DASH file, and Google's IMA Ads SDK to play ads along with the video.
The expected outcome is that when I press button 1, I should see pre-roll 1 followed by video 1, and when I press button 2, pre-roll 2 followed by video 2.
However, after clicking either button, the ad plays only once. If I toggle between the buttons, no ads play thereafter. Am I missing anything here? Or do I need to somehow clear the ads request before making another one?
const adUrl1 = "https://pubads.g.doubleclick.net/gampad/ads?iu=/21775744923/external/vmap_ad_samples&sz=640x480&cust_params=sample_ar%3Dpreonly&ciu_szs=300x250%2C728x90&gdfp_req=1&ad_rule=1&output=vmap&unviewed_position_start=1&env=vp&impl=s&correlator=";
const adUrl2 = "https://pubads.g.doubleclick.net/gampad/ads?iu=/21775744923/external/vmap_ad_samples&sz=640x480&cust_params=sample_ar%3Dpremidpost&ciu_szs=300x250&gdfp_req=1&ad_rule=1&output=vmap&unviewed_position_start=1&env=vp&impl=s&cmsid=496&vid=short_onecue&correlator=";
const manifestUrl1 = "https://storage.googleapis.com/wvmedia/clear/h264/tears/tears.mpd";
const manifestUrl2 = "https://storage.googleapis.com/shaka-demo-assets/angel-one/dash.mpd";
let player, ui, video, controls, adManager;
async function initApp() {
shaka.polyfill.installAll();
if (shaka.Player.isBrowserSupported()) {
// When using the UI, the player is made automatically by the UI object.
video = document.getElementById('video');
ui = video['ui'];
controls = ui.getControls();
player = controls.getPlayer();
// Listen for error events.
player.addEventListener('error', onPlayerErrorEvent);
// controls.addEventListener('error', onUIErrorEvent);
adManager = player.getAdManager();
// Attach player and ui to the window to make it easy to access in the JS console.
window.player = player;
window.ui = ui;
window.video = video;
window.controls = controls;
window.adManager = adManager;
} else {
console.error("Browser not supported");
}
}
function initializeAdManager() {
const container = ui.getControls().getClientSideAdContainer();
adManager.initClientSide(container, video);
}
function onError(error) {
// Log the error.
console.error('Error code', error.code, 'object', error);
}
async function playAd1() {
console.log('playing Video with ads 1');
initializeAdManager();
fetchAd(adUrl1);
await playVideo(manifestUrl1);
}
async function playAd2() {
console.log('playing video with ads 2');
initializeAdManager();
fetchAd(adUrl2);
await playVideo(manifestUrl2);
}
async function playVideo(url) {
try {
await player.load(url);
} catch (e) {
onError(e);
}
}
function fetchAd(url) {
const adRequest = new google.ima.AdsRequest();
adRequest.adTagUrl = url;
adManager.requestClientSideAds(adRequest);
}
function onPlayerErrorEvent(errorEvent) {
// Extract the shaka.util.Error object from the event.
onPlayerError(errorEvent.detail);
}
function onPlayerError(error) {
// Handle player error
console.error('Error code', error.code, 'object', error);
}
function onUIErrorEvent(errorEvent) {
// Extract the shaka.util.Error object from the event.
onPlayerError(errorEvent.detail);
}
document.addEventListener('shaka-ui-loaded', initApp);
<html>
<head>
<meta content="text/html; charset=utf-8" http-equiv="Content-type"/>
<title id="sample_app_page_title">Ad Ping</title>
<!--for UI builds: -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/shaka-player/4.3.0/shaka-player.ui.min.js"></script>
<link href="https://cdnjs.cloudflare.com/ajax/libs/shaka-player/4.3.0/controls.min.css" rel="stylesheet">
<!-- IMA HTML5 SDK (for serving Client Side ads): -->
<script src="https://imasdk.googleapis.com/js/sdkloader/ima3.js" type="text/javascript"></script>
</head>
<body>
<button onclick="playAd1()">Play Video with Ad1</button>
<button onclick="playAd2()">Play Video with Ad2</button>
<div>
<div data-shaka-player-cast-receiver-id="8D8C71A7" data-shaka-player-container>
<video data-shaka-player controls autoplay id="video" style="width:400px; height:200px"></video>
</div>
</div>
</body>
</html>
I figured out the answer: the key is to destroy any existing adsManager and create a fresh AdsLoader before each new ad request (see setUpIMA() and requestAds() below). Please check the following snippet to see this in action.
// Copyright 2013 Google Inc. All Rights Reserved.
// You may study, modify, and use this example for any purpose.
// Note that this example is provided "as is", WITHOUT WARRANTY
// of any kind either expressed or implied.
var adsManager;
var adsLoader;
var adDisplayContainer = document.getElementById('adContainer');
var intervalTimer;
var videoContent = document.getElementById('contentElement');
var testButton1 = document.getElementById('playButton');
var testButton2 = document.getElementById('testButton');
const adUrl1 = "https://pubads.g.doubleclick.net/gampad/ads?sz=640x480&iu=/124319096/external/single_ad_samples&ciu_szs=300x250&impl=s&gdfp_req=1&env=vp&output=vast&unviewed_position_start=1&cust_params=deployment%3Ddevsite%26sample_ct%3Dskippablelinear&correlator=";
var adUrl2 = "https://pubads.g.doubleclick.net/gampad/ads?iu=/21775744923/external/vmap_ad_samples&sz=640x480&cust_params=sample_ar%3Dpreonly&ciu_szs=300x250%2C728x90&gdfp_req=1&ad_rule=1&output=vmap&unviewed_position_start=1&env=vp&impl=s&correlator=";
const video1 = "https://storage.googleapis.com/gvabox/media/samples/stock.mp4";
const video2 = "https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4";
function init() {
testButton1.addEventListener('click', function(e){
e.preventDefault();
videoContent.setAttribute('src', video1);
requestAds(adUrl1);
});
testButton2.addEventListener('click', function(e) {
e.preventDefault();
videoContent.setAttribute('src', video2);
requestAds(adUrl2);
});
}
function setUpIMA() {
// Create the ad display container.
createAdDisplayContainer();
videoContent.load();
adDisplayContainer.initialize();
// Create ads loader.
adsLoader = new google.ima.AdsLoader(adDisplayContainer);
// Listen and respond to ads loaded and error events.
adsLoader.addEventListener(
google.ima.AdsManagerLoadedEvent.Type.ADS_MANAGER_LOADED,
onAdsManagerLoaded,
false);
adsLoader.addEventListener(
google.ima.AdErrorEvent.Type.AD_ERROR,
onAdError,
false);
}
function createAdDisplayContainer() {
// We assume the adContainer is the DOM id of the element that will house
// the ads.
adDisplayContainer = new google.ima.AdDisplayContainer(
document.getElementById('adContainer'), videoContent);
}
function onAdsManagerLoaded(adsManagerLoadedEvent) {
// Get the ads manager.
var adsRenderingSettings = new google.ima.AdsRenderingSettings();
adsRenderingSettings.restoreCustomPlaybackStateOnAdBreakComplete = true;
// videoContent should be set to the content video element.
adsManager = adsManagerLoadedEvent.getAdsManager(
videoContent, adsRenderingSettings);
// Add listeners to the required events.
adsManager.addEventListener(
google.ima.AdErrorEvent.Type.AD_ERROR,
onAdError);
adsManager.addEventListener(
google.ima.AdEvent.Type.CONTENT_PAUSE_REQUESTED,
onContentPauseRequested);
adsManager.addEventListener(
google.ima.AdEvent.Type.CONTENT_RESUME_REQUESTED,
onContentResumeRequested);
adsManager.addEventListener(
google.ima.AdEvent.Type.ALL_ADS_COMPLETED,
onAdEvent);
// Listen to any additional events, if necessary.
adsManager.addEventListener(
google.ima.AdEvent.Type.LOADED,
onAdEvent);
adsManager.addEventListener(
google.ima.AdEvent.Type.STARTED,
onAdEvent);
adsManager.addEventListener(
google.ima.AdEvent.Type.COMPLETE,
onAdEvent);
try {
// Initialize the ads manager. Ad rules playlist will start at this time.
// Initialize at the player's size. (AUTO_SCALE is a static constant on
// google.ima.AdsRenderingSettings, not on the instance, so the original
// call here passed undefined.)
adsManager.init(640, 360, google.ima.ViewMode.NORMAL);
// Call play to start showing the ad. Single video and overlay ads will
// start at this time; the call will be ignored for ad rules.
adsManager.start();
} catch (adError) {
// An error may be thrown if there was a problem with the VAST response.
videoContent.play();
}
}
function onAdEvent(adEvent) {
// Retrieve the ad from the event. Some events (e.g. ALL_ADS_COMPLETED)
// don't have ad object associated.
var ad = adEvent.getAd();
switch (adEvent.type) {
case google.ima.AdEvent.Type.LOADED:
// This is the first event sent for an ad - it is possible to
// determine whether the ad is a video ad or an overlay.
if (!ad.isLinear()) {
// Position AdDisplayContainer correctly for overlay.
// Use ad.width and ad.height.
videoContent.play();
}
break;
case google.ima.AdEvent.Type.STARTED:
if (ad.isLinear()) {
// For a linear ad, a timer can be started to poll for
// the remaining time.
intervalTimer = setInterval(
function() {
var remainingTime = adsManager.getRemainingTime();
},
300); // every 300ms
}
break;
case google.ima.AdEvent.Type.COMPLETE:
// This event indicates the ad has finished - the video player
// can perform appropriate UI actions, such as removing the timer for
// remaining time detection.
if (ad.isLinear()) {
clearInterval(intervalTimer);
}
break;
}
}
function requestAds(url){
setUpIMA();
if (adsManager) {
adsManager.destroy();
}
var adsRequest = new google.ima.AdsRequest();
adsRequest.adTagUrl = url;
// Specify the linear and nonlinear slot sizes. This helps the SDK to
// select the correct creative if multiple are returned.
adsRequest.linearAdSlotWidth = 640;
adsRequest.linearAdSlotHeight = 400;
adsRequest.nonLinearAdSlotWidth = 640;
adsRequest.nonLinearAdSlotHeight = 150;
//log("adsRequest: " + tag);
adsLoader.requestAds(adsRequest);
}
function onAdError(adErrorEvent) {
// Handle the error logging.
console.log(adErrorEvent.getError());
adsManager.destroy();
alert('error in VAST response');
}
function onContentPauseRequested() {
videoContent.pause();
// This function is where you should setup UI for showing ads (e.g.
// display ad timer countdown, disable seeking etc.)
// setupUIForAds();
}
function onContentResumeRequested() {
videoContent.play();
// This function is where you should ensure that your UI is ready
// to play content. It is the responsibility of the Publisher to
// implement this function when necessary.
// setupUIForContent();
}
// Wire UI element references and UI event listeners.
init();
#mainContainer {
position: relative;
width: 640px;
height: 360px;
}
#content, #adContainer {
position: absolute;
top: 0px;
left: 0px;
width: 640px;
height: 360px;
}
#contentElement {
width: 640px;
height: 360px;
overflow: hidden;
}
#playButton, #testButton{
margin-top:10px;
vertical-align: top;
width: 350px;
height: 60px;
padding: 0;
font-size: 22px;
color: white;
text-align: center;
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.25);
background: #2c3e50;
border: 0;
border-bottom: 2px solid #22303f;
cursor: pointer;
-webkit-box-shadow: inset 0 -2px #22303f;
box-shadow: inset 0 -2px #22303f;
}
<script src="https://imasdk.googleapis.com/js/sdkloader/ima3.js"></script>
<div id="mainContainer">
<div id="content">
<video id="contentElement">
<!-- <source src="https://storage.googleapis.com/interactive-media-ads/media/android.mp4" /> -->
</video>
</div>
<div id="adContainer"></div>
</div>
<button id="playButton">Btn1 </button>
<button id="testButton">Btn 2</button>
I've created a basic MediaStream which gets a video & audio track in my React app like this:
const getLocalStream: MediaStream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true,
});
setLocalStream(getLocalStream);
const handleShowCamClick = async () => {
if (!callContext.localStream) return;
callContext.localStream.getVideoTracks().forEach((track: MediaStreamTrack) => track.enabled = true);
callContext.setShowCam(true);
};
const handleHideCamClick = () => {
if (!callContext.localStream) return;
// callContext.localStream.getVideoTracks().forEach((track: MediaStreamTrack) => track.enabled = false);
callContext.localStream.getVideoTracks().forEach((track: MediaStreamTrack) => track.stop());
callContext.setShowCam(false);
};
So now I want the user to be able to disable their webcam. Setting track.enabled = false results in the webcam still being used by the webapp while the video turns black, which is not the behaviour I want.
Instead I want the webcam to no longer be in use by the webapp at all.
My webcam has a blue light that shines whenever the camera is recording, and with track.enabled = false it shows that the camera is still technically recording.
If I remove the video, track.stop() produces the behaviour I want: the webcam is no longer in use. But then how do I add the video track of the webcam back to the localStream?
track.stop() removes the track from localStream and frees the webcam, but since the video track is gone, how can I request a new video track from the webcam and attach it to localStream without re-initializing the MediaStream?
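A minimal sketch of one possible approach, assuming the callContext shape from the snippets above: MediaStream.addTrack() can attach a freshly requested camera track to the existing stream (the answer below takes a different route and builds a new MediaStream instead):
const handleShowCamClick = async () => {
  if (!callContext.localStream) return;
  // Request only a new video track; the existing audio track is untouched.
  const camStream = await navigator.mediaDevices.getUserMedia({ video: true });
  const [videoTrack] = camStream.getVideoTracks();
  callContext.localStream.addTrack(videoTrack);
  callContext.setShowCam(true);
};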
The following solution uses vanilla JavaScript, because this is a problem that I'm sure a lot of folks will be interested in solving in the future.
Treat this as a proof of concept that can be adapted to ANY JavaScript framework to support WebRTC tasks.
Final note - this example only deals with fetching, displaying, and merging media streams. All other WebRTC work, like sending the stream across via RTCPeerConnection, has to be performed on the streams created here - replacing/updating those is beyond the scope of this example.
Core Idea:
Fetch Stream via getUserMedia()
Assign Stream to HTMLMediaElement
Use getVideoTracks() to stop the video track only.
Use getUserMedia() again to fetch a new stream without audio.
Use MediaStream constructor to create a new stream using - the video from the new stream + audio from existing stream as follows -
new MediaStream([...newStream.getVideoTracks(), ...existingStream.getAudioTracks()]);
Use the newly generated MediaStream as required (i.e. replace in RTCPeerConnection, etc. - see the sketch below).
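For that last step, a hedged sketch of the "replace in RTCPeerConnection" part, assuming pc is an existing RTCPeerConnection: RTCRtpSender.replaceTrack() swaps the outgoing video track without renegotiation.
async function swapOutgoingVideo(pc, newLocalStream) {
  // Find the sender currently carrying video and swap its track in place.
  const sender = pc.getSenders().find(s => s.track && s.track.kind === 'video');
  if (sender) {
    await sender.replaceTrack(newLocalStream.getVideoTracks()[0]);
  }
}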
CodePen Demo
let localStream = null;
let mediaWrapperDiv = document.getElementById('mediaWrapper');
let videoFeedElem = document.createElement('video');
videoFeedElem.id = 'videoFeed';
videoFeedElem.width = 640;
videoFeedElem.height = 360;
videoFeedElem.autoplay = true;
videoFeedElem.setAttribute('playsinline', true);
mediaWrapperDiv.appendChild(videoFeedElem);
let fetchStreamBtn = document.getElementById('fetchStream');
let killEverythingBtn = document.getElementById('killSwitch');
let killOnlyVideoBtn = document.getElementById('killOnlyVideo');
let reattachVideoBtn = document.getElementById('reattachVideo');
async function fetchStreamFn() {
localStream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
});
if (localStream) {
await attachToDOM(localStream);
}
}
async function killEverythingFn() {
localStream.getTracks().map(track => track.stop());
localStream = null;
}
async function killOnlyVideoFn() {
localStream.getVideoTracks().map(track => track.stop());
}
async function reAttachVideoFn() {
let existingStream = localStream;
let newStream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: false
});
localStream = new MediaStream([...newStream.getVideoTracks(), ...existingStream.getAudioTracks()]);
if (localStream) {
await attachToDOM(localStream);
}
}
async function attachToDOM(stream) {
videoFeedElem.srcObject = new MediaStream(stream.getTracks());
}
fetchStreamBtn.addEventListener('click', fetchStreamFn);
killOnlyVideoBtn.addEventListener('click', killOnlyVideoFn);
reattachVideoBtn.addEventListener('click', reAttachVideoFn);
killEverythingBtn.addEventListener('click', killEverythingFn);
div#mediaWrapper {
margin: 0 auto;
text-align: center;
}
div#mediaWrapper video {
object-fit: cover;
}
div#mediaWrapper video#videoFeed {
border: 2px solid blue;
}
div#btnWrapper {
text-align: center;
margin-top: 10px;
}
button {
border-radius: 0.25rem;
color: #ffffff;
display: inline-block;
font-size: 1rem;
font-weight: 400;
line-height: 1.6;
padding: 0.375rem 0.75rem;
text-align: center;
cursor: pointer;
}
button.btn-blue {
background-color: #007bff;
border: 1px solid #007bff;
}
button.btn-red {
background-color: #dc3545;
border: 1px solid #dc3545;
}
button.btn-green {
background-color: #28a745;
border: 1px solid #28a745;
}
<h3>How to check if this actually works?</h3>
<h4>Just keep speaking at an audibly loud volume; you'll hear your own audio being played from your device's speakers.<br> You should be able to hear yourself even after you "Kill Only Video" (i.e. the webcam light goes off).</h4>
<div id="mediaWrapper"></div>
<div id="btnWrapper">
<button id="fetchStream" class="btn-blue" type="button" title="Fetch Stream (Allow Access)">Fetch Stream</button>
<button id="killOnlyVideo" class="btn-red" type="button" title="Kill Only Video">Kill Only Video</button>
<button id="reattachVideo" class="btn-green" type="button" title="Re-attach Video">Re-attach Video</button>
<button id="killSwitch" class="btn-red" type="button" title="Kill Everything">Kill Everything</button>
</div>
Getting my feet wet with WebRTC and running into a problem: the RTCPeerConnection.ontrack event does not fire after new MediaStreamTracks are added via the RTCPeerConnection.addTrack() function.
Using the textbook WebRTC getUserMedia example, I grab a single stream from my laptop's camera and set it as the srcObject of one <video> element (local) when the Start button is clicked. When the Call button is clicked, I call addTrack() with each track of the grabbed stream, which I hold in the global localStream variable. At this point the globally defined ontrack event handler should fire and give me the second video, right? No dice, though.
I was able to get this to work with addStream and onaddstream - but both have been removed from the latest WebRTC spec and are not supported by the most recent versions of Chrome and Firefox.
Pic and script attached - any guidance would be greatly appreciated!
Pic with Web Console output
'use strict';
var localStream;
var yourVideo = document.querySelector('#yours');
var theirVideo = document.querySelector('#theirs');
var callBtn = document.querySelector('#callBtn');
var startBtn = document.querySelector('#startBtn');
startBtn.onclick = hasUserMedia;
callBtn.onclick = call;
var cfg = null;
var pc1 = new RTCPeerConnection(cfg);
var pc2 = new RTCPeerConnection(cfg);
pc1.ontrack = function(e){
console.log("ontrack fired!");
theirVideo.srcObject = e.streams[0];
}
function hasUserMedia(){
console.log("entering hasUserMedia()...");
navigator.mediaDevices.getUserMedia({video: true, audio: false}).then(function(stream){
localStream = stream;
console.log("stream val: " + localStream);
yourVideo.srcObject = stream;
});
}
function call(){
console.log("stream val # call(): " + localStream);
localStream.getTracks().forEach(track => pc1.addTrack(track, localStream));
}
index.html
<html lang="en">
<head>
<meta charset="utf-8" />
<title>Learning WebRTC - Chapter 4: Creating a RTCPeerConnection</title>
<style>
body {
background-color: #3D6DF2;
margin-top: 15px;
}
video {
background: black;
border: 1px solid gray;
}
#container {
position: relative;
display: block;
margin: 0 auto;
width: 500px;
height: 500px;
}
#yours {
width: 150px;
height: 150px;
position: absolute;
top: 15px;
right: 15px;
}
#theirs {
width: 500px;
height: 500px;
}
</style>
</head>
<body>
<div id="container">
<video id="yours" autoplay></video>
<video id="theirs" autoplay></video>
<button id="startBtn">Start</button>
<button id="callBtn">Call</button>
</div>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<script type="text/javascript" src="main.js"></script>
</body>
</html>
Both Firefox and Chrome still support onaddstream. Chrome does not yet support addTrack, so you can still use addStream (despite Firefox complaining about it).
Your script is also missing the exchange of SDP and ICE candidates, which has to happen before ontrack will fire.
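A minimal hedged sketch of that missing exchange, looping pc1 and pc2 back to each other inside the same page (note that ontrack fires on the receiving peer, so the handler belongs on pc2 here):
// Hand ICE candidates straight across, since both peers live in one page.
pc1.onicecandidate = e => e.candidate && pc2.addIceCandidate(e.candidate);
pc2.onicecandidate = e => e.candidate && pc1.addIceCandidate(e.candidate);
pc2.ontrack = e => { theirVideo.srcObject = e.streams[0]; };

async function call() {
  localStream.getTracks().forEach(track => pc1.addTrack(track, localStream));
  const offer = await pc1.createOffer();
  await pc1.setLocalDescription(offer);
  await pc2.setRemoteDescription(offer);
  const answer = await pc2.createAnswer();
  await pc2.setLocalDescription(answer);
  await pc1.setRemoteDescription(answer);
}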
Check https://webrtc.github.io/samples/src/content/peerconnection/pc1/ for a complete and working example.
addTrack on Chromium Issue Tracker
addTrack is blocked by RTCRtpSender and RTCRtpReceiver extensions to RTCPeerConnection
I ran into the same problem while trying to implement a video chat, but I found a fix. The fix is simple: make sure to attach the ontrack handler, the onicecandidate handler, and the video stream at the creation of the RTCPeerConnection. See the code below.
var configuration = {
offerToReceiveAudio: true,
offerToReceiveVideo: true
}
var peerConn = new RTCPeerConnection(configuration);
peerConn.ontrack = gotStream;
peerConn.onicecandidate = gotIceCandidate;
peerConn.addStream(localStream);
// ...and do the rest your style. Hope this helps.
Hi everyone. I know how to copy an image from an HTML Image Element into a canvas using the following code snippet:
ctx = canvas.getContext("2d");
ctx.drawImage(imgElement, 0, 0, 300, 300);
But what I want to do is drawImage any part of the page.
The page might contain an ImageElement, borders with various colors, overlaid layers with different background images, text with custom Google fonts, etc.
I just want to copy any part of the page into the canvas, as a screen-capture tool does.
Is this possible with JavaScript?
Or is it theoretically impossible for now? Are there any plans for browser vendors to support this kind of functionality?
You could use html2canvas to take "screenshots" of your webpage or parts of it.
Here is an example:
var btn = document.getElementsByTagName('button')[0];
var h1 = document.getElementsByTagName('h1')[0];
var h1Rect = h1.getBoundingClientRect();
btn.addEventListener('click', function() {
html2canvas(document.body, {
width: h1Rect.width,
height: h1Rect.height + h1Rect.top,
onrendered: function(canvas) {
document.body.appendChild(canvas);
}
});
});
span {
color: red;
font-weight: normal;
font-style: italic;
}
canvas {
margin-top: 6px;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/html2canvas/0.4.1/html2canvas.min.js"></script>
<h1><span>Hello</span> HTML2Canvas</h1>
<button>Click to draw h1 to canvas</button>
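Note that the snippet above pins html2canvas 0.4.1, whose callback-style onrendered option was later removed; the 1.x releases return a promise instead. A minimal sketch of the newer form:
// html2canvas 1.x resolves with the rendered canvas instead of using onrendered.
html2canvas(document.querySelector('h1')).then(function(canvas) {
  document.body.appendChild(canvas);
});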
Using iOS 9.3, with network link conditioning set to 3G speed, I am loading a video with the YouTube IFrame API in Safari.
I would expect the IFrame API to realize that it has been buffering a lot and fetch a lower-quality stream to keep video playback smooth, like the native YouTube app does.
Am I missing something obvious? I basically copied and pasted from the youtube-ios-player-helper wrapper, but it still tries to play at a quality that is too high for the connection speed.
<!DOCTYPE html>
<html>
<head>
<style>
body { margin: 0; width:100%; height:100%; background-color:#000000; }
html { width:100%; height:100%; background-color:#000000; }
.embed-container iframe,
.embed-container object,
.embed-container embed {
position: absolute;
top: 0;
left: 0;
width: 100% !important;
height: 100% !important;
}
</style>
</head>
<body>
<div class="embed-container">
<div id="player"></div>
</div>
<script src="https://www.youtube.com/iframe_api" onerror="window.location.href='ytplayer://onYouTubeIframeAPIFailedToLoad'"></script>
<script>
var player;
var error = false;
YT.ready(function() {
player = new YT.Player('player', {
"events" : {
"onPlaybackQualityChange" : "onPlaybackQualityChange",
"onReady" : "onReady",
"onError" : "onPlayerError",
"onStateChange" : "onStateChange"
},
"width" : "100%",
"height" : "100%",
"videoId" : 'NP7nK2zPirc',
"playerVars" : {
"showinfo" : 0,
"modestbranding" : 1,
"autohide" : 1,
"playsinline" : 1,
"controls" : 0
}
});
player.setSize(window.innerWidth, window.innerHeight);
window.location.href = 'ytplayer://onYouTubeIframeAPIReady';
// this will transmit playTime frequently while playing
function getCurrentTime() {
var state = player.getPlayerState();
if (state == YT.PlayerState.PLAYING) {
var time = player.getCurrentTime();
// window.location.href = 'ytplayer://onPlayTime?data=' + time;
}
}
window.setInterval(getCurrentTime, 500);
});
function onReady(event) {
// window.location.href = 'ytplayer://onReady?data=' + event.data;
}
function onStateChange(event) {
if (!error) {
// window.location.href = 'ytplayer://onStateChange?data=' + event.data;
}
else {
error = false;
}
}
function onPlaybackQualityChange(event) {
// window.location.href = 'ytplayer://onPlaybackQualityChange?data=' + event.data;
}
function onPlayerError(event) {
if (event.data == 100) {
error = true;
}
// window.location.href = 'ytplayer://onError?data=' + event.data;
}
window.onresize = function() {
player.setSize(window.innerWidth, window.innerHeight);
}
</script>
</body>
</html>
Based on this documentation, the onPlaybackQualityChange event fires when the playback quality actually changes, and your code should respond to that event rather than assume the quality changed just because setPlaybackQuality was called.
onPlaybackQualityChange event fires whenever the video playback quality changes. For example, if you call the setPlaybackQuality(suggestedQuality) function, this event will fire if the playback quality actually changes. Your application should respond to the event and should not assume that the quality will automatically change when the setPlaybackQuality(suggestedQuality) function is called. Similarly, your code should not assume that playback quality will only change as a result of an explicit call to setPlaybackQuality or any other function that allows you to set a suggested playback quality.
I'd also recommend calling the getAvailableQualityLevels() function to determine which quality levels are available for a video, and suggesting one that matches your player size. For example, if your page displays a 1280px by 720px video player, an hd720 quality video will actually look better than an hd1080 quality video.
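As a rough sketch of that suggestion, assuming the player variable from the question (and bearing in mind that setPlaybackQuality is only a hint, so the quality may not actually change):
function suggestQualityForSmallPlayer() {
  // The list is empty until the video has started loading or playing.
  var levels = player.getAvailableQualityLevels(); // e.g. ["hd1080", "hd720", "large", ...]
  if (levels.indexOf('large') !== -1) {
    player.setPlaybackQuality('large'); // roughly 480p, for a small player
  }
}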
Check these related SO threads:
YouTube iFrame API "setPlaybackQuality" or "suggestedQuality" not working
If you use event 3 (buffering) instead of event 5 (playing) there's no stutter for the user. Quality is changed as soon as it starts loading. Only weird thing is you need to set it in onPlayerReady as well or it doesn't work.
function onPlayerReady(event) {
event.target.setPlaybackQuality('hd720');
}
function onPlayerStateChange(event) {
if (event.data == YT.PlayerState.BUFFERING) {
event.target.setPlaybackQuality('hd720');
}
}
Set jw player youtube video playback quality
Hope this helps!