I have run into a rather strange situation while trying to build an HTML5 video player using the new Adobe Flash HTML5 Canvas project type.
I have added the video to the canvas in flash, however the video only plays if there is an uncaught reference error. If there is no error the video will not play in the canvas.
I am wondering if anyone has any insight into why this may be happening or if you have a better way to add video to this flash Canvas project.
I have included all of my code below.
this.stop();

var mainStage = this; // reference to the Animate stage (kept for later use)
var movieWidth = 640;   // scaled width of the video
var movieHeight = 360;  // scaled height of the video
var autoplayVideo = true; // start playback automatically

// Create a <video> element, insert it into the DOM just before the canvas,
// and copy its current frame onto the canvas on every CreateJS tick.
var vidya = document.createElement('video');
var canvasEle = document.getElementById('canvas');
var ctx = canvasEle.getContext('2d'); // declared with var — was an implicit global
canvasEle.parentNode.insertBefore(vidya, canvasEle);
vidya.setAttribute("src", "testing.mp4");
vidya.setAttribute("type", "video/mp4");
vidya.setAttribute("width", movieWidth);
vidya.setAttribute("controls", ""); // default generic video controls
vidya.setAttribute("id", "VIDEO");
if (autoplayVideo === true) { vidya.setAttribute("autoplay", ""); }

createjs.Ticker.addEventListener("tick", handleTick);

function handleTick(event) {
    // Only draw once the element has decoded at least the current frame;
    // drawing from an empty video is a no-op (or throws) on some browsers.
    if (vidya.readyState >= 2) {
        ctx.drawImage(vidya, 30, 70, movieWidth, movieHeight);
    }
    // BUG FIX: the original logged an undefined variable `v`, which threw an
    // uncaught ReferenceError on every tick. Log the actual video element.
    // NOTE(review): the old error aborted the rest of the tick handler each
    // frame — if the video "only played with the error", something later in
    // the tick (e.g. a stage update painting over the canvas) may have been
    // suppressed by it; verify rendering order against the Animate-generated
    // tick listener.
    console.log(ctx + " " + vidya);
}
Related
In my app, I wrote a code which allows a user to take a picture using their camera which is connected to the computer.
Implementation:
I have used a webcam, which show a video element and the source is the camera:
// getUserMedia success callback: routes the live camera stream into the
// component's <video> element so it starts rendering frames.
function onSuccess(mediaStream) {
  this.video.srcObject = mediaStream;
}
// Ask for camera access; onSuccess receives the MediaStream, onFailure the error.
// NOTE(review): navigator.getUserMedia is deprecated — the Promise-based
// navigator.mediaDevices.getUserMedia({video: true}) is preferred; confirm
// against the supported browser targets.
navigator.getUserMedia({video: true}, onSuccess, onFailure)
When the user clicks on a button I create a snapshot from the video:
/**
 * Capture the video's current frame at the requested size and hand it to
 * the caller as a JPEG blob.
 * @param {number} width - Output width in pixels.
 * @param {number} height - Output height in pixels.
 * @param {function(Blob)} savePicture - Receives the encoded JPEG.
 */
function takeSnapshot(width, height, savePicture) {
  const frame = document.createElement('canvas');
  frame.width = width;
  frame.height = height;
  // Scale the current video frame to fill the output canvas.
  frame.getContext('2d').drawImage(this.video, 0, 0, frame.width, frame.height);
  // toBlob encodes asynchronously; the callback fires when encoding is done.
  frame.toBlob((blob) => savePicture(blob), "image/jpeg");
}
Now, I need to zoom into the camera and crop the video around the center, since I want to remove the background around the photo.
I don't want to implement this using CSS since I want to save the picture cropped and zoomed too.
I want this zooming in the video view and also when taking the snapshot with zoom.
The camera has no zooming option of its own.
Is there an option to zoom and crop in the video stream?
context.drawImage() has optional parameters to "scale" image (sWidth, sHeight).
To move the image and "crop", you may set additional params (dx, dy).
https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/drawImage
But this a direct way of zoom&crop by JS. If you need ready UI for such functionality, you may combine your code with something like this: http://deepliquid.com/projects/Jcrop/demos.php?demo=thumbnail (I didn't try it, just have it in bookmarks)
I am developing VR html5 page in javascript (no jQuery or other frameworks) that uses WebGL to render a sphere and has texture to which streamed video is rendered into.
All that works fine on iPhone 6, 6+, however on Android I've hit the wall - simply, video is not being transferred into texture (gl.texSubImage2D). Texture remains black. No WebGL errors thrown.
So I have created a test, without WebGL, that just tries to play a video and draw its frames into a 2D canvas, so I can at least verify that frames are indeed extracted from the streamed video.
Video is played on user interaction (touch) and when canplay event is triggered, I start frame loop (window.requestAnimationFrame) to draw video into canvas (canvas.drawImage( video, 0,0 ))
Both video and canvas are on the document body visible, next to each other.
Result: On desktop it works as expected, two screens, left is video with native controls, right is canvas. When I click play, video starts, and canvas gets refreshed at the same time. On Android Chrome 48.0.2564.106 no pixels drawn - canvas is totally empty.
I have installed Android Chrome Beta (50.0.2661.57) and it works there, but on Android Chrome 48.0.2564.106 it does not.
Codes to setup video and canvas:
<!doctype html>
<html>
<head>
<meta charset="utf-8" />
<!-- BUG FIX: the value must be "telephone=no" (key=value form), not
     "telephone-no", for iOS Safari to stop auto-linking phone numbers. -->
<meta name="format-detection" content="telephone=no" />
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>Video Canvas Test</title>
<script>
// Shared page state for the video-to-canvas test.
var video;    // source <video> element (left pane)
var canvas;   // destination <canvas> (right pane)
var ctx;      // 2D context of `canvas`
var info;     // <p> showing the user agent string
var pass = 0; // frame counter overlaid on the canvas
window.onload = function()
{
    // Left pane: the source <video> with native controls.
    video = document.createElement("video");
    video.oncanplay = function(){ initializeCanvas(); }
    video.onerror = function(e){ console.error("video problem"); }
    video.loop = true;
    video.controls = "true";
    video.src = "video/big-buck-bunny_trailer.webm";
    Object.assign(video.style, {
        width: "400px",
        height: "300px",
        position: "absolute",
        left: "20px",
        top: "20px",
        backgroundColor: "#8080FF",
    });

    // Right pane: the canvas the video frames are copied into.
    canvas = document.createElement("canvas");
    Object.assign(canvas.style, {
        backgroundColor: "#8080FF",
        width: "400px",
        height: "300px",
        position: "absolute",
        left: "420px",
        top: "20px",
    });
    ctx = canvas.getContext("2d");

    // Info line: shows the user agent so screenshots identify the build.
    info = document.createElement("p");
    info.innerHTML = window.navigator.userAgent;
    Object.assign(info.style, {
        position: "absolute",
        width: "200px",
        height: "auto",
        left: "20px",
        top: "320px",
    });

    document.body.appendChild(video);
    document.body.appendChild(canvas);
    document.body.appendChild(info);
}
// `canplay` handler: size the canvas backing store to the video's intrinsic
// dimensions and kick off the requestAnimationFrame draw loop.
function initializeCanvas()
{
console.log("Video ready to play. Init canvas.");
// Assigning canvas.width/height resets ALL context state (including font),
// so the font assignment below must stay after these two lines.
ctx.canvas.width = 640; // I am 100% sure this is correct video size.
ctx.canvas.height = 360;
console.log("Video size: " + video.videoWidth + "x" + video.videoHeight);
ctx.font = "30px Arial";
updateCanvas();
}
function updateCanvas()
{
    // One frame of the render loop: blit the current video frame, then
    // overlay a pass counter so a frozen canvas is visually obvious.
    pass += 1;
    var w = ctx.canvas.width;
    var h = ctx.canvas.height;
    ctx.clearRect(0, 0, w, h);
    ctx.drawImage(video, 0, 0);
    ctx.fillText("Pass: " + pass, 30, 120);
    window.requestAnimationFrame(updateCanvas);
}
</script>
</head>
<body>
</body>
</html>
Can anyone confirm if canvas.drawImage on Chrome 48 and older cannot accept video as drawing source? This looks impossible to me considering WebGL has been in Chrome's support for ages and with plenty of experiments with video textures out there.
I have tried other samples that copy video into WebGL texture or Canvas2D and they do not work as well.
Is there anything I am missing here?
After thorough research it appears that:
Canvas2D.drawImage( video, 0,0 ) was broken since July 2015 (at least as I found bug reports that date from then) in Android Chrome and was kinda fixed-not-fixed-fixed-again-and-again-not-fixed - but I can confirm that it is fixed in Android Chrome 49 and in Android Chrome 50 (Beta). Same issue with decoding pixels from video to be drawn into Canvas2D has affected drawing video into WebGL texture.
The only workaround is to have either: a) custom-made video streaming server using Websocket + JPG sequence streamer in JavaScript (images received through opened websocket connection) or b) JPG sequence streamer in JavaScript (downloads images one by one and takes care of loading new - deleting old).
I went with option (b), and with a CDN kicked in it really works great for a 1024x512 video size, which was a good enough solution.
I would first preload mp3 file using http request, then load it into Audio object. Once that's done, JPG streamer would kick in and would then commence playback using sound currentTime value. It was 100% synchronised with sound!
However, issue is fixed in Chrome 49, and Chrome 50 (beta), so this likely will be obsolete in 2-3 months.
My application works fine in Chrome, but in IE/Edge the canvas doesn't show the video.
This started happening when I used an encrypted video as the source; when I used an unprotected video, the canvas showed the video.
I can't find a solution, mostly because IE/Edge doesn't show errors in the developer tools console.
Does IE/Edge have some policy that doesn't allow drawing an encrypted video?
In future i will remove video element from html, create only in javascript and write some text in canvas as a watermark.
<canvas runat="server" id="canvas1"></canvas>
<video
id="video1"
runat="server"
class="azuremediaplayer amp-default-skin amp-big-play-centered"
controls
poster="">
</video>
<script>
    // Grab the server-generated video element and force inline playback on iOS.
    var videoElement = document.getElementById('<%=video1.ClientID%>');
    videoElement.setAttribute('webkit-playsinline', 'true');
    videoElement.width = '1280';
    videoElement.height = '720';

    // Wrap the element with Azure Media Player and point it at the
    // DRM-protected stream (Widevine for Chrome, PlayReady for IE/Edge).
    var myPlayer = amp(videoElement);
    myPlayer.src([{
        src: '<URL VIDEO>',
        protectionInfo: [
            {
                type: 'Widevine',
                authenticationToken: 'Bearer=<TOKEN>'
            }, {
                type: 'PlayReady',
                authenticationToken: 'Bearer=<TOKEN>'
            }]
    }]);

    var canvasElement = document.getElementById('<%=canvas1.ClientID%>');
    canvasElement.width = '1280';
    canvasElement.height = '720';
    var ctx = canvasElement.getContext('2d');

    // Copy the current video frame onto the canvas. AMP injects its own inner
    // <video> tag, so select that rather than the wrapper element.
    // NOTE(review): for DRM-protected sources IE/Edge block frame capture by
    // design, so the canvas stays black there — that is not a script bug.
    function desenha() {
        var innerVideo = $('#video1 video')[0];
        // Skip drawing until AMP has injected the element and it has decoded
        // at least the current frame; drawImage(undefined) would throw.
        if (!innerVideo || innerVideo.readyState < 2) return;
        ctx.clearRect(0, 0, canvasElement.width, canvasElement.height);
        ctx.drawImage(innerVideo, 0, 0, canvasElement.width, canvasElement.height);
    }

    // Render loop: requestAnimationFrame syncs to the display refresh and
    // pauses in background tabs, unlike the original setTimeout(…, 1000/60).
    // (Also removed the unused locals x, y, min, tempo and nroRender.)
    function loop() {
        desenha();
        window.requestAnimationFrame(loop);
    }
    loop();
</script>
If you have some problem to understand what is the problem, run in Chrome and then in IE. In Chrome canvas appears like video, in IE canvas appears black.
Full code in https://github.com/tobiasrighi/video-canvas/blob/master/WebForm1.aspx
Because the video is protected with DRM, by design IE/Edge block the ability to capture frames - its actually not an error and this is built down lower in the media pipeline. It seems Chrome's current implementation with Widevine does not block frames, although this may happen in the near future depending on Google's future design considerations.
I'm working on a custom Soundcloud player for my website using waveform.js to generate waveforms. It works great but it lacks the scrubber functionality. How can I add that?
I'm not a JS wizard, still learning my ways around so I'll be very thankful for any help or suggestions!
Update IV: I found a new way of including canvas generated waveforms into original SoundCloud Custom Player sc-player.js.
First of all I found a line of code responsible for HTML structure of the player and added id="waveform" to sc-waveform container on line 529:
.append('<div class="sc-time-span"><div class="sc-waveform-container" id="waveform">
</div><div class="sc-buffer"></div><div class="sc-played"></div></div>')
Then I updated the line 676, replacing img with canvas
$available = $scrubber.find('.sc-waveform-container canvas'),
Next, I located a piece of code responsible for embedding the original image of waveform on line 340 and commented it out:
// $('.sc-waveform-container', $player).html('<img src="' + track.waveform_url +'" />');
And then I posted the code below at the bottom of my page:
<script>
// Fetch the track's metadata from the SoundCloud API, then draw its
// waveform into the #waveform container added to sc-player's markup.
SC.get("/tracks/80348246", function(track){
var waveform = new Waveform({
container: document.getElementById("waveform"),
height: 40,
innerColor: '#ffffff'
});
waveform.dataFromSoundCloudTrack(track);
});
//----------- end of inserted waveform.js code ----------------------
</script>
Results are very promising, Now I have fully customizable waveform and scrubber is working as well. However there are still things I'd like to fix.
In Chrome, when I press play and pause, then click the waveform, the track starts playing, but the play button doesn't change its state. Then need to double click it to stop the track.
The buffer and progress bar are still just the sc-player divs in the background. How could I link sc-player.js and waveform.js together so the progress is generated on the waveform canvas (as in example on http://waveformjs.org/)?
Any ideas how to fix those?
Here's the player on the live website:
http://www.code.spik3s.com/rnr/
on play call
// Progress-pointer pseudocode: poll every 100 ms and move a pointer element
// along the waveform in proportion to playback position. The lines marked
// "placeholder" below are NOT runnable JavaScript — substitute real values.
myVar=setInterval(Timed,100);
function Timed() {
total_duration = duration of the track playing; // placeholder — e.g. sound.duration
current_duration = the current position of the track. // placeholder — e.g. sound.position
width = canvas width; // placeholder — e.g. canvas.width
pointer = the id of the pointer being used to show the progress. // placeholder — a DOM element reference
position = (canvas_width / total_duration) * current_duration; // NOTE(review): `canvas_width` should match the `width` variable above
pointer.style.left = position; // NOTE(review): CSS lengths need a unit — use position + 'px'
}
you'll have to set the information in, but something like this will do
I'm drawing a video on a canvas, this works fine with Safari / Chrome / Firefox / Opera, but on the iPad, even though the video plays, (correct codec, etc) it is never rendered on the canvas,
Basically I just call :
canvas.getContext("2d").drawImage(video, 0, 0);
when the video is playing, and stop doing this when the video is paused or ended.
Is there anything else I should consider? Like clearing the canvas?
For now safari on iPad is not supporting this feature. There are some limits on the attributes and events of canvas tag and video tag of HTML5 particularly on iPad. The attributes and events of canvas and video tags which work fine on desktop browsers wont work on iPad. This is my personal experience too.
See Putting Video on Canvas
You basically can’t pass a video object to the canvas drawImage method. Apple suggests having the video positioned behind the canvas but this won’t help if you want to manipulate the video somehow.
Have you tried wrapping it inside the requestAnimationFrame() function.
<!-- BUG FIX: "autolay" was a typo — the attribute is "autoplay".
     Muted autoplay keeps it allowed under browser autoplay policies. -->
<video src="YourSrcFile.webm" autoplay muted></video>
<!-- Fallback to mp4 if not supported.
     (was a JS-style // comment, which renders as text in markup) -->
<canvas></canvas>
// Mirror the playing <video> onto the <canvas> once per display frame.
const video = document.querySelector("video");
const canvas = document.querySelector("canvas");
canvas.style.zIndex = '50'; // keep the canvas stacked above the video
const ctx = canvas.getContext("2d");

video.addEventListener('play', () => {
  // Self-scheduling draw loop, started only once playback begins.
  const step = () => {
    ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
    requestAnimationFrame(step);
  };
  requestAnimationFrame(step);
});
Make sure to match both the onscreen and offscreen canvases to the original video's aspect ratio; otherwise the extra calculations cause lag and poor performance.
You can use `transform: scale(...)` in your CSS to resize it, as long as the scaling is proportional — that doesn't seem to make it glitchy. I'd also suggest converting the video from MP4, AVI, or another file type to WebM.
I just used this for a VJ loop and it's running smoothly.
Try these and see if it makes any difference..
<script>
// Draw the playing video into a canvas, rescheduling itself with setTimeout.
document.addEventListener('DOMContentLoaded', function(){
var v = document.getElementById('v');
var canvas = document.getElementById('c');
var context = canvas.getContext('2d');
// NOTE(review): dividing by 100 yields a very small backing store
// (e.g. a 400px-wide element becomes a 4px-wide canvas). Confirm this is
// intentional; the drawImage call below scales the frame down to w x h.
var cw = Math.floor(canvas.clientWidth / 100);
var ch = Math.floor(canvas.clientHeight / 100);
canvas.width = cw;
canvas.height = ch;
// Start the draw loop only when playback actually begins.
v.addEventListener('play', function(){
draw(this,context,cw,ch);
},false);
},false);
// Repaints the current video frame every ~20 ms until the video is paused
// or has ended (the loop restarts on the next 'play' event).
function draw(v,c,w,h) {
if(v.paused || v.ended) return false;
c.drawImage(v,0,0,w,h);
// setTimeout's trailing arguments are forwarded to draw on each call.
setTimeout(draw,20,v,c,w,h);
}
</script>