I couldn't find an HTML/JS camera plugin for Flutter Web. My goal is to open the camera and capture a picture in a Flutter Web app. I wrote the HTML and JS camera code, but it doesn't work in Flutter Web. I tried the flutter_html plugin, but it only renders a few meaningless lines in the Chrome web emulator. Please help me or suggest other plugins.
import "package:flutter/material.dart";
import 'package:flutter_html/flutter_html.dart';
class kamera extends StatefulWidget {
#override
_kameraState createState() => _kameraState();
}
class _kameraState extends State<kamera> {
#override
Widget build(BuildContext context) {
return Html(data:
"""
<video id="video" width="640" height="480" autoplay></video><br>
<button id="snap">Snap Photo</button><br>
<canvas id="canvas" width="640" height="480"></canvas><br>
<script>
var video = document.getElementById('video');
// Get access to the camera!
if(navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
// Not adding `{ audio: true }` since we only want video now
navigator.mediaDevices.getUserMedia({ video: true }).then(function(stream) {
//video.src = window.URL.createObjectURL(stream);
video.srcObject = stream;
video.play();
});
}
var canvas = document.getElementById('canvas');
var context = canvas.getContext('2d');
var video = document.getElementById('video');
// Trigger photo take
document.getElementById("snap").addEventListener("click", function() {
context.drawImage(video, 0, 0, 640, 480);
});
</script>
""");
}
}
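A note on why this fails: flutter_html parses the HTML into Flutter widgets and never executes <script> tags, so the camera code above renders as inert text. One workaround on Flutter Web is to keep the camera logic in plain JavaScript in web/index.html and call it from Dart through js interop (or register the video element as a platform view). A minimal sketch of the JavaScript side, where startCameraAndCapture is a hypothetical helper name, not part of any plugin:

// web/index.html: hypothetical helper the Dart side would invoke via
// js interop. Opens the camera, grabs one frame, stops the stream, and
// resolves with a PNG data URL.
window.startCameraAndCapture = function() {
  return navigator.mediaDevices.getUserMedia({ video: true }).then(function(stream) {
    var video = document.createElement('video');
    video.srcObject = stream;
    return video.play().then(function() {
      var canvas = document.createElement('canvas');
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
      canvas.getContext('2d').drawImage(video, 0, 0);
      stream.getTracks().forEach(function(track) { track.stop(); });
      return canvas.toDataURL('image/png');
    });
  });
};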
The canvas element is distorting my image and zooming in on the middle left, as seen here:
https://i.ibb.co/ZMCcM88/Screenshot-116.png
The top is what it should look like; the bottom is how it's coming out. My console.log() shows the video and canvas sizes are equal, so nothing should be distorted or coming out wrong:
https://i.ibb.co/p0cHDXf/Screenshot-118.png
My code looks like this.
Template:
<div>
  <video ref="video" class="full-width" autoplay playsinline />
  <canvas ref="canvas" class="full-width" height="240" />
</div>
Script:
import { defineComponent, onMounted, ref } from "vue";

export default defineComponent({
  name: "CameraPage",
  setup() {
    const imageCaptured = ref(false);
    const video = ref(null);
    const canvas = ref(null);
    const initCamera = () => {
      navigator.mediaDevices
        .getUserMedia({
          video: true,
        })
        .then((stream) => {
          video.value.srcObject = stream;
        });
    };
    const captureImage = () => {
      canvas.width = video.value.getBoundingClientRect().width;
      canvas.height = video.value.getBoundingClientRect().height;
      let context = canvas.value.getContext("2d");
      context.drawImage(video.value, 0, 0, canvas.width, canvas.height);
      imageCaptured.value = "True";
    };
    onMounted(() => {
      initCamera();
    });
    return {
      initCamera,
      captureImage,
      video,
      canvas,
      imageCaptured,
    };
  },
});
What am I doing that's distorting the image? Am I passing something wrong to the drawImage() method? Is it the "full-width" class and the "height" attribute on the canvas?
I just want to "take a screenshot" of the streaming video and make it look like a photo, basically.
I'm so close but so far away.
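For anyone hitting the same distortion: the likely culprit is that canvas is a ref, so canvas.width = ... inside captureImage sets a property on the ref wrapper instead of on the <canvas> element; the canvas keeps its default bitmap size while the full-width CSS stretches it, producing exactly this kind of zoomed crop. A hedged sketch of a corrected captureImage, assuming the same refs as above:

const captureImage = () => {
  const videoEl = video.value;
  const canvasEl = canvas.value;
  // Size the canvas bitmap to the rendered video size before drawing,
  // so the bitmap resolution and the CSS box agree.
  const rect = videoEl.getBoundingClientRect();
  canvasEl.width = rect.width;
  canvasEl.height = rect.height;
  canvasEl.getContext("2d").drawImage(videoEl, 0, 0, canvasEl.width, canvasEl.height);
  imageCaptured.value = true;
};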
I want to take a picture from the user and send it to the server without Ajax.
I searched and found this code, which takes a photo from the video and draws it on a canvas. Now I want to set that image on an input in a form, but I don't know how.
This is the HTML code:
<video id="player" controls autoplay></video>
<button id="capture" onclick="$('#loader').hide()">Capture</button>
<canvas id="canvas" width=320 height=240></canvas>
<input id="image-input">
And this is the JavaScript code:
const player = document.getElementById('player');
const canvas = document.getElementById('canvas');
const context = canvas.getContext('2d');
const captureButton = document.getElementById('capture');
const constraints = {
  video: true,
};

captureButton.addEventListener('click', () => {
  // Draw the video frame to the canvas.
  context.drawImage(player, 0, 0, canvas.width, canvas.height);
  player.srcObject.getVideoTracks().forEach(track => track.stop());
  var Pic = document.getElementById("canvas").toDataURL("image/png");
  Pic = Pic.replace(/^data:image\/(png|jpg);base64,/, "");
});

// Attach the video stream to the video element and autoplay.
navigator.mediaDevices.getUserMedia(constraints)
  .then((stream) => {
    player.srcObject = stream;
  });
Now I want to set this image as the value of the input. How can I do that?
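One common approach, assuming the input lives inside the form as a hidden field (e.g. <input type="hidden" id="image-input" name="image">, a detail not shown above), is to write the base64 payload into the input's value before the form submits; a plain non-Ajax POST then carries the image, and the server decodes the base64 string. A minimal sketch extending the click handler:

captureButton.addEventListener('click', () => {
  // Draw the current frame, stop the camera, then stash the base64
  // payload in the form field so a normal form submission carries it.
  context.drawImage(player, 0, 0, canvas.width, canvas.height);
  player.srcObject.getVideoTracks().forEach(track => track.stop());
  const dataUrl = canvas.toDataURL('image/png');
  document.getElementById('image-input').value =
      dataUrl.replace(/^data:image\/png;base64,/, '');
});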
I'm trying to take a snapshot from my webcam with the navigator.mediaDevices.getUserMedia() and canvas.getContext('2d').drawImage() functions.
When I do it like this, it works perfectly:
function init() {
  myVideo = document.getElementById("myVideo");
  myCanvas = document.getElementById("myCanvas");
  videoWidth = myCanvas.width;
  videoHeight = myCanvas.height;
  startVideoStream();
}

function startVideoStream() {
  navigator.mediaDevices.getUserMedia({audio: false, video: { width: videoWidth, height: videoHeight }}).then(function(stream) {
    myVideo.src = URL.createObjectURL(stream);
  }).catch(function(err) {
    console.log("Unable to get video stream: " + err);
  });
}

function snapshot() {
  myCanvas.getContext('2d').drawImage(myVideo, 0, 0, videoWidth, videoHeight);
}
<!DOCTYPE html>
<html>
<head>
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <script src="debug.js"></script>
</head>
<body onload="init()">
  <div id="mainContainer">
    <video id="myVideo" width="640" height="480" autoplay style="display: inline;"></video>
    <canvas id="myCanvas" width="640" height="480" style="display: inline;"></canvas>
    <input type="button" id="snapshotButton" value="Snapshot" onclick="snapshot()"/>
  </div>
</body>
</html>
The thing is, I don't want to use a button click to take the snapshot; I want to take it as soon as the camera stream is loaded.
I tried calling the snapshot() function directly after setting the video source:
function init() {
  myVideo = document.getElementById("myVideo");
  myCanvas = document.getElementById("myCanvas");
  videoWidth = myCanvas.width;
  videoHeight = myCanvas.height;
  startVideoStream();
}

function startVideoStream() {
  navigator.mediaDevices.getUserMedia({audio: false, video: { width: videoWidth, height: videoHeight }}).then(function(stream) {
    myVideo.src = URL.createObjectURL(stream);
    snapshot();
  }).catch(function(err) {
    console.log("Unable to get video stream: " + err);
  });
}

function snapshot() {
  myCanvas.getContext('2d').drawImage(myVideo, 0, 0, videoWidth, videoHeight);
}
But it doesn't work: my canvas stays white. I guess it's because the camera stream is not fully loaded at that point.
So is there another event that fires which I could use to draw the snapshot as soon as the camera feed is loaded? Or am I on the wrong track entirely?
Thanks in advance!
Wait for the loadedmetadata event:
navigator.mediaDevices.getUserMedia({video: true})
  .then(stream => {
    video.srcObject = stream;
    return new Promise(resolve => video.onloadedmetadata = resolve);
  })
  .then(() => canvas.getContext('2d').drawImage(video, 0, 0, 160, 120))
  .catch(e => console.log(e));

<video id="video" width="160" height="120" autoplay></video>
<canvas id="canvas" width="160" height="120"></canvas>
The above should work in all browsers that support WebRTC.
In Chrome you can also do this, though at the time play() didn't return a promise in any other browser (it now does in all modern browsers).
Also note that URL.createObjectURL(stream) is deprecated. Use srcObject.
Update: thanks to @KyleMcDonald in the comments for pointing out the importance of registering the loadedmetadata listener synchronously with setting the srcObject! The code has been updated accordingly.
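The same flow reads a little more cleanly with async/await; a minimal sketch, assuming the same video and canvas elements as in the snippet above:

async function snapshotWhenReady() {
  const stream = await navigator.mediaDevices.getUserMedia({ video: true });
  video.srcObject = stream;
  // Register the listener in the same tick as setting srcObject, then
  // wait for the metadata (and thus the video dimensions) to arrive.
  await new Promise(resolve => (video.onloadedmetadata = resolve));
  canvas.getContext('2d').drawImage(video, 0, 0, 160, 120);
}

snapshotWhenReady().catch(e => console.log(e));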
I am taking a snapshot of a stream and displaying it on a canvas.
How can I save the captured snapshot to a file on my server?
PS: The code below is part of a JSP page, and the project is hosted on a Tomcat server. I don't want to save the snapshot on the user's system, just on the server hosting the project.
<canvas id="snapshot" width=130 height=130 align="right"></canvas>
<video id="player" width="220" height="140" align="right"; controls autoplay></video>
<button id="capture">Capture</button>
<script>
var player = document.getElementById('player');
var snapshotCanvas = document.getElementById('snapshot');
var captureButton = document.getElementById('capture');
var videoTracks;

var handleSuccess = function(stream) {
  // Attach the video stream to the video element and autoplay.
  player.srcObject = stream;
  videoTracks = stream.getVideoTracks();
};

captureButton.addEventListener('click', function() {
  var context = snapshotCanvas.getContext('2d');
  context.drawImage(player, 150, 150, 320, 240, 0, 0, snapshotCanvas.width, snapshotCanvas.height);
  var imgdata = snapshotCanvas.toDataURL("image/png");
  var newdata = imgdata.replace(/^data:image\/png/, 'data:application/octet-stream');
  // Stop all video tracks once the snapshot has been taken (the stream
  // is already running here, so videoTracks is guaranteed to be set).
  videoTracks.forEach(function(track) { track.stop(); });
});

navigator.mediaDevices.getUserMedia({video: true}).then(handleSuccess);
</script>
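To actually save the capture on the server, the usual route is to POST the base64 data to a server-side endpoint that decodes it and writes the file; the /upload-snapshot path below is a hypothetical servlet mapping, and the Tomcat-side decoding is not shown. A sketch of the browser side:

function uploadSnapshot() {
  var dataUrl = snapshotCanvas.toDataURL('image/png');
  // Strip the data-URL prefix; the servlet decodes the remaining base64.
  var base64 = dataUrl.replace(/^data:image\/png;base64,/, '');
  return fetch('/upload-snapshot', {  // hypothetical endpoint name
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: 'image=' + encodeURIComponent(base64)
  });
}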
How do you enable the front camera in a WebView? I have enabled the features in AndroidManifest.xml:
<uses-feature android:name="android.hardware.camera" android:required="true" />
<uses-feature android:name="android.hardware.camera.front" android:required="true" />
The camera is not going to be used for taking photos or recording, just to switch on the front camera.
When I go to the website in the phone's browser, the camera works once I allow the permission prompt. How can this work with a WebView?
The HTML file has canvas and video tags that display the webcam feed; it doesn't record or take pictures, it just shows the camera view.
Here is the HTML code:
<canvas id="inCanvas" width="500" height="500" style="display:none"></canvas>
<video id="inputVideo" width="100" height="100" autoplay loop ></video>
It works with the webcam in a regular browser, but not in a WebView on Android.
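On the page side there is little more to enable: the manifest features alone are not enough, because an Android WebView only exposes the camera if the host app grants the page's permission request in native code (WebChromeClient.onPermissionRequest) and holds the CAMERA runtime permission. From JavaScript you can at least request the front camera explicitly and log why the request fails; a minimal sketch using the inputVideo element above:

// facingMode: 'user' asks for the front camera. The catch logs whether
// the failure is NotAllowedError (permission not granted by the app)
// or NotFoundError (no camera exposed to the WebView).
navigator.mediaDevices.getUserMedia({ video: { facingMode: 'user' } })
  .then(function(stream) {
    document.getElementById('inputVideo').srcObject = stream;
  })
  .catch(function(err) {
    console.log('getUserMedia failed:', err.name, err.message);
  });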
I didn't quite understand, but I think you want one of the following two things.
1) Access the camera and just show the video on the screen (without capturing an image):
HTML:
<video id='video' autoplay></video>
<canvas id='canvas' width='100' height='100'></canvas>
JS:
var onFailSoHard = function(e) {
  console.log('failed', e);
};

// Show the camera feed in the video element.
var video = document.getElementById('video');
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
  navigator.mediaDevices.getUserMedia({ video: true }).then(function(stream) {
    video.srcObject = stream;
  }, onFailSoHard);
}

// Mirror the current video frame onto the canvas every 100 ms.
var canvas = document.getElementById('canvas');
var ctx = canvas.getContext('2d');
setInterval(function() {
  ctx.drawImage(video, 0, 0);
}, 100);
2) Capture an image from the camera (note: this is the Apache Cordova camera plugin API, so it only works inside a Cordova app); here is the doc for that.

navigator.camera.getPicture(onSuccess, onFail, {
  quality: 50,
  destinationType: Camera.DestinationType.DATA_URL
});

function onSuccess(imageData) {
  var image = document.getElementById('myImage');
  image.src = "data:image/jpeg;base64," + imageData;
}

function onFail(message) {
  alert('Failed because: ' + message);
}
I would use something similar to the script below to access the phone camera.
<script>
var errorCallback = function(e) {
  console.log('Rejected!', e);
};

navigator.mediaDevices.getUserMedia({video: true, audio: true})
  .then(function(localMediaStream) {
    var video = document.querySelector('video');
    // createObjectURL(stream) is deprecated; assign the stream directly.
    video.srcObject = localMediaStream;
    video.onloadedmetadata = function(e) {
      // Ready to go. Do some stuff.
    };
  })
  .catch(errorCallback);
</script>
I used the following tutorial to help me.
Hope it sets you on the right track :)