I am trying to get the camera working from a button but am getting errors on the commented lines below. I am using the documentation provided by Phonegap/Cordova (v2.1). Any help appreciated.
// The Cordova camera plugin is exposed as `navigator.camera` (lowercase).
// `navigator.Camera` is undefined, which produced the original
// "Cannot read PictureSourceType of undefined" error.
// NOTE: these must only be read after the "deviceready" event has fired.
var pictureSource = navigator.camera.PictureSourceType.CAMERA;
var destinationType = navigator.camera.DestinationType.FILE_URI;
/**
 * Camera success callback: display the captured photo in #placeImage.
 * @param {string} imageURI - file URI returned by the camera plugin.
 */
function onPhotoURISuccess(imageURI) {
    var imgEl = document.getElementById('placeImage');
    imgEl.src = imageURI;
    imgEl.style.display = 'block';
    console.log(imageURI);
}
/**
 * Launch the device camera and hand the result to onPhotoURISuccess.
 * Fixes from the original:
 *  - `navigator.Camera` -> `navigator.camera` (the plugin is lowercase,
 *    which is why "camera is not defined" was reported);
 *  - `camera.destinationType.FILE_URI` referenced an undefined global;
 *    use the `destinationType` value resolved at the top of the file.
 * Must only be called after the "deviceready" event.
 */
function getPhoto() {
    navigator.camera.getPicture(onPhotoURISuccess, onFail, {
        quality: 50,
        destinationType: destinationType,
        sourceType: pictureSource
    });
}
/**
 * Camera error callback.
 * @param {string} message - error description supplied by the plugin.
 */
function onFail(message) {
    var text = 'Failed because: ' + message;
    alert(text);
}
Make sure you've got "deviceready" event first. For your call to getPicture() do this:
// Corrected call: the Cordova camera plugin lives at `navigator.camera`
// (lowercase); the enum is navigator.camera.DestinationType. Using
// `navigator.Camera` here would reintroduce the "undefined" error.
navigator.camera.getPicture(onPhotoURISuccess, onFail, {
    quality: 50,
    destinationType: navigator.camera.DestinationType.FILE_URI,
    sourceType: pictureSource
});
I am also working on this. If you are talking about Android, you need to test on a real device. Whether you test in a browser or in the Eclipse emulator, it will give you a "Camera not defined" error. I suspect this is because the emulator lacks the camera hardware.
Related
I am using PhoneGap to build my app. In the app I am building, I have a camera which turns on when I open the app, and I see camera controls. Once I capture the picture, the captured picture should be set as my app's background image. How can I do that?
here's what i have tried:
// Wait for Cordova before touching navigator.camera.
document.addEventListener("deviceready", onDeviceReady, false);

function onDeviceReady() {
    navigator.camera.getPicture(onSuccess, onFail, {
        quality: 50,
        destinationType: Camera.DestinationType.FILE_URI,
        encodingType: Camera.EncodingType.JPEG,
        saveToPhotoAlbum: true,
        showOverlay: false
    });

    // Success: show the photo and use it as the page background.
    function onSuccess(imageURI) {
        var image = document.getElementById('myImage');
        image.src = imageURI;
        alert(image.src);
        // Bug fix: the original passed the literal text "url(image.src)" as
        // the CSS value; the URI has to be concatenated into the string.
        document.body.style.backgroundImage = "url(" + image.src + ")";
    }

    function onFail(message) {
        /*alert("you are closing the camera")*/
    }
}
Your onSuccess function should look like this
/**
 * Success callback (DATA_URL flavour): receives base64-encoded JPEG data
 * and installs it as the document's background image.
 * @param {string} imageData - base64 image payload (no data: prefix).
 */
function onSuccess(imageData) {
    var dataUri = "data:image/jpeg;base64," + imageData;
    document.body.style.backgroundImage = "url(" + dataUri + ")";
}
I'm using the phonegap file upload to upload files to a server. When i select a portrait photo from the gallery it rotates to landscape
/**
 * Pick a photo from the device gallery and display it in #camera_image.
 * correctOrientation:true asks the plugin to rotate the image according
 * to its EXIF orientation before returning it.
 */
function selectPicture() {
    var onPick = function (uri) {
        var img = document.getElementById('camera_image');
        img.style.visibility = "visible";
        img.style.display = "block";
        img.src = uri;
        document.getElementById('camera_status').innerHTML = "Success";
    };
    var onError = function (e) {
        console.log("Error getting picture: " + e);
        document.getElementById('camera_status').innerHTML = "Error getting picture.";
    };
    navigator.camera.getPicture(onPick, onError, {
        quality: 50,
        destinationType: navigator.camera.DestinationType.FILE_URI,
        correctOrientation: true,
        sourceType: navigator.camera.PictureSourceType.PHOTOLIBRARY
    });
};
there is a "correctOrientation" parameter for getPicture (http://docs.phonegap.com/en/edge/cordova_camera_camera.md.html) try setting it to false (or true :D)
Hope this helps
I am currently developing a component that allows you to make webcam videos and upload them directly to amazon s3. For that purpose I user RecordRTC library and Amazon S3 storage. I have discovered a strange issue, and I am not sure whether it has to do with RecordRTC blobs or with amazon configuration. When the file size is over 1MB, the Amazon server hangs and after 20 seconds returns a timeout error. Could anyone help me figure this out? Here is my code for the recorder component (p() is the same as console.log()):
// Pick whichever getUserMedia implementation this browser provides and
// stash it on `navigator` under a uniform name.
navigator.record_function = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
// Only wire up recording if the browser supports getUserMedia at all.
// NOTE(review): `videoObj`, `video`, `videoOptions` and `errBack` are
// defined elsewhere in this file — assumed to be constraints, the preview
// <video> element, RecordRTC options and an error callback respectively.
if (navigator.record_function) {
navigator.record_function(videoObj, function (stream) {
// Preview the live camera stream in the page's video element.
video.src = window.URL.createObjectURL(stream);
video.play();
// Stopping the stream kills the camera; the recorders are stopped
// separately by the other #stop_recording handler below.
$("#stop_recording").click(function () {
stream.stop();
});
// init recorders
audio_recorder = RecordRTC(stream, {type: "audio", bufferSize: 16384});
video_recorder = RecordRTC(stream, videoOptions);
}, errBack);
}
// Start both recorders on the same stream when the user clicks start.
$("#start_recording").click(function () {
// record the audio and video
video_recorder.startRecording();
audio_recorder.startRecording();
});
And for the uploader component:
// Populate the POST parameters for an S3 browser-based (policy) upload.
// Every field appended here must be allowed by the signed policy document,
// and S3 requires the "file" field to be appended LAST — fields appended
// after it are ignored by S3.
fd.append('key', "users/" + Main.current_user.id + "/test.webm");
fd.append('AWSAccessKeyId', msg.access_key_id);
fd.append('acl', 'private');
fd.append('policy', msg.policy);
fd.append('signature', msg.signature);
// NOTE(review): '$Content-Type' looks like a literal placeholder — verify
// it matches a starts-with condition in the signed policy, otherwise S3
// can reject or stall the upload.
fd.append('Content-Type', '$Content-Type');
fd.append('x-amz-server-side-encryption', 'AES256');
// Ask S3 to respond with 201 + XML on success instead of a redirect.
fd.append('success_action_status', '201');
fd.append('name', 'test.webm');
fd.append('Filename', 'test.webm');
fd.append("file", file);
// Asynchronous POST straight to the bucket's virtual-hosted endpoint.
xhr.open('POST', 'https://' + msg.bucket + '.s3.amazonaws.com', true);
xhr.upload.addEventListener('progress', function (e) {
p(e);
}, false);
xhr.onreadystatechange = function () {
p(xhr.readyState);
};
xhr.send(fd);
// Stop both recorders; each stopRecording callback fires asynchronously
// once that recorder's Blob has been finalised.
$("#stop_recording").click(function () {
// stop recorders
audio_recorder.stopRecording(function () {
var audio_blob = audio_recorder.getBlob();
p(audio_blob);
// Audio upload currently disabled; only the video is sent.
// VideoUploader.upload_user_audio(audio_blob);
}
);
video_recorder.stopRecording(function () {
var video_blob = video_recorder.getBlob();
p(video_blob);
// Hand the finished recording to the uploader component above.
VideoUploader.upload_user_video(video_blob);
});
});
The error message on timeout is:
Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
I appreciate any help I can get, I am really lost here.
Thanks in advance.
I have managed to find a pretty... strange solution for this issue. It appears the problem comes from the way RecordRTC saves the blob object, which makes it impossible to upload in Firefox 35 on Mac. I could not find the code in RecordRTC that causes the problem, the Blob seems to be generated correctly, but the workaround that worked for me was encoding the Blob again into a new Blob via Filereader.
// Workaround for RecordRTC blobs failing to upload in some browsers
// (observed on Firefox 35 / Mac): round-trip the blob through a
// FileReader and rebuild it as a fresh video/webm Blob before uploading.
video_recorder.stopRecording(function () {
    var originalBlob = video_recorder.getBlob();
    var reader = new FileReader();
    reader.onload = function () {
        // `this.result` is the ArrayBuffer produced by readAsArrayBuffer.
        var rebuilt = new Blob([this.result], {type: "video/webm"});
        VideoUploader.upload_user_video(rebuilt)
    };
    reader.readAsArrayBuffer(originalBlob);
});
As for why that happens, I have no idea, but other projects that use the same technology are affected as well http://videobooth.herokuapp.com/ (the same exact problem in the same exact browser, upload hangs). Maybe this information could be useful for people working on RecordRTC, as I think this workaround could be made into a patch.
Solution is tested on Mac Firefox 35 and Mac Chrome 40.
I read several other questions on a related issue, but none answered my question. I have an odd issue where I am able to use WebRTC to audio chat from chrome to firefox but not firefox to chrome.
Basically, when a user wishes to audio chat, he/she clicks a button #audioChatBtn, which uses getUserMedia() to setup a stream. The thing is, clicking #audioChatBtn from Firefox doesn't fire the onaddstream callback on Chrome, but clicking the button from Chrome fires onaddstream on Firefox. Thus, I can audio chat from Chrome to Firefox but not the other way around. I have been trying to figure this out for several hours, but I'm hoping maybe someone here has an answer.
Relevant source:
// STUN servers used for ICE/NAT traversal.
// Fix: the RTCIceServer dictionary field is `urls`; the singular `url`
// form is legacy/deprecated and is rejected by modern browsers.
var configuration = {
    'iceServers': [
        { urls: 'stun:stun.l.google.com:19302' },
        { urls: 'stun:stun1.l.google.com:19302' },
        { urls: 'stun:stun2.l.google.com:19302' },
        { urls: 'stun:stun3.l.google.com:19302' },
        { urls: 'stun:stun4.l.google.com:19302' }
    ]
};
// Fix: RTCPeerConnection is a constructor and must be invoked with `new`;
// calling it as a plain function throws in spec-compliant browsers.
var pc = new RTCPeerConnection(configuration);
var myStream = null;        // local microphone stream once audio is enabled
var currentAudioIndex = 0;  // Number of created channels
var myAudioEnabled = false; // toggled by the #audioChatBtn click handler
// Forward each locally gathered ICE candidate to the peer over the
// app's signalling channel (the "persistState" event).
pc.onicecandidate = function (evt) {
    if (evt.candidate) {
        $(document).trigger("persistState", { mode: 'rtc', 'candidate': evt.candidate });
    }
};

// Generate an offer whenever the connection needs (re)negotiation.
pc.onnegotiationneeded = function () {
    pc.createOffer(localDescCreated, logError);
};
// When a remote stream arrives, bind it to an <audio> element, creating
// the element on the fly the first time each index is used.
pc.onaddstream = function (evt) {
    console.log('creating and binding audio');
    var idx = currentAudioIndex++;
    var $el = $('#audio' + idx);
    if ($el.length == 0) {
        $('body').append($('<audio id="audio' + idx + '" autoplay>'));
        $el = $('#audio' + idx);
    }
    // attachMediaStream comes from adapter.js and hides browser differences.
    attachMediaStream($el[0], evt.stream);
};
/**
 * Install the freshly created offer/answer as our local description,
 * then push it to the peer via the signalling channel.
 * @param {RTCSessionDescription} desc
 */
function localDescCreated(desc) {
    var onSet = function () {
        $(document).trigger("persistState", { mode: 'rtc', 'sdp': pc.localDescription });
    };
    pc.setLocalDescription(desc, onSet, logError);
}
/**
 * Shared failure handler for the WebRTC calls above.
 * @param {*} e - error object (currently not surfaced to the user).
 */
function logError(e) {
    bootbox.alert("Audio chat could not be started.");
}
/**
 * Feature-detect getUserMedia support.
 * Generalized: also recognises the modern promise-based
 * navigator.mediaDevices.getUserMedia API in addition to the legacy
 * prefixed variants (backward compatible — all old positives still match).
 * @returns {boolean} true if some getUserMedia implementation exists.
 */
function hasGetUserMedia() {
  return !!((navigator.mediaDevices && navigator.mediaDevices.getUserMedia) ||
            navigator.getUserMedia || navigator.webkitGetUserMedia ||
            navigator.mozGetUserMedia || navigator.msGetUserMedia);
}
// Handle signalling messages from the other peer: either a remote
// session description (offer/answer) or an ICE candidate.
server.onPersist = function (msg) {
    if (msg.mode != "rtc") {
        return;
    }
    if (msg.sdp) {
        pc.setRemoteDescription(new RTCSessionDescription(msg.sdp), function () {
            // if we received an offer, we need to answer
            if (pc.remoteDescription.type == 'offer') {
                pc.createAnswer(localDescCreated, logError);
            }
        }, logError);
    } else {
        pc.addIceCandidate(new RTCIceCandidate(msg.candidate));
    }
}
// Toggle audio chat when the user clicks the button.
$('#audioChatBtn').click(function() {
    if (!hasGetUserMedia()) {
        bootbox.alert('Audio conferencing is not supported by your browser. (Currently only supported by Chrome, Firefox, and Opera web browsers.)');
        return;
    }
    if (!myAudioEnabled) {
        // Turn audio on: grab the microphone and feed it to the connection.
        getUserMedia({ video: false, audio: true }, function (localMediaStream) {
            myStream = localMediaStream;
            pc.addStream(localMediaStream);
            displayAlert('Streaming...', 'Audio chat is enabled');
            $('#audioChatBtn').removeClass('btn-primary').addClass('btn-success');
        }, logError);
    } else {
        // Turn audio off: stop the local stream and restyle the button.
        myStream.stop();
        displayAlert('Streaming closed', 'Audio chat is off');
        $('#audioChatBtn').removeClass('btn-success').addClass('btn-primary');
    }
    // Flip the flag immediately; the getUserMedia callback runs later.
    myAudioEnabled = !myAudioEnabled;
});
What I've tried
Tried using 'optional': [{ 'DtlsSrtpKeyAgreement': 'true' }] in the configuration after reading this question
Tried creating a new RTCPeerConnection() each request
Tried using native browser functions instead of adapter.js.
Explored Web Audio API instead of getUserMedia()
Firefox does not currently support onnegotiationneeded, because we currently don't support re-negotiation of an existing connection. All addStream/addTrack calls and a single createDataChannel (if you want to use them) need to be done before createOffer() or createAnswer(). You can call createDataChannel() after you connect, if you created one before createOffer().
Adding a stream after they're connected won't work.
An (annoying) alternative is to create a new set of PeerConnections to replace the old ones (using a DataChannel in the old pair as a signaling channel for lower latency)
Resolving this is high on our priority list, but will take a few more releases.
After a lot of debugging, I came to realize that the bug has nothing to do with my code but has to do with Firefox's implementation of WebRTC. Firefox doesn't trigger the onnegotiationneeded callback, so I have to do it hackily using a timeout (and hope that the stream information has been relayed to the remote client before the function fires). Obviously, this is a firefox bug and I will report it, hoping that they fix the bug in the next build.
getUserMedia({ video: false, audio: true }, function (localMediaStream) {
myStream = localMediaStream;
pc.addStream(localMediaStream);
displayAlert('Streaming...', 'Audio chat is enabled');
$('#audioChatBtn').removeClass('btn-primary').addClass('btn-success');
// Need this for Firefox: it does not fire onnegotiationneeded, so the
// handler is invoked manually after a delay, hoping the stream details
// have reached the remote side by then (acknowledged hack, see text above).
if (webrtcDetectedBrowser == 'firefox')
setTimeout(pc.onnegotiationneeded, 5000);
}, logError);
// Camera enum handles; resolved once the device is ready.
var pictureSource;      // picture source
var destinationType;    // sets the format of returned value
var photoid = window.localStorage.getItem("photoid");
var photoData = null;   // base64 image data held between capture and write

// Wait for PhoneGap to connect with the device.
document.addEventListener("deviceready", onDeviceReady, false);

// PhoneGap is ready: navigator.camera is now safe to touch.
function onDeviceReady() {
    pictureSource = navigator.camera.PictureSourceType;
    destinationType = navigator.camera.DestinationType;
}
/**
 * Button handler: capture/select an image from the given source.
 * @param {number} source - a navigator.camera.PictureSourceType value.
 */
function getPhoto(source) {
    alert("Entered sd card");
    var options = {
        quality: 50,
        destinationType: destinationType.FILE_URI,
        sourceType: source
    };
    // Retrieve image file location from the specified source.
    navigator.camera.getPicture(onPhotoDataSuccess, onFail, options);
}
/**
 * Success callback for base64 captures: shows the photo and kicks off
 * saving it to the SD card via the File API (see gotFS below).
 * @param {string} imageData - base64-encoded JPEG data.
 */
function onPhotoDataSuccess(imageData) {
    console.log(imageData);
    // Display the captured photo in the page.
    var thumb = document.getElementById('photos');
    thumb.style.display = 'block';
    thumb.src = "data:image/jpeg;base64," + imageData;
    alert(imageData);
    // Stash the data and request the filesystem; gotFS writes it out.
    photoData = imageData;
    window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, gotFS, fail);
}
/**
 * FileSystem success callback: open the target file for writing.
 * Bug fix: the original passed `null` options, which makes getFile fail
 * with NOT_FOUND_ERR whenever the file does not already exist. Request
 * creation explicitly so first-time writes succeed.
 * @param {FileSystem} fileSystem - filesystem from requestFileSystem.
 */
function gotFS(fileSystem) {
    fileSystem.root.getFile("/sdcard/external_sd/" + photoid + ".jpg",
        { create: true, exclusive: false }, gotFileEntry, fail);
}
/**
 * FileWriter ready: write the captured photo data to the file.
 * NOTE(review): photoData holds a base64 string here, so this writes
 * base64 text rather than binary JPEG bytes — confirm that is intended.
 * @param {FileWriter} writer
 */
function gotFileWriter(writer) {
    writer.onwrite = function (evt) {
        alert("write success");
    };
    writer.write(photoData);
}
/**
 * Shared error callback for the File API calls; surfaces the error code.
 * @param {FileError} error
 */
function fail(error) {
    var code = error.code;
    alert(code);
}
/* function onPhotoURISuccess(imageURI) {
// Uncomment to view the image file URI
console.log(imageURI);
alert("photo captured");
uploadPhoto(imageURI);
} */
/* function getPhoto(source) {
// Retrieve image file location from specified source
navigator.camera.getPicture(onPhotoURISuccess, onFail, { quality: 50,
destinationType: destinationType,
sourceType: source });
} */
/**
 * Called if something bad happens during capture.
 * @param {string} message - error description from the camera plugin.
 */
function onFail(message) {
    var text = 'Failed because: ' + message;
    alert(text);
}
I have used the above code to access data on the SD card. But now what I need to do is get the paths of the images present there and put them in a different object that can access those paths and display the images. I have no clue how to go about that.
Any help is appreciated.
What you can do is write a phonegap plugin for the platform you are developing for. I'll assume it's android. Writing android plugins.
When you call Phonegap.exec to invoke the plugin, the plugin gets the SD card path through
Environment.getExternalStorageDirectory().getAbsolutePath()
and then does a basic search to get all the .jpg and .png files and return a json of all the paths of the files.