I used the code from Nikolay answer https://jsfiddle.net/makhalin/nzw5tv1q/ on my Ionic - Angular PWA (I put it on a custom.js file and imported it on angular.json). It's working great if I open it in Chrome or Edge on Android but if I install it as a PWA it works the first time, then stops working.
Is there anything I must do to make it work as a PWA?
//have a console on mobile: mirrors log messages into a #console element.
const consoleOutput = document.getElementById("console");
const log = function (msg) {
  consoleOutput.innerText = `${consoleOutput.innerText}\n${msg}`;
  console.log(msg);
};

//Test browser support
const SUPPORTS_MEDIA_DEVICES = 'mediaDevices' in navigator;
if (SUPPORTS_MEDIA_DEVICES) {
  //Get the environment camera (usually the second one)
  navigator.mediaDevices.enumerateDevices().then(devices => {
    const cameras = devices.filter((device) => device.kind === 'videoinput');
    if (cameras.length === 0) {
      log('No camera found on this device.');
    }
    // Create stream and get video track
    navigator.mediaDevices.getUserMedia({
      video: {
        facingMode: 'environment',
      }
    }).then(stream => {
      const track = stream.getVideoTracks()[0];
      //Create image capture object and get camera capabilities
      const imageCapture = new ImageCapture(track);
      imageCapture.getPhotoCapabilities().then(capabilities => {
        //let there be light!
        const btn = document.querySelector('.switch');
        // Torch is supported either via the boolean `torch` capability or a
        // fillLightMode list that offers something other than 'none'.
        const torchSupported = !!capabilities.torch || (
          'fillLightMode' in capabilities &&
          capabilities.fillLightMode.length != 0 &&
          capabilities.fillLightMode != 'none'
        );
        if (torchSupported) {
          let torch = false;
          btn.addEventListener('click', function (e) {
            // BUG FIX: applyConstraints() is asynchronous — a synchronous
            // try/catch can never observe its rejection. Attach .catch so
            // failures (e.g. after a PWA resume) are actually logged.
            track.applyConstraints({
              advanced: [{
                torch: (torch = !torch)
              }]
            }).catch(log);
          });
        } else {
          log("No torch found");
        }
      }).catch(log);
    }).catch(log);
  }).catch(log);
  //The light will be on as long the track exists
}
Related
I'm building an audio player as a mobile app using React Native, aiming for a perfectly gapless transition from one audio track to the next when looping. Currently there is a half-second skip — like a heartbeat — between each song, and when looping any given song there is a moment with no sound.
Currently using Expo Audio player:
https://docs.expo.dev/versions/latest/sdk/audio/
Also tried:
https://www.npmjs.com/package/react-native-vlc-media-player
My Audioprovider.js where the logic is located looks like this:
// Expo AV playback-status callback, fired on every status tick of the
// playback object. NOTE(review): this excerpt is truncated — the closing
// braces of the outer handler are missing from the post.
onPlaybackStatusUpdate = async playbackStatus => {
if (playbackStatus.isLoaded && playbackStatus.isPlaying) {
// Mirror position/duration into component state so the UI (slider/timer)
// stays in sync while playing.
this.updateState(this, {
playbackPosition: playbackStatus.positionMillis,
playbackDuration: playbackStatus.durationMillis,
});
}
if (playbackStatus.isLoaded && !playbackStatus.isPlaying) {
// Playback paused/stopped: persist the resume point for the next app open.
storeAudioForNextOpening(
this.state.currentAudio,
this.state.currentAudioIndex,
playbackStatus.positionMillis
);
}
let durationSeconds = playbackStatus.durationMillis/1000;
let positionSeconds = playbackStatus.positionMillis/1000;
// Advance either when the track naturally finishes, or ~2 seconds early
// (whole-second comparison) — presumably to pre-start the next track and
// hide the gap. NOTE(review): the early condition can match on several
// consecutive status ticks while the second values are equal — verify it
// does not trigger playNext more than once.
if (playbackStatus.didJustFinish || (parseInt(durationSeconds)-2 === parseInt(positionSeconds))) {
if (this.state.isPlayListRunning) {
let audio;
const indexOnPlayList = this.state.activePlayList.audios.findIndex(
({ id }) => id === this.state.currentAudio.id
);
// Loop mode replays the same playlist index; otherwise advance, wrapping
// to the first track when the end is reached.
const nextIndex = this.state.isLoop ? indexOnPlayList : indexOnPlayList + 1;
audio = this.state.activePlayList.audios[nextIndex];
if (!audio) audio = this.state.activePlayList.audios[0];
const indexOnAllList = this.state.audioFiles.findIndex(
({ id }) => id === audio.id
);
const status = await playNext(this.state.playbackObj, audio.url);
return this.updateState(this, {
soundObj: status,
isPlaying: true,
currentAudio: audio,
currentAudioIndex: indexOnAllList,
});
}
Any ideas? Is it even possible?
Github:
https://github.com/samirhassen/OctaCoil
I am using twilio TURN server for webRTC peer connecting two browsers located on different sides of the world, still the connection does not open.
Log shows the local and remote descriptions are set on both sides. Audio/video tracks are also pushed and received, but the "onopen" method on either of the data channels are not firing. Below is the code extract.
create offer code
// Caller side of the handshake: build the RTCPeerConnection, push local
// media tracks, open the data channel, then publish the offer SDP.
// Signalling is non-trickle: the SDP is written to the DB only once ICE
// gathering completes (null candidate), so it carries all candidates.
async createOffer(){
this.initiated = true
this.conn = new RTCPeerConnection(this.servers);
if (this.conn){
// Append remote tracks to the callee's MediaStream as they arrive.
this.conn.ontrack = e => {
e.streams[0].getTracks().forEach(track => {
this.calleeStream?.addTrack(track);
this.logs.push('received track:' + track.label);
})
}
if (this.callerStream)
{
const s = this.callerStream;
this.callerStream.getTracks().forEach(track =>
{
this.conn?.addTrack(track,s);
this.logs.push('pushed track:' + track.label);
});
}
}
// The data channel must be created BEFORE createOffer() so it gets
// negotiated into the SDP.
this.channel = this.conn.createDataChannel('channelX');
this.channel.onmessage = e => this.logs.push('received =>'+ e.data);
this.channel.onopen = e => {
this.logs.push('connection OPENED!!!');
this.enabletestmessage = true;
};
// e.candidate === null signals ICE gathering is complete; only then is the
// finished localDescription relayed via the data service (guarded so it is
// sent exactly once).
this.conn.onicecandidate = async e=> {
if (e.candidate===null && !this.iceCandiSent){
this.iceCandiSent = true;
this.logs.push('new ICE candidate received- reprinting SDP'+JSON.stringify(this.conn?.localDescription));
await this.dataService.createOffer(this.data.callerid,this.data.calleeid,JSON.stringify(this.conn?.localDescription));
this.logs.push('offer set in db');
this.logs.push('waiting for answer...');
}
}
const offer = await this.conn.createOffer();
await this.conn?.setLocalDescription(offer);
this.logs.push('local description (offer) set');
}
create answer code
// Callee side of the handshake: apply the remote offer, push local tracks,
// wait for the offerer's data channel, and publish the answer SDP once ICE
// gathering completes (non-trickle, mirroring createOffer).
async createAnswer(offerSDP:string){
this.initiated = true;
this.conn = new RTCPeerConnection(this.servers);
if (this.conn)
{
// Append remote tracks to the caller's MediaStream as they arrive.
this.conn.ontrack = e => {
e.streams[0].getTracks().forEach(track => {
this.callerStream?.addTrack(track);
this.logs.push('received track:' + track.label);
})
}
if (this.calleeStream)
{
const s = this.calleeStream;
this.calleeStream.getTracks().forEach(track =>
{
this.conn?.addTrack(track,s);
this.logs.push('pushed track:' + track.label);
});
}
}
// Remote (offer) description must be set before creating the answer.
await this.conn.setRemoteDescription(JSON.parse(offerSDP));
this.logs.push('remote description (offer) set');
// Null candidate => gathering finished; relay the full answer SDP once.
this.conn.onicecandidate = async e => {
if (e.candidate === null && !this.iceCandiSent){
this.iceCandiSent=true;
this.logs.push('new ICE candidate received- reprinting SDP'+JSON.stringify(this.conn?.localDescription));
await this.dataService.updateAnswer(this.data.callerid,this.data.calleeid,JSON.stringify(this.conn?.localDescription));
this.logs.push('answer set in db');
}
}
// The answerer does not create a channel; it receives the offerer's.
this.conn.ondatachannel = e => {
this.channel = e.channel;
this.channel.onmessage = e => this.logs.push('received =>'+ e.data);
this.channel.onopen = e => {
this.logs.push('connection RECEIVED!!!');
this.enabletestmessage = true;
};
}
const answer = await this.conn.createAnswer();
await this.conn.setLocalDescription(answer);
this.logs.push('local description (answer) set');
}
server side code for retrieving ice servers from Twillio
// Server side: mint short-lived Twilio ICE credentials (STUN + TURN URLs).
// NOTE(review): excerpt only — the `await`/`return` must live inside an
// async function, and the angle-bracket placeholders are not valid JS.
const twilio = require('twilio');
const client = twilio(<MY ACCOUNT SID>,<MY AUTH TOKEN>);
const result = await client.tokens.create();
return result.iceServers; //this is set to this.servers in the code above
Everything works when I run two browser windows on my local machine. However, even after implementing TURN, it does not work between browsers in Nepal and the USA. The onopen event handlers on the data channels do not fire even though local and remote descriptions are set on both sides. What am I missing?
NOTE: signalling is done inside the onicecandidate event handler ( the line that calls dataService createOffer/updateAnswer methods)
I am learning about webRTC and am trying to create a really simple chat app for multiple peers. Everything works great when using devices in the same network, but when I try to access my site on mobile using 4g it doesn't seem to connect (or atleast send messages). I added a google stun server to my config, but that didn't solve the problem. Does anyone see what could possibly cause my trouble? I am not receiving any errors in chrome, but firefox does tell me: WebRTC: ICE failed, add a TURN server and see about:webrtc for more details.
// WebSocket signalling glue: routes offer/answer messages between the
// signalling server and the per-peer `peerConnection` wrappers stored in
// the global `peerConnections` map, keyed by sender channel name.
class socket
{
  constructor()
  {
    // Match ws/wss to the page's own protocol.
    this.socket = new WebSocket(`${window.location.protocol == "https:" ? "wss" : "ws"}://${window.location.host}`);
    this.socket.onmessage = e => this.messageHandler(e.data);
  }

  // Dispatch one signalling message by its `type` field.
  async messageHandler(data)
  {
    data = JSON.parse(data);
    let channelName = data.sender_channel_name;
    switch (data.type)
    {
      case "sendOffer":
        // Server asked us to initiate: create a connection and send our offer.
        peerConnections[channelName] = new peerConnection();
        const offer = await peerConnections[channelName].createOffer();
        this.socket.send(JSON.stringify({type: 'sendOffer', sdp: offer, 'sender_channel_name':channelName}));
        break;
      case "offer":
        // Remote offer received: build a connection and reply with our answer.
        peerConnections[channelName] = new peerConnection();
        let answer = await peerConnections[channelName].sendAnswer(data.sdp);
        // BUG FIX: the key was misspelled 'anwser', so the receiving side's
        // "answer" handler (which reads data.answer) never got the SDP and
        // the connection could not complete.
        this.socket.send(JSON.stringify({'type':'offer', 'sender_channel_name':channelName, 'answer':JSON.stringify(answer)}))
        break;
      case "answer":
        // Final step: apply the remote answer to our pending offer.
        peerConnections[channelName].setAnswer(data.answer);
        break;
    }
  }
}
// Wraps one RTCPeerConnection plus its chat data channel to a single remote
// peer. SDP is exchanged non-trickle: we wait for ICE gathering to complete
// and ship the full description in one signalling message.
class peerConnection
{
  constructor ()
  {
    // STUN only — NOTE(review): without a TURN server, peers behind
    // symmetric NATs (common on mobile/4G) cannot connect; Firefox's
    // "ICE failed, add a TURN server" warning points at exactly this.
    let peerConnectionConfig = {
      iceServers:[
        {urls:["stun:stun.l.google.com:19302"]}
      ]};
    this.pc = new RTCPeerConnection(peerConnectionConfig);
    // Answerer side receives the channel created by the offerer.
    this.pc.ondatachannel = e => {
      this.dc = e.channel;
      this.dc.onmessage = e => this.messageCallback(e.data);
    }
    // Drop this wrapper from the global registry once the peer goes away.
    this.pc.addEventListener("iceconnectionstatechange", (e) => ((pc) => {
      if(pc.pc.iceConnectionState == "disconnected") {
        delete peerConnections[Object.keys(peerConnections).find(key => peerConnections[key] === pc)];
      }
    })(this), false);
  }

  // Resolves with the complete localDescription once ICE gathering finishes.
  // BUG FIX: if gathering had already completed before this was called, the
  // original promise never resolved (the event was missed) — check the
  // current state first.
  waitToCompleteIceGathering() {
    if (this.pc.iceGatheringState === 'complete') {
      return Promise.resolve(this.pc.localDescription);
    }
    return new Promise(resolve => {
      this.pc.addEventListener('icegatheringstatechange', e => (e.target.iceGatheringState === 'complete') && resolve(this.pc.localDescription));
    });
  }

  // Offerer: create the data channel (must precede createOffer so it is in
  // the SDP), then return the gathered offer as a JSON string.
  async createOffer()
  {
    this.dc = this.pc.createDataChannel("channel");
    this.dc.onmessage = e => this.messageCallback(e.data);
    // BUG FIX: await offer creation and setLocalDescription instead of
    // leaving a floating promise racing the gathering listener.
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);
    const localDesc = await this.waitToCompleteIceGathering();
    return JSON.stringify(localDesc);
  }

  // Answerer: apply the remote offer, produce an answer, and return the
  // gathered localDescription object.
  async sendAnswer (sdp)
  {
    // BUG FIX: await both description steps (previously floating promises
    // whose completion order was unguaranteed).
    await this.pc.setRemoteDescription(JSON.parse(sdp));
    const answer = await this.pc.createAnswer();
    await this.pc.setLocalDescription(answer);
    return await this.waitToCompleteIceGathering();
  }

  // Offerer: apply the remote peer's answer to finish negotiation.
  setAnswer (sdp)
  {
    this.pc.setRemoteDescription(JSON.parse(sdp));
  }

  // Append an incoming chat message to the #mailbox element.
  messageCallback (data)
  {
    data = JSON.parse(data);
    var para = document.createElement("p");
    var node = document.createTextNode(data.message);
    para.appendChild(node);
    document.getElementById('mailbox').appendChild(para);
  }
}
// Broadcast a chat message over every open peer data channel.
window.sendData = (value) =>
{
  for (const connection in peerConnections)
  {
    try
    {
      peerConnections[connection].dc.send(JSON.stringify({"message":value}));
    }
    // BUG FIX: `catch (DOMException)` merely BINDS the caught value to a
    // local named "DOMException" (shadowing the global) — it does not
    // filter by type. Use an ordinary binding name.
    catch (err)
    {
      // This when someone refreshes page, but 5 seconds to update didn't pass yet.
      // Not really a problem.
    }
  }
}
let s = new socket();
Hey, I am using WebRTC for screen share. I am stuck at a point where I need the user's entire screen, but the browser offers the user more options, such as application windows and browser tabs. I want to check which option the user selected in the browser's popup; if it is not the entire screen, I can show an error message to the user. Please see this popup image.
// Capture the user's screen (video only), capping resolution and frame rate.
const constraints = {
audio: false,
video: {
width: { max: 1920 },
height: { max: 1080 },
frameRate: { max: 10 }
}
}
// Prompts the user to pick a screen/window/tab to share.
// NOTE(review): `await` here implies this excerpt runs inside an async
// function (or a module with top-level await).
const stream = await navigator.mediaDevices.getDisplayMedia(constraints);
After searching for a long time I found something that works for me. if it works for you that's great.
// Decide whether the user shared a full monitor (vs. an app window or tab).
// NOTE(review): excerpt from a function — the bare `return`s below are only
// valid inside one. Both sniffing signals are fragile: `InstallTrigger` is
// deprecated in Firefox and `window.chrome.webstore` has been removed from
// Chrome — verify on current browsers.
const isFirefox = typeof InstallTrigger !== 'undefined';
const isChrome = !!window.chrome && (!!window.chrome.webstore || !!window.chrome.runtime);
const videoTrack = mediaStream.getVideoTracks()[0];
if (isFirefox) {
// Firefox labels a whole-screen capture "Primary Monitor" — presumably
// locale/version dependent; confirm before relying on it.
if(videoTrack.label === "Primary Monitor"){
return true;
} else {
return false;
}
} else if (isChrome) {
// Chrome exposes displaySurface in the track settings; "monitor" means
// the entire screen was shared.
const videoSetting = videoTrack.getSettings();
if (videoSetting && videoSetting.displaySurface !== "monitor") {
return false;
} else {
return true;
}
}
you can check like this
const videoTrack = stream.getVideoTracks()[0];
console.log(videoTrack.getSettings());
In console, you will see 'displaySurface' property.
Also this MDN document will help you :)
Firefox multiple screens maybe:
if (isFirefox) {
const videoTrack = stream.getVideoTracks()[0];
return (videoTrack.label === "Primary Monitor" || -1 !== videoTrack.label.indexOf('Screen'))
} // else ...
Source: https://github.com/anoek/webrtc-group-chat-example/blob/master/client.html
I'm trying to modify this Webrtc example to add the ability of changing camera (Cross-browser support).
Normal usage works perfectly, after changing camera, failed in renegotiation.
1) Get a list of devices via navigator.mediaDevices.enumerateDevices()
2) Change local_media_stream after getting new stream
// Stop every track on the old stream, then swap in the newly captured one.
for (const track of local_media_stream.getTracks()) {
  track.stop();
}
local_media_stream = stream;
3) Trigger renegotiation function (Copied from line 132 of Source code)
// Re-run SDP negotiation after the local media changed (e.g. camera switch):
// create a fresh offer, set it as the local description, and relay it to the
// remote peer through the signalling socket (legacy callback-style WebRTC API,
// matching the rest of this example).
function renegotiate(){
  console.log("Creating RTC offer to ", peer_id);
  peer_connection.createOffer(
    function (local_description) {
      console.log("Local offer description is: ", local_description);
      peer_connection.setLocalDescription(local_description,
        function() {
          signaling_socket.emit('relaySessionDescription',
            {'peer_id': peer_id, 'session_description': local_description});
          console.log("Offer setLocalDescription succeeded");
        },
        // BUG FIX: `Alert` (capital A) is undefined — it would throw a
        // ReferenceError instead of reporting the failure. Use `alert`.
        function() { alert("Offer setLocalDescription failed!"); }
      );
    },
    function (error) {
      console.log("Error sending offer: ", error);
    });
}
I believe that my approaches are wrong, but I've tried many different ways found on google to edit the codes for renegotiation, however I'm not familiar to WebRTC and Socket.io, still can't make the thing works.
After changing the camera, the video shown on other participant just became a static image from video last frame.
Can anybody please help to point my mistake? Thanks in advance.
Previously I done it in the following way (an order is important).
Let's say you list all our available devices:
// Collect the deviceId of every media device the browser exposes.
// (Filled in asynchronously once enumerateDevices resolves.)
var devicesIds = [];
navigator.mediaDevices.enumerateDevices().then((allDevices) => {
  for (const mediaDevice of allDevices) {
    devicesIds.push(mediaDevice.deviceId);
  }
});
And now you want to switch:
1) Stop current tracks
// End the current capture: stop every audio/video track on localStream.
for (const track of localStream.getTracks()) {
  track.stop();
}
2) Obtain new stream
// Request a new capture from a specific camera (the second enumerated
// device) plus audio, then swap it into the active streams/senders.
var constraints = {video: {deviceId: devicesIds[1]}, audio: true};
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
  replaceTracks(stream);
}).catch(function(error) {
  // BUG FIX: the rejection was silently swallowed — permission denials and
  // missing-device errors were invisible. Surface them.
  console.error('getUserMedia failed:', error);
});
3) Replace tracks:
// Swap the active local stream for `newStream`: re-bind the media element,
// merge the new tracks into localStream, and update any RTP senders on the
// active peer connection so the remote side sees the new camera.
function replaceTracks(newStream) {
  // Detach the old stream from the media element before swapping.
  detachMediaStream(elementId);

  const incomingTracks = newStream.getTracks();
  for (const track of incomingTracks) {
    localStream.addTrack(track);
  }

  attachMediaStream(elementId, newStream);

  // Replace each sender's outgoing track with the new track of the same kind.
  const swapSenderTracks = function (peer) {
    peer.getSenders().map(function (sender) {
      sender.replaceTrack(incomingTracks.find(function (track) {
        return track.kind === sender.track.kind;
      }));
    });
  };
  // optionally, if you have active peer connections:
  swapSenderTracks(peerConnection);
}
// Detach whatever stream the media element with the given id is playing:
// pause it and clear srcObject (modern) or src (legacy fallback).
// BUG FIX: `function detachMediaStream = function(id)` is a syntax error —
// use a plain function declaration.
function detachMediaStream(id) {
  var elem = document.getElementById(id);
  if (elem) {
    elem.pause();
    // Browsers supporting srcObject have it as null (typeof 'object') even
    // when unset, so this branch selects the modern path.
    if (typeof elem.srcObject === 'object') {
      elem.srcObject = null;
    } else {
      elem.src = '';
    }
  }
}
// Attach `stream` to the media element with the given id and start playback
// once metadata is available. Throws if the element does not exist.
// BUG FIX: `function attachMediaStream = function(id, stream)` is a syntax
// error — use a plain function declaration.
function attachMediaStream(id, stream) {
  var elem = document.getElementById(id);
  if (elem) {
    // srcObject is the modern path; createObjectURL is the legacy fallback.
    if (typeof elem.srcObject === 'object') {
      elem.srcObject = stream;
    } else {
      elem.src = window.URL.createObjectURL(stream);
    }
    elem.onloadedmetadata = function (e) {
      elem.play();
    };
  } else {
    throw new Error('Unable to attach media stream');
  }
}