How to mute microphone (but not incoming sound)? - javascript

When using
const audioIncoming = document.createElement('audio');
for calls in the browser, what is the proper way of muting the microphone but not the incoming audio from the network? Or is this even possible?
Here's a complete code example:
const audioProgress = document.createElement('audio');
const audioIncoming = document.createElement('audio');
/*** Define listeners for managing calls ***/
var callListeners = {
    onCallProgressing: function(call) {
        audioProgress.src = 'style/ringback.wav';
        audioProgress.loop = true;
        audioProgress.play();
        $('div#callLog').append('<div id="stats">Ringing...</div>');
    },
    onCallEstablished: function(call) {
        audioIncoming.srcObject = call.incomingStream;
        audioIncoming.play();
        audioProgress.pause();
        // Report call stats
        var callDetails = call.getDetails();
        $('div#callLog').append('<div id="stats">Answered at: '+(callDetails.establishedTime && new Date(callDetails.establishedTime))+'</div>');
    },
    onCallEnded: function(call) {
        audioProgress.pause();
        audioIncoming.srcObject = null;
        $('button').removeClass('incall');
        $('input#phoneNumber').removeAttr('disabled');
        // Report call stats
        var callDetails = call.getDetails();
        $('div#callLog').append('<div id="stats">Ended: '+new Date(callDetails.endedTime)+'</div>');
        $('div#callLog').append('<div id="stats">Duration (s): '+callDetails.duration+'</div>');
        $('div#callLog').append('<div id="stats">End cause: '+call.getEndCause()+'</div>');
        if (call.error) {
            $('div#callLog').append('<div id="stats">Failure message: '+call.error.message+'</div>');
        }
    }
};
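For reference, the incoming audio lives in the audioIncoming element, while the microphone lives in the local MediaStream captured with getUserMedia, so the usual approach is to disable the local audio tracks without touching playback. A minimal sketch, assuming you keep a reference to the outgoing stream (the variable name localStream is an assumption, not part of the snippet above):

// Sketch: mute/unmute only the outgoing microphone.
// Assumes `localStream` is the MediaStream from getUserMedia that the call sends.
function setMicMuted(muted) {
    localStream.getAudioTracks().forEach(function(track) {
        track.enabled = !muted; // disabling the track silences what is sent
    });
}
setMicMuted(true);    // the other side can no longer hear you...
audioIncoming.play(); // ...but incoming audio keeps playing untouched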

Related

Microphone on/off button not working after using screen sharing

I am building a video conferencing web application using WebRTC and I have implemented features for toggling the camera, microphone, and screen sharing. The camera and screen sharing features are working as expected, but I am having an issue with the microphone button.
The issue is that after using screen sharing and then stopping it, the microphone on/off button is not working properly. I am getting an error in the console saying
"Cannot read properties of undefined (reading 'enabled')".
Before using screen sharing, the microphone button works fine.
Here's my current code for handling the buttons:
let screenStream = null;
let localStream = null;
let audioTrack = null;
let pc = null;

// Toggle screen sharing on/off
document.getElementById("share-screen-btn").addEventListener("click", async () => {
    try {
        const localVideo = document.getElementById("localVideo");
        const displayMediaOptions = {
            video: true,
            audio: true,
        };
        if (!screenStream) {
            screenStream = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
            const videoTracks = screenStream.getVideoTracks();
            await pc.getSenders().find(sender => sender.track.kind === 'video').replaceTrack(videoTracks[0], videoTracks[0].clone());
            localVideo.srcObject = screenStream;
            document.getElementById("share-screen-btn").classList.remove("btn-danger");
            document.getElementById("share-screen-btn").classList.add("btn-primary");
            // Disable audio track from localStream
            if (localStream) {
                audioTrack = localStream.getAudioTracks()[0];
                audioTrack.enabled = false;
            }
        } else {
            const localVideoStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
            const sender = pc.getSenders().find(sender => sender.track.kind === 'video');
            const localVideoTrack = localVideoStream.getVideoTracks()[0];
            const localAudioTrack = localVideoStream.getAudioTracks()[0];
            const localStream = new MediaStream([localVideoTrack, localAudioTrack]);
            await sender.replaceTrack(localVideoTrack);
            localVideo.srcObject = localStream;
            document.getElementById("share-screen-btn").classList.remove("btn-primary");
            document.getElementById("share-screen-btn").classList.add("btn-danger");
            screenStream.getTracks().forEach(track => track.stop());
            screenStream = null;
            // Set audioTrack from localAudioTrack
            audioTrack = localAudioTrack;
        }
    } catch (e) {
        console.error("Error sharing screen: ", e);
    }
});

// Toggle microphone on/off
document.getElementById("mute-audio-btn").addEventListener("click", () => {
    let localStream = document.getElementById("localVideo").srcObject;
    if (localStream) {
        let audioTrack = localStream.getAudioTracks()[0];
        let enabled = audioTrack.enabled;
        if (enabled) {
            audioTrack.enabled = false;
            document.getElementById("mute-audio-btn").innerHTML = '<i class="fa-solid fa-microphone-slash"></i>';
        } else {
            audioTrack.enabled = true;
            document.getElementById("mute-audio-btn").innerHTML = '<i class="fa-solid fa-microphone"></i>';
        }
    }
});

// Toggle camera on/off
document.getElementById("mute-video-btn").addEventListener("click", () => {
    let localStream = document.getElementById("localVideo").srcObject;
    if (localStream) {
        let videoTrack = localStream.getVideoTracks()[0];
        let enabled = videoTrack.enabled;
        if (enabled) {
            videoTrack.enabled = false;
            document.getElementById("mute-video-btn").innerHTML = '<i class="fa fa-video-slash"></i>';
        } else {
            videoTrack.enabled = true;
            document.getElementById("mute-video-btn").innerHTML = '<i class="fa fa-video"></i>';
        }
    }
});
If I see it correctly, then audioTrack or videoTrack is undefined at the time this happens.
Try to console.log() the arrays returned by localStream.getAudioTracks() or screenStream.getVideoTracks().
You may work your way up the chain from there.
And it seems that to use audio while sharing the screen, you are supposed to use addTrack.
Have a look here:
Is it possible broadcast audio with screensharing with WebRTC
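In the meantime, here is a hedged sketch of a more defensive mute handler along those lines: it logs the track list and bails out instead of throwing when the stream on the video element has no audio track (which is what happens once screen sharing replaces it with a display-only stream):

// Sketch: defensive mute handler; checks for an audio track before toggling.
document.getElementById("mute-audio-btn").addEventListener("click", () => {
    const stream = document.getElementById("localVideo").srcObject;
    const tracks = stream ? stream.getAudioTracks() : [];
    console.log("audio tracks on localVideo stream:", tracks);
    if (tracks.length === 0) {
        console.warn("No audio track to toggle; was it replaced by the screen stream?");
        return;
    }
    tracks[0].enabled = !tracks[0].enabled;
});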

Browser doesn't ask for mic permission

I want the browser to ask for permission to use the mic.
It asks for permission on PC but doesn't on mobile.
Can someone tell me what I did wrong?
navigator.mediaDevices.getUserMedia({ audio: true })
    .then(stream => {
        var mediaRecorder = new MediaRecorder(stream)
        mediaRecorder.start()
        var mediaChunks = []
        mediaRecorder.addEventListener("dataavailable", function (e) {
            mediaChunks.push(e.data)
        })
        mediaRecorder.addEventListener("stop", function () {
            var mediaBlob = new Blob(mediaChunks)
            var url = URL.createObjectURL(mediaBlob)
            var audio = document.createElement("AUDIO")
            audio.src = url
            audio.controls = true
            //audio.classList.add("ay")
            audio.setAttribute('id', 'ay')
            big.appendChild(audio)
        })
        setTimeout(function () {
            mediaRecorder.stop()
            alert("stopped")
        }, 3000)
    })
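One thing worth checking (an assumption about the setup, since the question doesn't say how the page is served): browsers only expose getUserMedia in a secure context, so a page served over plain http will silently lack the API on mobile instead of prompting. A quick diagnostic sketch:

// Sketch: getUserMedia requires a secure context (https or localhost).
if (!window.isSecureContext) {
    console.warn("Page is not a secure context; the browser will not prompt for the mic.");
}
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
    console.warn("navigator.mediaDevices.getUserMedia is unavailable in this context.");
}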

How can I open multiple WebSocket streams

I am trying to stream data from the Binance WebSocket API, I have it working for one symbol at a time.
if ("WebSocket" in window) {
//open websocket
var symbols = getSymbol();
//console.log(symbols);
symbols.forEach(function(entry) {
console.log(entry);
})
var ws = new WebSocket("wss://stream.binance.com:9443/ws/btcusdt#miniTicker")
ws.onopen = function() {
console.log("Binance connected...");
};
ws.onmessage = function(evt) {
var r_msg = evt.data;
var jr_msg = JSON.parse(r_msg);
}
ws.onclose = function() {
console.log("Binance disconnected");
}
} else {
alert("WebSocket is NOT supported");
}
The line var symbols = getSymbol(); creates an array of 431 symbols. My logic (and what I am trying to achieve) is to add the new WebSocket() inside the forEach and stream price data from all of the currency pairs.
I'm not sure if this is possible at all, or what a better solution would be, but I wish to stream and display live data from the API.
Your idea about putting the new WebSocket() inside the forEach should work. However,
I'm not sure you are allowed to open hundreds of web sockets from the same tab, and there could also be some performance issues related to it.
According to the API documentation, it is possible to open just one web socket that will send you data from a list of streams, or even from all streams. Just construct the URLs like this:
Specific streams: wss://stream.binance.com:9443/ws/stream1/stream2/stream3
All streams: wss://stream.binance.com:9443/ws/!miniTicker@arr
Here is a code sample that takes these things into consideration. By default this code uses the URL for all streams, but it also has the code (commented out) that uses specific streams.
let streams = [
    "ethbtc@miniTicker", "bnbbtc@miniTicker", "wavesbtc@miniTicker", "bchabcbtc@miniTicker",
    "bchsvbtc@miniTicker", "xrpbtc@miniTicker", "tusdbtc@miniTicker", "eosbtc@miniTicker",
    "trxbtc@miniTicker", "ltcbtc@miniTicker", "xlmbtc@miniTicker", "bcptbtc@miniTicker",
    "adabtc@miniTicker", "zilbtc@miniTicker", "xmrbtc@miniTicker", "stratbtc@miniTicker",
    "zecbtc@miniTicker", "qkcbtc@miniTicker", "neobtc@miniTicker", "dashbtc@miniTicker", "zrxbtc@miniTicker"
];
let trackedStreams = [];
//let ws = new WebSocket("wss://stream.binance.com:9443/ws/" + streams.join('/'));
let ws = new WebSocket("wss://stream.binance.com:9443/ws/!miniTicker@arr");
ws.onopen = function() {
    console.log("Binance connected...");
};
ws.onmessage = function(evt) {
    try {
        let msgs = JSON.parse(evt.data);
        if (Array.isArray(msgs)) {
            for (let msg of msgs) {
                handleMessage(msg);
            }
        } else {
            handleMessage(msgs)
        }
    } catch (e) {
        console.log('Unknown message: ' + evt.data, e);
    }
}
ws.onclose = function() {
    console.log("Binance disconnected");
}
function handleMessage(msg) {
    const stream = msg.s;
    if (trackedStreams.indexOf(stream) === -1) {
        document.getElementById('streams').innerHTML += '<br/>' + stream + ': <span id="stream_' + stream + '"></span>';
        trackedStreams.push(stream);
        document.getElementById('totalstreams').innerText = trackedStreams.length;
    }
    document.getElementById('stream_' + stream).innerText = msg.v;
}
<span id="totalstreams"></span> streams tracked<br/>
Total traded base asset volume:<br/>
<div id="streams"></div>

peer.js webrtc >> changing stream in runtime

I am developing a cross-platform application with peer.js and WebRTC.
I am using Cordova and Crosswalk.
Additionally I am using the webrtc adapter (https://github.com/webrtc/adapter).
My code is based on the webrtc-crosswalk sample (https://github.com/crosswalk-project/crosswalk-samples).
I want to change the video source of the stream without creating a new call.
My approach is to remove the tracks of the stream and add the new tracks from the other camera.
The result is that the local video shows the right content, but the callee's remote video freezes.
Probably I am making a very basic mistake, but I can't find a solution.
I am looking forward to your answers and solutions.
My main code file is attached.
// Necessary to enable the dialog functionality
document.addEventListener("deviceready", onDeviceReady, false);
function onDeviceReady() {
    console.log(navigator.notification);
    // Now safe to use device APIs
}
document.addEventListener('DOMContentLoaded', function () {
    // PeerJS server location
    var SERVER_IP = '172.20.37.147';
    var SERVER_PORT = 9000;
    // DOM elements manipulated as user interacts with the app
    var messageBox = document.querySelector('#messages');
    var callerIdEntry = document.querySelector('#caller-id');
    var connectBtn = document.querySelector('#connect');
    var recipientIdEntry = document.querySelector('#recipient-id');
    var dialBtn = document.querySelector('#dial');
    var remoteVideo = document.querySelector('#remote-video');
    var localVideo = document.querySelector('#local-video');
    var cameraTurn = document.querySelector('#camera_turn');
    var stop = document.querySelector('#stop');
    // the default facing direction
    var dir = "environment";
    // the ID set for this client
    var callerId = null;
    // PeerJS object, instantiated when this client connects with its
    // caller ID
    var peer = null;
    // the local video stream captured with getUserMedia()
    var localStream = null;
    // DOM utilities
    var makePara = function (text) {
        var p = document.createElement('p');
        p.innerText = text;
        return p;
    };
    var addMessage = function (para) {
        if (messageBox.firstChild) {
            messageBox.insertBefore(para, messageBox.firstChild);
        }
        else {
            messageBox.appendChild(para);
        }
    };
    var logError = function (text) {
        var p = makePara('ERROR: ' + text);
        p.style.color = 'red';
        addMessage(p);
    };
    var logMessage = function (text) {
        addMessage(makePara(text));
    };
    // get the local video and audio stream and show preview in the
    // "LOCAL" video element
    // successCb: has the signature successCb(stream); receives
    // the local video stream as an argument
    var getLocalStream = function (successCb, ask = true) {
        if (localStream && successCb) {
            successCb(localStream);
        }
        else {
            navigator.mediaDevices.getUserMedia({ audio: true, video: { facingMode: dir } })
                .then(function (stream) {
                    if (localStream == null) {
                        /* use the stream */
                        localStream = stream;
                    }
                    else {
                        stream.getTracks().forEach(function (track) {
                            localStream.addTrack(track);
                        });
                    }
                    localVideo.src = window.URL.createObjectURL(localStream);
                    if (successCb) {
                        successCb(stream);
                    }
                })
                .catch(function (err) {
                    /* handle the error */
                    logError('failed to access local camera');
                    logError(err.message);
                });
        }
    };
    // set the "REMOTE" video element source
    var showRemoteStream = function (stream) {
        remoteVideo.src = window.URL.createObjectURL(stream);
    };
    // set caller ID and connect to the PeerJS server
    var connect = function () {
        callerId = callerIdEntry.value;
        if (!callerId) {
            logError('please set caller ID first');
            return;
        }
        try {
            // create connection to the ID server
            peer = new Peer(callerId, { host: SERVER_IP, port: SERVER_PORT });
            // hack to get around the fact that if a server connection cannot
            // be established, the peer and its socket property both still have
            // open === true; instead, listen to the wrapped WebSocket
            // and show an error if its readyState becomes CLOSED
            peer.socket._socket.onclose = function () {
                logError('no connection to server');
                peer = null;
            };
            // get local stream ready for incoming calls once the wrapped
            // WebSocket is open
            peer.socket._socket.onopen = function () {
                getLocalStream();
            };
            // handle events representing incoming calls
            peer.on('call', answer);
        }
        catch (e) {
            peer = null;
            logError('error while connecting to server');
        }
    };
    // make an outgoing call
    var dial = function () {
        if (!peer) {
            logError('please connect first');
            return;
        }
        if (!localStream) {
            logError('could not start call as there is no local camera');
            return;
        }
        var recipientId = recipientIdEntry.value;
        if (!recipientId) {
            logError('could not start call as no recipient ID is set');
            return;
        }
        getLocalStream(function (stream) {
            logMessage('outgoing call initiated');
            var call = peer.call(recipientId, stream);
            call.on('stream', showRemoteStream);
            call.on('error', function (e) {
                logError('error with call');
                logError(e.message);
            });
        });
    };
    // answer an incoming call
    var answer = function (call) {
        if (!peer) {
            logError('cannot answer a call without a connection');
            return;
        }
        if (!localStream) {
            logError('could not answer call as there is no localStream ready');
            return;
        }
        // Asks user to answer the call
        navigator.notification.confirm(
            "Receive a call?",
            function (buttonIndex) {
                if (buttonIndex === 1) {
                    // user clicked "yes"
                    logMessage('incoming call answered');
                    call.on('stream', showRemoteStream);
                    call.answer(localStream);
                }
                else {
                    // user clicked "no"
                    logMessage('incoming call denied');
                }
            },
            'Incoming Call',
            ['Yes', 'No']
        );
    };
    function turnDirection() {
        if (dir === "user")
            return "environment";
        else
            return "user";
    }
    var turnCamera = function (call) {
        dir = turnDirection();
        localStream.getTracks().forEach(function (track) {
            track.stop();
            localStream.removeTrack(track);
        });
        getLocalStream(false);
    };
    var stopCall = function (call) { };
    // wire up button events
    connectBtn.addEventListener('click', connect);
    dialBtn.addEventListener('click', dial);
    cameraTurn.addEventListener('click', turnCamera);
    stop.addEventListener('click', stopCall);
});
If you remove and then add a new track to a PeerConnection, you need to renegotiate the offer/answer to get it working. I would recommend using the replaceTrack API to avoid the renegotiation problem while changing the camera input.
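A minimal sketch of that approach, assuming you keep a reference to the active call and that your peer.js version exposes the underlying RTCPeerConnection as call.peerConnection (treat both as assumptions about this codebase):

// Sketch: swap the camera with replaceTrack instead of removing/adding tracks.
var switchCamera = function (call) {
    dir = turnDirection();
    navigator.mediaDevices.getUserMedia({ audio: true, video: { facingMode: dir } })
        .then(function (stream) {
            var newTrack = stream.getVideoTracks()[0];
            // find the sender currently transmitting video and swap its track in place
            var sender = call.peerConnection.getSenders()
                .find(function (s) { return s.track && s.track.kind === 'video'; });
            return sender.replaceTrack(newTrack).then(function () {
                // stop the old track and update the local preview
                localStream.getVideoTracks().forEach(function (track) {
                    track.stop();
                    localStream.removeTrack(track);
                });
                localStream.addTrack(newTrack);
                localVideo.srcObject = localStream;
            });
        })
        .catch(function (err) { logError(err.message); });
};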

Streaming microphone through sockets using socket IO and Node JS

I am making an application where I want the users to use their mics (on their phones) and be able to talk to each other in the game lobby. However, this has proven to be more difficult than expected.
I am using Node.js, socket.io and socket.io-stream.
On my client I am using the Web Audio API to take my microphone's input (I am not really worried about this part all that much, because I am going to make this a native iOS app):
navigator.getUserMedia = (navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia);
if (navigator.getUserMedia) {
    navigator.getUserMedia(
        // constraints
        {
            video: false,
            audio: true
        },
        function(localMediaStream) {
            var video = document.querySelector('audio');
            video.src = window.URL.createObjectURL(localMediaStream);
            lcm = localMediaStream;
            var audioContext = window.AudioContext;
            var context = new audioContext();
            var audioInput = context.createMediaStreamSource(localMediaStream);
            var bufferSize = 2048;
            // create a javascript node
            var recorder = context.createScriptProcessor(bufferSize, 1, 1);
            // specify the processing function
            recorder.onaudioprocess = recorderProcess;
            // connect stream to our recorder
            audioInput.connect(recorder);
            // connect our recorder to the previous destination
            recorder.connect(context.destination);
        },
        // errorCallback
        function(err) {
            console.log("The following error occurred: " + err);
            $("video").remove();
            alert("##");
        }
    );
} else {
    console.log("getUserMedia not supported");
}

function recorderProcess(e) {
    var left = e.inputBuffer.getChannelData(0);
    window.stream.write(convertFloat32ToInt16(left));
    //var f = $("#aud").attr("src");
    var src = window.URL.createObjectURL(lcm);
    ss(socket).emit('file', src, {size: src.size});
    ss.createBlobReadStream(src).pipe(window.stream);
    //ss.createReadStream(f).pipe(window.stream);
}

function convertFloat32ToInt16(buffer) {
    // clamp each float sample and scale it to the 16-bit signed range
    l = buffer.length;
    buf = new Int16Array(l);
    while (l--) {
        buf[l] = Math.min(1, buffer[l]) * 0x7FFF;
    }
    return buf.buffer;
}
ss(socket).on('back', function(stream, data) {
    //console.log(stream);
    var video = document.querySelector('audio');
    video.src = window.URL.createObjectURL(stream);
    console.log("getting mic data");
});
With this I can successfully listen to myself speak on the microphone. I am using the stream socket to create a blob to upload to my server...
index.ss(socket).on('file', function(stream, data) {
    console.log("getting stream");
    var filename = index.path.basename(data.name);
    //var myfs = index.fs.createWriteStream(filename);
    var fileWriter = new index.wav.FileWriter('demo.wav', {
        channels: 1,
        sampleRate: 48000,
        bitDepth: 16
    });
    var streams = index.ss.createStream();
    streams.pipe(fileWriter);
    index.ss(socket).emit('back', fileWriter, {size: fileWriter.size});
});
I cannot get the stream to write to a file, or even to a temporary buffer, and then stream it back to a client so I can play or "stream" the audio in real time. After a while the server crashes, saying that the pipe is not writable.
Has anyone else encountered this?
By using the SFMediaStream library you can use socket.io and a Node.js server for live-streaming your microphone from a browser. But this library still needs some improvement before it's ready for production.
For the presenter
var mySocket = io("/", { transports: ['websocket'] });

// Set latency to 100ms (equal with streamer)
var presenterMedia = new ScarletsMediaPresenter({
    audio: {
        channelCount: 1,
        echoCancellation: false
    }
}, 100);

// Every new client streamer must receive this header buffer data
presenterMedia.onRecordingReady = function(packet) {
    mySocket.emit('bufferHeader', packet);
}

// Send buffer to the server
presenterMedia.onBufferProcess = function(streamData) {
    mySocket.emit('stream', streamData);
}

presenterMedia.startRecording();
For the streamer
var mySocket = io("/", { transports: ['websocket'] });

// Set latency to 100ms (equal with presenter)
var audioStreamer = new ScarletsAudioBufferStreamer(100);
audioStreamer.playStream();

// Buffer header must be received first
mySocket.on('bufferHeader', function(packet) {
    audioStreamer.setBufferHeader(packet);
});

// Receive buffer and play it
mySocket.on('stream', function(packet) {
    // audioStreamer.realtimeBufferPlay(packet);
    audioStreamer.receiveBuffer(packet);
});

// Request buffer header
mySocket.emit('requestBufferHeader', '');
Or you can test it from your localhost with this example
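The snippets above only show the browser side. For context, a minimal Node.js relay sitting between presenter and streamers might look like the sketch below; the event names come from the snippets above, but the server shape itself is an assumption, not part of SFMediaStream:

// Sketch: socket.io relay for the presenter/streamer snippets above (assumed server shape).
const io = require('socket.io')(3000);
let bufferHeader = null; // cache the last header from the presenter

io.on('connection', function(socket) {
    // presenter announces the buffer header; cache it and fan it out
    socket.on('bufferHeader', function(packet) {
        bufferHeader = packet;
        socket.broadcast.emit('bufferHeader', packet);
    });
    // relay audio buffers from the presenter to every streamer
    socket.on('stream', function(packet) {
        socket.broadcast.emit('stream', packet);
    });
    // late joiners ask for the cached header
    socket.on('requestBufferHeader', function() {
        if (bufferHeader) socket.emit('bufferHeader', bufferHeader);
    });
});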
