I'm making an audio transcription script for my webpage, and I want it to stop listening to the user when it detects silence. I can detect the silence, but I can't remove or turn off the 'stopped_speaking' event handler. Is there a way to do that? If not, how can I solve my problem? Thanks.
Here is my code:
function bg_startRecording() {
    navigator.mediaDevices.getUserMedia({
        video: false,
        audio: true
    }).then(async function(stream) {
        recorder = RecordRTC(stream, {
            type: 'audio',
            mimeType: 'audio/wav',
            recorderType: StereoAudioRecorder,
            disableLogs: false,
            numberOfAudioChannels: 1,
        });
        recorder.startRecording();
        var options = {};
        speechEvents = hark(stream, options);
        speechEvents.on('stopped_speaking', function() {
            speechEvents.off('stopped_speaking'); // <----- throws an error
            bg_stopRecording(); // function to stop recording and transcribe
        });
    });
};
The error:
main.js:49 Uncaught TypeError: speechEvents.off is not a function
at Object.stopped_speaking (main.js:49)
at harker.emit (hark.js:16)
at hark.js:109
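One possible workaround, sketched below under the assumption that the harker returned by hark exposes the stop() method from its README: rather than unsubscribing the single listener (the emitter here has no off()), guard against repeated events and tear the harker down inside the handler.
var silenceHandled = false;

speechEvents.on('stopped_speaking', function() {
    if (silenceHandled) return;   // ignore any further 'stopped_speaking' events
    silenceHandled = true;
    speechEvents.stop();          // shuts hark down so nothing else fires
    bg_stopRecording();           // stop recording and transcribe
});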
I simply want to play audio coming in from the microphone directly to the output, using the code below. But there is a lag of about 0.2 seconds. Is there a way to reduce this delay?
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;

var aCtx;
var analyser;
var microphone;

if (navigator.getUserMedia) {
    navigator.getUserMedia(
        {audio: true},
        function(stream) {
            aCtx = new AudioContext();
            microphone = aCtx.createMediaStreamSource(stream);
            var destination = aCtx.destination;
            microphone.connect(destination);
        },
        function() { console.log("Error 003."); }
    );
}
You can disable any pre-processing, which should reduce the delay to a minimum.
Instead of this ...
navigator.getUserMedia({
    audio: true
})
... you would then write this ...
navigator.mediaDevices.getUserMedia({
    audio: {
        autoGainControl: false,
        echoCancellation: false,
        noiseSuppression: false
    }
})
... to disable all pre-processing.
Please note that I also used navigator.mediaDevices.getUserMedia instead of navigator.getUserMedia. Usage of the latter is deprecated.
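Putting that together with the question's code, here is a sketch of the loopback with pre-processing disabled. The latencyHint option is an extra, optional hint to the browser and not something the constraints above require:
navigator.mediaDevices.getUserMedia({
    audio: {
        autoGainControl: false,
        echoCancellation: false,
        noiseSuppression: false
    }
}).then(function(stream) {
    // 'interactive' asks the browser for the lowest practical output latency
    var aCtx = new AudioContext({ latencyHint: 'interactive' });
    var microphone = aCtx.createMediaStreamSource(stream);
    microphone.connect(aCtx.destination);
}).catch(function(err) {
    console.log("Error 003.", err);
});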
I am currently trying to improve my JS skills by going through Wes Bos's 30 Days of JavaScript. (It is really fun!) I'm currently on Day 19, which is about using JS to access the webcam and then add effects using CSS.
I was successfully able to set up a local server, and here is my code so far:
function getVideo() {
    navigator.mediaDevices.getUserMedia({ video: true, audio: false })
        .then(localMediaStream => {
            console.log(localMediaStream);
            video.src = window.URL.createObjectURL(localMediaStream);
            video.play();
        })
        .catch(err => {
            console.error(`Web camera access is not enabled. To resolve, reload the page and allow access.`, err);
        });
}

getVideo();
However, I am getting this error:
TypeError: URL.createObjectURL: Argument 1 is not valid for any of the 1-argument overloads.
getVideo http://localhost/19-webcam-fun/scripts.js:12
promise callback*getVideo http://localhost/19-webcam-fun/scripts.js:10
<anonymous> http://localhost/19-webcam-fun/scripts.js:27
I don't know if this helps, but console.log(localMediaStream) results in the following:
MediaStream { id: "{97c3d27e-404e-4d14-b1d2-2a9ebbf09137}", active: true, onaddtrack: null,
onremovetrack: null }
active: true
id: "{97c3d27e-404e-4d14-b1d2-2a9ebbf09137}"
onaddtrack: null
onremovetrack: null
<prototype>: MediaStreamPrototype { getAudioTracks: getAudioTracks(), getVideoTracks:
getVideoTracks(), getTracks: getTracks(), … }
I would really appreciate it if someone could help me understand this a little better! Thanks!
Try:
    video.srcObject = localMediaStream;
if you're on Firefox. (Current Chrome needs the same change, since passing a MediaStream to URL.createObjectURL has been removed.)
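A sketch of the handler with that change applied, assuming the same video element reference as in the question; the createObjectURL branch is only a fallback for older browsers:
function getVideo() {
    navigator.mediaDevices.getUserMedia({ video: true, audio: false })
        .then(localMediaStream => {
            if ('srcObject' in video) {
                // Modern browsers: attach the stream directly
                video.srcObject = localMediaStream;
            } else {
                // Legacy fallback only; removed from current browsers
                video.src = window.URL.createObjectURL(localMediaStream);
            }
            video.play();
        })
        .catch(err => {
            console.error('Web camera access is not enabled. To resolve, reload the page and allow access.', err);
        });
}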
When sending a message using WebRTC sendDirectlyToAll, the message is never received the first time, but it is every time after that.
I've stripped the code down to a very simple state now, but the behaviour is still the same. Does anyone have a clue why this is happening?
Here is the code:
var webrtc = new SimpleWebRTC({
    localVideoEl: 'localVideo',
    remoteVideosEl: 'remoteVideos',
    autoRequestMedia: false,
    media: {
        video: true,
        audio: false
    },
    localVideo: {
        autoplay: true,
        mirror: true,
        muted: true
    }
});

$("#chat-send-button").on("click", function (e) {
    sendMessage();
});

function sendMessage() {
    console.log("sendMessage");
    const chatMessage = $("#chat-message-input");
    webrtc.sendDirectlyToAll(
        "chat",
        "info", {
            "chatmessage": chatMessage.val()
        }
    );
    chatMessage.val("");
}

webrtc.on("channelMessage", function (peer, channel, data) {
    console.log(peer);
    console.log(channel);
    console.log("data", data);
    $("#chat-message-container").text(data.payload.chatmessage);
});
You probably need the WebRTC connection to be established before allowing the user to send a message. Do you make use of the readyToCall event described in the documentation? https://github.com/SimpleWebRTC/SimpleWebRTC#3-tell-it-to-join-a-room-when-ready
(A link to an editable, runnable code snippet might help.)
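As a rough sketch, using the readyToCall and createdPeer events from the SimpleWebRTC docs (the room name is just a placeholder), you could keep the send button disabled until there is actually a peer to receive the message:
// Don't allow sending until a peer connection exists
$("#chat-send-button").prop("disabled", true);

webrtc.on("readyToCall", function () {
    webrtc.joinRoom("my-chat-room"); // placeholder room name
});

webrtc.on("createdPeer", function (peer) {
    // A peer connection now exists, so sendDirectlyToAll has somewhere to go
    $("#chat-send-button").prop("disabled", false);
});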
I have downloaded this library and put it into the "lib" folder in my project.
Then I add it to the controller of my view, where I want to call it when clicking a button, as described in the documentation:
sap.ui.define([
    "sap/ui/core/mvc/Controller",
    "Test_ScreenRecordingTest_ScreenRecording/lib/RecordRTC"
], function(Controller, RecordRTC) {
    "use strict";

    return Controller.extend("Test_ScreenRecordingTest_ScreenRecording.controller.View1", {

        onStartRecording: function() {
            debugger;
            var mediaConstraints = { video: true, audio: true };
            navigator.mediaDevices.getUserMedia(mediaConstraints).then(this.successCallback.bind(this)).catch(this.errorCallback);
        },

        successCallback: function(stream) {
            // RecordRTC usage goes here
            var options = {
                mimeType: 'video/webm', // or video/webm\;codecs=h264 or video/webm\;codecs=vp9
                audioBitsPerSecond: 128000,
                videoBitsPerSecond: 128000,
                bitsPerSecond: 128000 // if this line is provided, skip above two
            };
            //jQuery.sap.require("Test_ScreenRecordingTest_ScreenRecording.lib.RecordRTC");
            this.recordRTC = RecordRTC(stream, options);
            this.recordRTC.startRecording();
        },

        errorCallback: function(error) {
            console.log(error);
            debugger;
        },

        onStopRecording: function() {
            this.recordRTC.stopRecording(function(audioVideoWebMURL) {
                video.src = audioVideoWebMURL;
                var recordedBlob = this.recordRTC.getBlob();
                debugger;
                this.recordRTC.getDataURL(function(dataURL) {
                    debugger;
                });
            });
        }
    });
});
If I don't use the RecordRTC variable, I can see it in the debugger. If I use it, it appears as "undefined", so I can never call it.
Could you please help?
EDIT 09-Feb-2018: Solved by declaring a new variable in the Controller extension:
return Controller.extend("Test_ScreenRecordingTest_ScreenRecording.controller.View1", {

    // this line solved the issue
    RecordRTC: RecordRTC,

    onStartRecording: function() {
        debugger;
        var mediaConstraints = { video: true, audio: true };
        navigator.mediaDevices.getUserMedia(mediaConstraints).then(this.successCallback.bind(this)).catch(this.errorCallback);
    },
Thank you in advance
The dependency string in your code looks strange:
"Test_ScreenRecordingTest_ScreenRecording/lib/RecordRTC".
Can it be a typo?
Anyway, the dependency path should be like this: "<app ID from manifest.json>/lib/RecordRTC".
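For illustration only, with a hypothetical app ID of "Test_ScreenRecording" in manifest.json, the define block would look like this:
sap.ui.define([
    "sap/ui/core/mvc/Controller",
    "Test_ScreenRecording/lib/RecordRTC" // hypothetical app ID; use the one from your manifest.json
], function(Controller, RecordRTC) {
    "use strict";
    // ... controller as before ...
});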
I am trying to set the getUserMedia video constraints, such as min/max frame rates and resolutions, in my PeerJS WebRTC application, which is a simple peer-to-peer chat application. I have been trying to integrate them into my application, but it seems to break it. Any help would be greatly appreciated; other online tutorials look different from my app setup. Down in function step1 is where I have been trying to set the constraints, and the video just doesn't show anymore. Is this the correct place?
Also, will these constraints work on a video file playing instead of the webcam? I am using the Google Chrome flag that plays a video file instead of a camera.
navigator.getWebcam = (navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia);

// PeerJS object ** FOR PRODUCTION, GET YOUR OWN KEY at http://peerjs.com/peerserver **
var peer = new Peer({
    key: 'XXXXXXXXXXXXXXXX',
    debug: 3,
    config: {
        'iceServers': [{
            url: 'stun:stun.l.google.com:19302'
        }, {
            url: 'stun:stun1.l.google.com:19302'
        }, {
            url: 'turn:numb.viagenie.ca',
            username: "XXXXXXXXXXXXXXXXXXXXXXXXX",
            credential: "XXXXXXXXXXXXXXXXX"
        }]
    }
});

// On open, set the peer id so when peer is on we display our peer id as text
peer.on('open', function() {
    $('#my-id').text(peer.id);
});

peer.on('call', function(call) {
    // Answer automatically for demo
    call.answer(window.localStream);
    step3(call);
});

// Click handlers setup
$(function() {
    $('#make-call').click(function() {
        // Initiate a call!
        var call = peer.call($('#callto-id').val(), window.localStream);
        step3(call);
    });

    $('end-call').click(function() {
        window.existingCall.close();
        step2();
    });

    // Retry if getUserMedia fails
    $('#step1-retry').click(function() {
        $('#step1-error').hide();
        step();
    });

    // Get things started
    step1();
});

function step1() {
    // Get audio/video stream
    navigator.getWebcam({audio: true, video: true}, function(stream) {
        // Display the video stream in the video object
        $('#my-video').prop('src', URL.createObjectURL(stream));
        // Displays error
        window.localStream = stream;
        step2();
    }, function() { $('#step1-error').show(); });
}

function step2() { // Adjust the UI
    $('#step1', '#step3').hide();
    $('#step2').show();
}

function step3(call) {
    // Hang up on an existing call if present
    if (window.existingCall) {
        window.existingCall.close();
    }
    // Wait for stream on the call, then setup peer video
    call.on('stream', function(stream) {
        $('#their-video').prop('src', URL.createObjectURL(stream));
    });
    $('#step1', '#step2').hide();
    $('#step3').show();
}
Your JavaScript looks invalid. You can't declare a var inside a function argument list. Did you paste wrong? Try:
var constraints = {
    audio: false,
    video: { mandatory: { minWidth: 1280, minHeight: 720 } }
};

navigator.getWebcam(constraints, function(stream) { /* etc. */ });
Now it's valid JavaScript at least. I'm not familiar with PeerJS, but the constraints you're using look like the Chrome ones, so if you're on Chrome then hopefully they'll work, unless PeerJS does it differently for some reason.
Your subject says "WebRTC Camera constraints", so I should mention that the Chrome constraints are non-standard. See this answer for an explanation.
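For comparison, here is a sketch of the same request written with the standard constraint syntax against navigator.mediaDevices.getUserMedia; the numbers are only example values, and the stream is attached via srcObject rather than createObjectURL:
var constraints = {
    audio: true,
    video: {
        width: { min: 640, ideal: 1280 },
        height: { min: 480, ideal: 720 },
        frameRate: { ideal: 30, max: 60 } // example values
    }
};

navigator.mediaDevices.getUserMedia(constraints)
    .then(function(stream) {
        window.localStream = stream;
        // Attach the stream directly; createObjectURL(MediaStream) is gone in current browsers
        $('#my-video')[0].srcObject = stream;
        step2();
    })
    .catch(function() { $('#step1-error').show(); });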