I am obtaining 1000 ms audio chunks from the client using getUserMedia(), converting each into a 1-second audio blob, and sending them to the server (and on to the rest of the clients) using socket.io until the user stops recording.
On the server I re-emit the received blob to the other clients, where the blob's URL is obtained via createObjectURL() and set as the src of an <audio> tag on the frontend. Every 1000 ms the clients receive a new blob, obtain its URL, and feed it to the <audio> tag.
However, the transition from the previous URL to the new one has a visible delay, which makes the audio patchy, choppy, and inconsistent.
On the client side, this is how I record the audio blobs:
var chunks = [];
var mediaRecorder = new MediaRecorder(stream, { bitsPerSecond: 32000 });

mediaRecorder.ondataavailable = function (e) {
    chunks.push(e.data);
};

mediaRecorder.start();

// Restart the recorder every second so that onstop fires and emits a blob.
setInterval(() => {
    mediaRecorder.stop();
    mediaRecorder.start();
}, 1000);

mediaRecorder.onstop = function (e) {
    var blob = new Blob(chunks, { 'type': 'audio/ogg; codecs=opus' });
    chunks = [];
    socket.emit('audio-blob', blob);
};
On the server I just re-emit the blob to the other clients:
socket.to(String(socket.room)).broadcast.emit('audio-blob', blob);
and on the receiving clients, the blob is played like this:
socket.on('audio-blob', blob => {
    var newBlob = new File([blob], 'filename');
    var audioURL = window.URL.createObjectURL(newBlob);
    window.audio = new Audio();
    window.audio.src = audioURL;
    window.audio.play();
});
How do I make the audio consistent and reduce the noticeable lag during the transition?
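One way to avoid the gap, as a sketch rather than a definitive fix: instead of stopping and restarting the recorder (which produces a series of independent files), keep a single MediaRecorder running with a timeslice, and on the receiving side append every incoming chunk to one MediaSource SourceBuffer rather than swapping <audio> src URLs. This assumes the browser supports 'audio/webm; codecs=opus' for both MediaRecorder and MSE:

// Sender: one continuous recording, delivered in 1 s slices.
const mediaRecorder = new MediaRecorder(stream, {
    mimeType: 'audio/webm; codecs=opus',
    bitsPerSecond: 32000
});
mediaRecorder.ondataavailable = e => socket.emit('audio-blob', e.data);
mediaRecorder.start(1000); // timeslice: ondataavailable fires every second

// Receiver: append every chunk to one continuous SourceBuffer.
const audio = new Audio();
const mediaSource = new MediaSource();
audio.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', () => {
    const sourceBuffer = mediaSource.addSourceBuffer('audio/webm; codecs=opus');
    const queue = [];
    sourceBuffer.addEventListener('updateend', () => {
        if (queue.length) sourceBuffer.appendBuffer(queue.shift());
    });
    socket.on('audio-blob', async blob => {
        // socket.io may deliver a Blob or an ArrayBuffer depending on setup.
        const buf = blob instanceof Blob ? await blob.arrayBuffer() : blob;
        if (sourceBuffer.updating || queue.length) queue.push(buf);
        else sourceBuffer.appendBuffer(buf);
        audio.play().catch(() => {}); // autoplay may require a user gesture
    });
});

Because the chunks now belong to one continuous stream, the decoder never has to tear down and rebuild between seconds, which is where the audible gap comes from.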
I have video files hosted on a CDN, and the video files are encrypted, so I need to decrypt them before playing them in the browser. But the web <video> tag has no interface for modifying the media stream.
So I want to run a proxy on the client side in JavaScript to intercept the media stream request and decrypt the stream before feeding it to the video tag.
Is it possible?
Following math-chen's answer, I have tried the code below, but when I play it, the video just keeps spinning and never renders a frame (see the image below).
I am using a very small unencrypted video file, out.mp4, so it can be loaded in one go.
<html>
<video id="video" controls src="out.mp4"></video>
<script>
    const video = document.querySelector('#video');
    const mediaSource = new MediaSource();
    video.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener('sourceopen', sourceOpen);

    function sourceOpen() {
        var mime = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
        var sourceBuffer = mediaSource.addSourceBuffer(mime);
        fetchBuffer('out.mp4', buffer => {
            sourceBuffer.appendBuffer(buffer);
        });
    }

    function fetchBuffer(url, callback) {
        var xhr = new XMLHttpRequest();
        xhr.open('get', url);
        xhr.responseType = 'arraybuffer';
        xhr.onload = function () {
            callback(xhr.response);
        };
        xhr.send();
    }
</script>
</html>
It does not need a proxy.
const video = document.querySelector('#video');
const mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);

// You can implement your logic in the sourceOpen function.
function sourceOpen() {
    // mime is the MIME type of the video
    const sourceBuffer = mediaSource.addSourceBuffer(mime);
    fetch(videoUrl).then(function (response) {
        return response.arrayBuffer();
    }).then(buffer => {
        // decrypt the buffer here, then append it
        sourceBuffer.appendBuffer(buffer);
    });
}
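The decrypt placeholder above is where the scheme-specific work would go. As a purely hypothetical sketch: if the CDN used, say, AES-CTR, the Web Crypto API could decrypt the fetched buffer before it is appended (keyBytes and counter are assumptions; the real parameters depend entirely on how the file was encrypted):

// Hypothetical: decrypt an AES-CTR encrypted file before feeding it to MSE.
async function fetchAndDecrypt(videoUrl, keyBytes, counter) {
    const encrypted = await (await fetch(videoUrl)).arrayBuffer();
    const key = await crypto.subtle.importKey(
        'raw', keyBytes, { name: 'AES-CTR' }, false, ['decrypt']);
    // counter is the 16-byte counter block agreed with the encryptor.
    return crypto.subtle.decrypt(
        { name: 'AES-CTR', counter, length: 64 }, key, encrypted);
}

The decrypted ArrayBuffer would then be passed to sourceBuffer.appendBuffer(). Separately, note that a SourceBuffer with a video/mp4 MIME type expects a fragmented MP4; an ordinary out.mp4 usually has to be repackaged into fragments first, which would also explain the endless spinner in the question above.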
I'm trying to send audio from a client (JavaScript) to a server (Java). I take the user's audio from the microphone and make a blob from it (and a URL for the blob). The project is a Spring Boot project, so I am looking for a way to send the audio as a parameter to a method in order to upload it to the server.
I was hoping it would be possible to upload the blob to the server, but it seems to be available only locally in the browser, and since the URL for the blob starts with "blob:" before "http" it causes problems.
I have also looked at serialization, but I don't see a way to do that with a blob in JS.
For now I am just passing the blob URL between the client and the server.
Client side, in JS:
// Convert the audio data into a blob
// after stopping the recording.
mediaRecorder.onstop = function (ev) {
    console.log(dataArray);
    // Blob of type mp3
    let audioData = new Blob(dataArray, { 'type': 'audio/mp3;' });
    // After filling up the chunk array, make it empty again.
    dataArray = [];
    // Create an audio URL referencing the blob named 'audioData'.
    let audioSrc = window.URL.createObjectURL(audioData);
    // Pass the audio URL to the second <audio> element.
    playAudio.src = audioSrc;
    const url = "http://localhost:8080/speech?url=" + audioSrc;
    console.log(url);
    $.get(url, function (data) {
        $("#resultat").html("transcribed tekst: " + data);
    });
}
Server side, in Java:
@GetMapping("/speech")
public String speechToText(String url) throws IOException {
    try (SpeechClient speechClient = SpeechClient.create()) {
        // The path to the audio file to transcribe
        String gcsUri = url;
        // Build the sync recognize request
        RecognitionConfig config =
            RecognitionConfig.newBuilder()
                .setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
                .setSampleRateHertz(16000)
                .setLanguageCode("en-US")
                .build();
        RecognitionAudio audio = RecognitionAudio.newBuilder().setUri(gcsUri).build();
        // Perform speech recognition on the audio file
        RecognizeResponse response = speechClient.recognize(config, audio);
        List<SpeechRecognitionResult> results = response.getResultsList();
        for (SpeechRecognitionResult result : results) {
            // There can be several alternative transcripts for a given chunk of
            // speech. Just use the first (most likely) one here.
            SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
            System.out.printf("Transcription: %s%n", alternative.getTranscript());
            return alternative.getTranscript();
        }
        return "idk";
    } catch (IOException e) {
        e.printStackTrace();
        return "noe ble feil";
    }
}
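A blob: URL only resolves inside the browser tab that created it, so the server can never fetch audio from it; the blob's bytes themselves have to be uploaded. A sketch of the client side, assuming the Spring endpoint were changed to a POST accepting multipart form data (e.g. @PostMapping with a MultipartFile parameter, whose bytes would then be passed to the Speech API as content instead of a URI):

// Upload the recorded blob itself instead of its blob: URL.
mediaRecorder.onstop = function () {
    // MediaRecorder typically produces webm/ogg rather than mp3; the
    // server-side RecognitionConfig encoding must match the actual format.
    let audioData = new Blob(dataArray, { type: 'audio/webm' });
    dataArray = [];
    let form = new FormData();
    form.append('file', audioData, 'recording.webm');
    fetch('http://localhost:8080/speech', { method: 'POST', body: form })
        .then(res => res.text())
        .then(text => $('#resultat').html('transcribed tekst: ' + text));
};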
What I am trying to do is play an audio file while it is still downloading. The problem I am facing is that the audio player only plays the audio after the download has finished. Here is my code:
The audio tag:
<audio controls preload="all" muted="muted"></audio>
And this is my JS:
var audio = document.querySelector('audio');
var assetURL = 'url/audios/file';
var token = 'Bearer token';
var mimeCodec = 'audio/wav';
var mediaSource = new MediaSource();
audio.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);

function sourceOpen(_) {
    const playPromise = audio.play();
    console.log(this.readyState);
    var mediaSource = this;
    var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
    fetchAB(assetURL, function (buf) {
        sourceBuffer.addEventListener('updateend', function (_) {
            mediaSource.endOfStream();
            audio.play();
            console.log(mediaSource.readyState); // ended
        });
        sourceBuffer.appendBuffer(buf);
    });
}

function fetchAB(url, cb) {
    console.log(url);
    var xhr = new XMLHttpRequest();
    xhr.open('get', url);
    xhr.setRequestHeader('Authorization', token);
    xhr.responseType = 'arraybuffer';
    xhr.onload = function () {
        cb(xhr.response);
    };
    xhr.send();
}
I am not really sure how to do it; any help would be appreciated.
You cannot partially play an audio file using the Web Audio API. You should create an <audio> element and set its src attribute to load the file; that approach will let you stream the file.
const audio = document.createElement('audio');
audio.src = 'file.wav';
audio.play();
However, your example shows you're setting headers when loading the file, and that is not going to work with the above approach, so you should get rid of that (although there seems to be a way to circumvent it: Send custom HTTP request header with HTML5 audio tag).
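If the Authorization header cannot be dropped, the MediaSource code from the question can still start playback mid-download by appending chunks as they arrive instead of appending one complete buffer at the end. A sketch reusing assetURL and token from the question, with the caveat that SourceBuffer does not accept plain audio/wav, so it assumes the server serves an MSE-compatible container such as fragmented MP4/AAC:

// Sketch: progressive playback with an auth header, assuming an
// MSE-compatible format (fragmented MP4/AAC here, not WAV).
const audio = document.querySelector('audio');
const mediaSource = new MediaSource();
audio.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', async () => {
    const sourceBuffer = mediaSource.addSourceBuffer('audio/mp4; codecs="mp4a.40.2"');
    const response = await fetch(assetURL, { headers: { Authorization: token } });
    const reader = response.body.getReader();
    let started = false;
    for (;;) {
        const { done, value } = await reader.read();
        if (done) break;
        sourceBuffer.appendBuffer(value);
        // appendBuffer is asynchronous; wait before appending the next chunk.
        await new Promise(resolve =>
            sourceBuffer.addEventListener('updateend', resolve, { once: true }));
        if (!started) { started = true; audio.play().catch(() => {}); }
    }
    mediaSource.endOfStream();
});

Playback begins after the first chunk is buffered, well before the download completes.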
I think you can simplify your code to this:
var audio = new Audio('file.wav');
var promise = audio.play();
// Older browsers may not return a promise
if (promise) {
    promise.catch(function () {
        // Couldn't play audio for some reason
    });
}
The built-in Audio class supports buffering, and playback will start as soon as possible, without waiting for the whole file to be downloaded.
My code:
// Create an AudioContext instance for this sound
var audioContext = new (window.AudioContext || window.webkitAudioContext)();
var maxChannelCount = audioContext.destination.maxChannelCount;
var gainNode = audioContext.createGain();
audioContext.destination.channelCount = maxChannelCount;
var merger = audioContext.createChannelMerger(maxChannelCount);
merger.connect(audioContext.destination);
gainNode.gain.value = 0.1; // 10 %
gainNode.connect(audioContext.destination);

// Create a buffer for the incoming sound content
var source = audioContext.createBufferSource();

// Create the XHR which will grab the audio contents
var request = new XMLHttpRequest();
// Set the audio file src here
request.open('GET', 'phonemes/bad-bouyed/bad.mp3', true);
// Setting the responseType to arraybuffer sets up the audio decoding
request.responseType = 'arraybuffer';

request.onload = function () {
    // Decode the audio once the request is complete
    audioContext.decodeAudioData(request.response, function (buffer) {
        source.buffer = buffer;
        // Connect the audio to the source (can I also set the gain within this declaration?)
        source.connect(merger, 0, 10);
        // Simple setting for the buffer
        source.loop = false;
        // Play the sound!
        source.start(0);
    }, function (e) {
        console.log('Audio error! ', e);
    });
};

// Send the request, which kicks everything off
request.send();
The above code loads an mp3 file and plays it using the Web Audio API. This works great, although I am wondering whether it is possible to set the gain of the source AND also set the output channel using my existing code. You can see above that I have already created a gain node and connected it to the audio context. What is the syntax for specifying both? Do I declare them in the order I wish for them to be executed?
For example:
source.connect(merger, 0, 10);
source.connect(gainNode);
Any help on this would be great.
I'd like to dynamically generate a bitstream in JavaScript that is, for example, a large OGG video.
Is it possible to tell the browser to ask a JavaScript function for the bitstream instead of making an HTTP GET request to some location?
The only way I have found to feed data to the video tag involves data: URLs, but that requires the whole video to be encoded into the document.
This is a bad solution for large videos, which would normally be streamed, and AFAIK you can't dynamically append more data to a data: URL.
Does anyone know if this is possible somehow?
I don't know if it is possible with JavaScript, but you can probably do something like that with a Java or JavaScript (?) player, like Cortado.
http://www.flumotion.net/cortado/
If your video is encoded for streaming, it will be downloaded progressively by whatever browser requests it. That's just how it works. You will need both OGG and MP4 for FF/Chrome/IE9.
http://www.mediacollege.com/video/streaming/http.html
"Encoded in the document" doesn't make any sense. The video is encoded by your encoder, which can additionally have settings to optimize for streaming (that is, it encodes the first XX seconds at a lower bitrate so playback starts faster).
This should now be possible with the MediaSource API.
Here is an example from the link above:
var video = document.querySelector('video');
var assetURL = 'frag_bunny.mp4';
// Need to be specific for Blink regarding codecs
// ./mp4info frag_bunny.mp4 | grep Codec
var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';

if ('MediaSource' in window && MediaSource.isTypeSupported(mimeCodec)) {
    var mediaSource = new MediaSource();
    //console.log(mediaSource.readyState); // closed
    video.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener('sourceopen', sourceOpen);
} else {
    console.error('Unsupported MIME type or codec: ', mimeCodec);
}

function sourceOpen(_) {
    //console.log(this.readyState); // open
    var mediaSource = this;
    var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
    fetchAB(assetURL, function (buf) {
        sourceBuffer.addEventListener('updateend', function (_) {
            mediaSource.endOfStream();
            video.play();
            //console.log(mediaSource.readyState); // ended
        });
        sourceBuffer.appendBuffer(buf);
    });
}

function fetchAB(url, cb) {
    console.log(url);
    var xhr = new XMLHttpRequest();
    xhr.open('get', url);
    xhr.responseType = 'arraybuffer';
    xhr.onload = function () {
        cb(xhr.response);
    };
    xhr.send();
}