Convert audio buffer to play as audio element - JavaScript

I'm using the SoundCloud API to stream audio from SoundCloud. However, I'm trying to get the track's BPM programmatically, so I have the following:
Soundcloud.prototype.play = function (options) {
  options = options || {};
  var src;
  if (options.streamUrl) {
    src = options.streamUrl;
  } else if (this._playlist) {
    var length = this._playlist.tracks.length;
    if (length) {
      if (options.playlistIndex === undefined) {
        this._playlistIndex = this._playlistIndex || 0;
      } else {
        this._playlistIndex = options.playlistIndex;
      }
      // be silent if index is out of range
      if (this._playlistIndex >= length || this._playlistIndex < 0) {
        this._playlistIndex = 0;
        return;
      }
      src = this._playlist.tracks[this._playlistIndex].stream_url;
    }
  } else if (this._track) {
    src = this._track.stream_url;
  }
  if (!src) {
    throw new Error('There are no tracks to play, use the `streamUrl` option or the `load` method');
  }
  if (this._clientId) {
    src = _appendQueryParam(src, 'client_id', this._clientId);
  }
  if (src !== this.audio.src) {
    pulse.loadBufferFromURI(src, (event, pulse) => {
      this.trackBPM = pulse.beat.bpm;
      this.audio.src = window.URL.createObjectURL(pulse.renderedBuffer);
      this.playing = src;
      console.log('trackBPM', pulse.beat.bpm);
    });
  } else return this.audio.play();
};
Now
pulse.loadBufferFromURI(src, (event, pulse) => {
  this.trackBPM = pulse.beat.bpm;
  this.audio.src = window.URL.createObjectURL(pulse.renderedBuffer);
  this.playing = src;
  console.log('trackBPM', pulse.beat.bpm);
})
attempts to load the stream_url as an AudioBuffer to get the BPM. The question is: can I convert the buffer into a source compatible enough to play through an <audio> element? The reason being that I want to take advantage of the element's event listeners...
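One catch first: window.URL.createObjectURL expects a Blob or File, not an AudioBuffer, so the assignment above will throw unless pulse already hands back something Blob-like. One workable route is to serialize the rendered buffer to a WAV Blob yourself and point the <audio> element at that. A minimal sketch, assuming pulse.renderedBuffer is a standard Web Audio AudioBuffer (audioBufferToWavBlob is a hypothetical helper written here for illustration, not part of any library):

// Serialize an AudioBuffer into a 16-bit PCM WAV Blob that an <audio>
// element can play via a blob: URL.
function audioBufferToWavBlob(buffer) {
  const numChannels = buffer.numberOfChannels;
  const sampleRate = buffer.sampleRate;
  const frameCount = buffer.length;
  const bytesPerSample = 2; // 16-bit PCM
  const dataSize = frameCount * numChannels * bytesPerSample;
  const arrayBuffer = new ArrayBuffer(44 + dataSize);
  const view = new DataView(arrayBuffer);

  let offset = 0;
  function writeString(s) {
    for (let i = 0; i < s.length; i++) view.setUint8(offset++, s.charCodeAt(i));
  }

  // RIFF/WAVE header
  writeString('RIFF');
  view.setUint32(offset, 36 + dataSize, true); offset += 4; // file size - 8
  writeString('WAVE');
  writeString('fmt ');
  view.setUint32(offset, 16, true); offset += 4;            // fmt chunk size
  view.setUint16(offset, 1, true); offset += 2;             // PCM format
  view.setUint16(offset, numChannels, true); offset += 2;
  view.setUint32(offset, sampleRate, true); offset += 4;
  view.setUint32(offset, sampleRate * numChannels * bytesPerSample, true); offset += 4; // byte rate
  view.setUint16(offset, numChannels * bytesPerSample, true); offset += 2;              // block align
  view.setUint16(offset, 8 * bytesPerSample, true); offset += 2;                        // bits per sample
  writeString('data');
  view.setUint32(offset, dataSize, true); offset += 4;

  // Interleave channels and clamp the float samples to 16-bit integers.
  for (let i = 0; i < frameCount; i++) {
    for (let ch = 0; ch < numChannels; ch++) {
      const sample = Math.max(-1, Math.min(1, buffer.getChannelData(ch)[i]));
      view.setInt16(offset, sample < 0 ? sample * 0x8000 : sample * 0x7FFF, true);
      offset += 2;
    }
  }
  return new Blob([arrayBuffer], { type: 'audio/wav' });
}

// usage inside the loadBufferFromURI callback:
// this.audio.src = window.URL.createObjectURL(audioBufferToWavBlob(pulse.renderedBuffer));

Since the <audio> element then plays an ordinary blob: URL, all of its events (timeupdate, ended, etc.) work as usual. Note that a decoded WAV of a full track can be large; keeping the stream URL for playback and using the decoded buffer only for analysis is the lighter-weight alternative.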

Related

Recording voice and converting speech to text at the same time

I want to use the Web Speech API for speech recognition and record the user's voice on Android devices at the same time (I mean the user holds a button, and their voice is recorded and transcribed to text simultaneously).
This works perfectly on Windows, but on Android it just returns the error:
no-speech
It seems that creating the MediaRecorder blocks the Web Speech API's access to the microphone on Android!
How can I fix this?
If I remove this line, which is responsible for recording, speech recognition works again:
new MediaRecorder(stream); // adding this line ruins the speech recognition
Here is the code in action.
In the given code I kept that line, in order to show that the code won't work on Android devices.
Note: this code should be tested on an Android device; it works fine on desktop.
CodePen: https://codepen.io/pixy-dixy/pen/GRddgYL?editors=1010
Demo here on SO:
let audioChunks = [];
let rec;
let stopRecognize;
const output = document.getElementById('output');
async function Recognize() {
console.log('Recognize')
let recognitionAllowed = true;
stopRecognize = function() {
if(recognitionAllowed) {
recognition.stop();
recognitionAllowed = false;
}
}
var SpeechRecognition = SpeechRecognition || webkitSpeechRecognition;
var SpeechGrammarList = SpeechGrammarList || webkitSpeechGrammarList;
var SpeechRecognitionEvent = SpeechRecognitionEvent || webkitSpeechRecognitionEvent;
var recognition = new SpeechRecognition();
var speechRecognitionList = new SpeechGrammarList();
recognition.grammars = speechRecognitionList;
recognition.lang = 'en-GB';
recognition.continuous = false;
recognition.interimResults = true;
recognition.maxAlternatives = 1;
recognition.start();
recognition.onresult = function(event) {
window.interim_transcript = '';
window.speechResult = '';
for(var i = event.resultIndex; i < event.results.length; ++i) {
if(event.results[i].isFinal) {
speechResult += event.results[i][0].transcript;
console.log(speechResult);
output.innerHTML = speechResult;
} else {
interim_transcript += event.results[i][0].transcript;
console.log(interim_transcript);
output.innerHTML = interim_transcript;
}
}
}
recognition.onerror = function(event) {
// restartRecognition();
console.log('recognition error: ' + event.error);
}
recognition.onend = async function(event) {
restartRecognition();
}
function restartRecognition() {
try { if(recognitionAllowed) recognition.start(); } catch(err) {}
}
}
const startRecognition = document.getElementById('start-recognition');
startRecognition.addEventListener('mousedown', handleRecognitionStart);
startRecognition.addEventListener('mouseup', handleRecognitionEnd);
startRecognition.addEventListener('touchstart', handleRecognitionStart);
startRecognition.addEventListener('touchend', handleRecognitionEnd);
function handleRecognitionStart(e) {
console.log('handleRecognitionStart', isTouchDevice)
const event = e.type;
if(isTouchDevice && event == 'touchstart') {
recognitionStart();
} else if(!isTouchDevice && event == 'mousedown') {
console.log('handleRecognitionStart')
recognitionStart();
}
}
const isTouchDevice = touchCheck();
function touchCheck() {
const maxTouchPoints = navigator.maxTouchPoints || navigator.msMaxTouchPoints;
return 'ontouchstart' in window || maxTouchPoints > 0 || window.matchMedia && matchMedia('(any-pointer: coarse)').matches;
}
function handleRecognitionEnd(e) {
const event = e.type;
console.log(':::', event == 'touchend');
if(isTouchDevice && event == 'touchend') {
recognitionEnd();
} else if(!isTouchDevice && event == 'mouseup') {
recognitionEnd();
}
}
function recognitionEnd() {
resetRecognition();
}
function recognitionStart() {
console.log('recognitionStart')
Recognize();
audioChunks = [];
voiceRecorder.start()
}
function resetRecognition() {
console.log('reset')
if(typeof stopRecognize == "function") stopRecognize();
// if(rec.state !== 'inactive') rec.stop();
voiceRecorder.stop()
}
const playAudio = document.getElementById('play');
playAudio.addEventListener('click', () => {
console.log('play');
voiceRecorder.play();
})
class VoiceRecorder {
constructor() {
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
console.log("getUserMedia supported")
} else {
console.log("getUserMedia is not supported on your browser!")
}
this.mediaRecorder
this.stream
this.playerRef = document.querySelector("#player")
this.recorderRef = document.querySelector("#recorder")
this.chunks = []
this.isRecording = false
this.constraints = {
audio: true,
video: false
}
}
handleSuccess(stream) {
this.stream = stream
this.stream.oninactive = () => {
console.log("Stream ended!")
};
this.recorderRef.srcObject = this.stream
this.mediaRecorder = new MediaRecorder(this.stream)
console.log(this.mediaRecorder)
this.mediaRecorder.ondataavailable = this.onMediaRecorderDataAvailable.bind(this)
this.mediaRecorder.onstop = this.onMediaRecorderStop.bind(this)
this.recorderRef.play()
this.mediaRecorder.start()
}
handleError(error) {
console.log("navigator.getUserMedia error: ", error)
}
onMediaRecorderDataAvailable(e) { this.chunks.push(e.data) }
onMediaRecorderStop(e) {
const blob = new Blob(this.chunks, { 'type': 'audio/ogg; codecs=opus' })
const audioURL = window.URL.createObjectURL(blob)
this.playerRef.src = audioURL;
this.chunks = [];
this.stream.getAudioTracks().forEach(track => track.stop());
this.stream = null;
}
play() { this.playerRef.play(); }
start() {
console.log('start')
if(this.isRecording) return;
console.log('33')
this.isRecording = true;
this.playerRef.src = '';
navigator.mediaDevices
.getUserMedia(this.constraints)
.then(this.handleSuccess.bind(this))
.catch(this.handleError.bind(this))
}
stop() {
if(!this.isRecording) return;
this.isRecording = false;
this.recorderRef.pause();
this.mediaRecorder.stop();
}
}
voiceRecorder = new VoiceRecorder();
<button id="start-recognition">Hold This Button and Speak In Android This should output the text and record your voice at the s</button>
<button id="play">Play Recorded Audio</button>
<h1 id="output">Voice over here</h1>
<audio id="recorder" muted hidden></audio>
<audio id="player" hidden></audio>

WebRTC - Switch camera in real time

I have to switch cameras in WebRTC while 2 users are connected in a call. I'm having a problem trying to change my camera in real time: it works for the local video, but the remote person cannot see the new camera and still sees the old one. I tried stopping the stream and initializing it again, but it still doesn't work. This is just some of my code. I have searched everywhere and can't find a solution. Can someone help me out?
$(".btn_rear_camera").click(function() {
if (cameratype == "user") {
capture('environment');
} else {
capture('user');
}
});
function capture(facingMode) {
cameratype = facingMode;
localStream.getTracks().forEach(function(track) {
track.stop();
});
var constraints = {
video: {
deviceId: devicesIds[1]
},
audio: true
};
navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
replaceTracks(stream);
}).catch(function(error) {
});
}
function replaceTracks(newStream) {
var elementId = "localVideo";
detachMediaStream(elementId);
newStream.getTracks().forEach(function(track) {
localStream.addTrack(track);
});
attachMediaStream(elementId, newStream);
// optionally, if you have active peer connections:
_replaceTracksForPeer(peerConnection);
function _replaceTracksForPeer(peer) {
peer.getSenders().map(function(sender) {
sender.replaceTrack(newStream.getTracks().find(function(track) {
return track.kind === sender.track.kind;
}));
});
}
}
function detachMediaStream(id) {
var elem = document.getElementById(id);
if (elem) {
elem.pause();
if (typeof elem.srcObject === 'object') {
elem.srcObject = null;
} else {
elem.src = '';
}
}
};
function attachMediaStream(id, stream) {
var elem = document.getElementById(id);
if (elem) {
if (typeof elem.srcObject === 'object') {
elem.srcObject = stream;
} else {
elem.src = window.URL.createObjectURL(stream);
}
elem.onloadedmetadata = function(e) {
elem.play();
};
} else {
throw new Error('Unable to attach media stream');
}
};
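For reference, the canonical shape of this fix is RTCRtpSender.replaceTrack(), which swaps the outgoing video track without renegotiation; the code above already calls it, so the ordering (stopping the old track before the swap) and the fixed deviceId are the things to double-check. A condensed sketch, assuming the peerConnection and localStream from the question and selecting the camera by facingMode — my rearrangement, not a verified drop-in:

// Switch cameras so the remote peer sees the new one: request the new
// facing mode, swap only the video sender's track, then clean up.
async function switchCamera(facingMode) {           // 'user' or 'environment'
  const newStream = await navigator.mediaDevices.getUserMedia({
    video: { facingMode: facingMode },
    audio: false                                    // keep the current audio track
  });
  const newTrack = newStream.getVideoTracks()[0];

  // Swap the outgoing track first, then stop the old one.
  const sender = peerConnection.getSenders()
    .find(s => s.track && s.track.kind === 'video');
  await sender.replaceTrack(newTrack);

  const oldTrack = localStream.getVideoTracks()[0];
  oldTrack.stop();
  localStream.removeTrack(oldTrack);
  localStream.addTrack(newTrack);
  document.getElementById('localVideo').srcObject = localStream;
}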

How to horizontally flip an image in JavaScript

I am attempting to flip an image captured from the webcam stream. The webcam view is flipped in CSS, but when I take a snapshot, it is taken directly from the camera stream. I am doing this for OCR (Tesseract.js).
Capture Function:
function captureSnapshot() {
  if (null != cameraStream) {
    var ctx = capture.getContext("2d");
    var img = new Image();
    ctx.drawImage(stream, 0, 0, capture.width, capture.height);
    img.src = capture.toDataURL("image/png");
    img.width = 240;
    snapshot.innerHTML = "";
    snapshot.appendChild(img);
  }
}
Full JavaScript
// The buttons to start & stop the stream and to capture the image
var btnStart = document.getElementById("btn-start");
var btnStop = document.getElementById("btn-stop");
var btnCapture = document.getElementById("btn-capture");

// The stream & capture
var stream = document.getElementById("stream");
var capture = document.getElementById("capture");
var snapshot = document.getElementById("snapshot");

// The video stream
var cameraStream = null;

// Attach listeners
btnStart.addEventListener("click", startStreaming);
btnStop.addEventListener("click", stopStreaming);

// Start streaming
function startStreaming() {
  var mediaSupport = "mediaDevices" in navigator;
  if (mediaSupport && null == cameraStream) {
    navigator.mediaDevices
      .getUserMedia({ video: true })
      .then(function(mediaStream) {
        cameraStream = mediaStream;
        stream.srcObject = mediaStream;
        stream.play();
      })
      .catch(function(err) {
        console.log("Unable to access camera: " + err);
      });
  } else {
    alert("Your browser does not support media devices.");
    return;
  }
}

// Stop streaming
function stopStreaming() {
  if (null != cameraStream) {
    var track = cameraStream.getTracks()[0];
    track.stop();
    stream.load();
    cameraStream = null;
  }
}

btnCapture.addEventListener("click", captureSnapshot);

function captureSnapshot() {
  if (null != cameraStream) {
    var ctx = capture.getContext("2d");
    var img = new Image();
    ctx.drawImage(stream, 0, 0, capture.width, capture.height);
    img.src = capture.toDataURL("image/png");
    img.width = 240;
    snapshot.innerHTML = "";
    snapshot.appendChild(img);
  }
}

// Convert a data URI into a Blob (e.g. for uploading the snapshot)
function dataURItoBlob(dataURI) {
  var byteString = atob(dataURI.split(",")[1]);
  var mimeString = dataURI
    .split(",")[0]
    .split(":")[1]
    .split(";")[0];
  var buffer = new ArrayBuffer(byteString.length);
  var data = new DataView(buffer);
  for (var i = 0; i < byteString.length; i++) {
    data.setUint8(i, byteString.charCodeAt(i));
  }
  return new Blob([buffer], { type: mimeString });
}

// Note: as written this recognizes a fixed demo image URL,
// not the captured `img` argument.
function recognizeText(img) {
  Tesseract.recognize(
    "https://tesseract.projectnaptha.com/img/eng_bw.png",
    "eng",
    { logger: m => console.log(m) }
  ).then(({ data: { text } }) => {
    console.log(text);
  });
}
CSS Flip (I don't know if this matters):
video {
  -webkit-transform: scaleX(-1);
  transform: scaleX(-1);
}
The CSS you have below should work. I would recommend just adding a class to the image when you instantiate it; that class in CSS would use the rule below. Is this something you have tried?
video {
  -webkit-transform: scaleX(-1);
  transform: scaleX(-1);
}
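A CSS transform, though, only mirrors what is displayed; drawImage() reads the unflipped frames straight from the video element, so the snapshot has to be mirrored on the canvas itself. A sketch reusing the question's stream, capture and snapshot variables:

// Mirror the frame at capture time: negate the X axis on the 2D context
// before drawing the video frame onto the canvas.
function captureMirroredSnapshot() {
  if (null != cameraStream) {
    var ctx = capture.getContext("2d");
    ctx.save();
    ctx.translate(capture.width, 0); // move the origin to the right edge
    ctx.scale(-1, 1);                // flip the X axis
    ctx.drawImage(stream, 0, 0, capture.width, capture.height);
    ctx.restore();                   // undo the transform for later draws
    var img = new Image();
    img.src = capture.toDataURL("image/png");
    img.width = 240;
    snapshot.innerHTML = "";
    snapshot.appendChild(img);
  }
}

The data URL then contains the mirrored pixels, so Tesseract.js receives the corrected orientation as well.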

Possible memory leak or something else?

I've made a visualizer in JavaScript where, when you select a music directory, you're able to select files within that directory to play and have the visualizer move to. But it seems that after loading a directory and then changing the song more than 4 times, the visualizer's movement becomes less responsive. I'm unsure why this is happening.
Here's an example of this happening.
Keep changing the song from the drop-down box until you see it starting to slow.
window.onload = function() {
var input = document.getElementById("file");
var audio = document.getElementById("audio");
var selectLabel = document.querySelector("label[for=select]");
var audioLabel = document.querySelector("label[for=audio]");
var select = document.querySelector("select");
var context = void 0,
src = void 0,
res = [],
url = "";
function processDirectoryUpload(event) {
var webkitResult = [];
var mozResult = [];
var files;
console.log(event);
select.innerHTML = "";
// do mozilla stuff
function mozReadDirectories(entries, path) {
console.log("dir", entries, path);
return [].reduce.call(entries, function(promise, entry) {
return promise.then(function() {
return Promise.resolve(entry.getFilesAndDirectories() || entry)
.then(function(dir) {
return dir
})
})
}, Promise.resolve())
.then(function(items) {
var dir = items.filter(function(folder) {
return folder instanceof Directory
});
var files = items.filter(function(file) {
return file instanceof File
});
if (files.length) {
// console.log("files:", files, path);
mozResult = mozResult.concat.apply(mozResult, files);
}
if (dir.length) {
// console.log(dir, dir[0] instanceof Directory);
return mozReadDirectories(dir, dir[0].path || path);
} else {
if (!dir.length) {
return Promise.resolve(mozResult).then(function(complete) {
return complete
})
}
}
})
};
function handleEntries(entry) {
let file = "webkitGetAsEntry" in entry ? entry.webkitGetAsEntry() : entry
return Promise.resolve(file);
}
function handleFile(entry) {
return new Promise(function(resolve) {
if (entry.isFile) {
entry.file(function(file) {
listFile(file, entry.fullPath).then(resolve)
})
} else if (entry.isDirectory) {
var reader = entry.createReader();
reader.readEntries(webkitReadDirectories.bind(null, entry, handleFile, resolve))
} else {
var entries = [entry];
return entries.reduce(function(promise, file) {
return promise.then(function() {
return listDirectory(file)
})
}, Promise.resolve())
.then(function() {
return Promise.all(entries.map(function(file) {
return listFile(file)
})).then(resolve)
})
}
})
function webkitReadDirectories(entry, callback, resolve, entries) {
console.log(entries);
return listDirectory(entry).then(function(currentDirectory) {
console.log(`iterating ${currentDirectory.name} directory`, entry);
return entries.reduce(function(promise, directory) {
return promise.then(function() {
return callback(directory)
});
}, Promise.resolve())
}).then(resolve);
}
}
function listDirectory(entry) {
console.log(entry);
return Promise.resolve(entry);
}
function listFile(file, path) {
path = path || file.webkitRelativePath || "/" + file.name;
console.log(`reading ${file.name}, size: ${file.size}, path:${path}`);
webkitResult.push(file);
return Promise.resolve(webkitResult)
};
function processFiles(files) {
Promise.all([].map.call(files, function(file, index) {
return handleEntries(file, index).then(handleFile)
}))
.then(function() {
console.log("complete", webkitResult);
res = webkitResult;
res.reduce(function(promise, track) {
return promise.then(function() {
return playMusic(track)
})
}, displayFiles(res))
})
.catch(function(err) {
alert(err.message);
})
}
if ("getFilesAndDirectories" in event.target) {
return (event.type === "drop" ? event.dataTransfer : event.target).getFilesAndDirectories()
.then(function(dir) {
if (dir[0] instanceof Directory) {
console.log(dir)
return mozReadDirectories(dir, dir[0].path || path)
.then(function(complete) {
console.log("complete:", webkitResult);
event.target.value = null;
});
} else {
if (dir[0] instanceof File && dir[0].size > 0) {
return Promise.resolve(dir)
.then(function() {
console.log("complete:", mozResult);
res = mozResult;
res.reduce(function(promise, track) {
return promise.then(function() {
return playMusic(track)
})
}, displayFiles(res))
})
} else {
if (dir[0].size == 0) {
throw new Error("could not process '" + dir[0].name + "' directory" + " at drop event at firefox, upload folders at 'Choose folder...' input");
}
}
}
}).catch(function(err) {
alert(err)
})
}
files = event.target.files;
if (files) {
processFiles(files)
}
}
function displayFiles(files) {
select.innerHTML = "";
return Promise.all(files.map(function(file, index) {
return new Promise(function(resolve) {
if (/^audio/.test(file.type)) { /* do stuff, that is all code currently within Promise resolver function */ } else { /* proceed to next file */
resolve()
}
var option = new Option(file.name, index);
select.appendChild(option);
resolve()
})
}))
}
function handleSelectedSong(event) {
if (res.length) {
var index = select.value;
var track = res[index];
playMusic(track)
.then(function(filename) {
console.log(filename + " playback completed")
})
} else {
console.log("No songs to play")
}
}
function playMusic(file) {
return new Promise(function(resolve) {
audio.pause();
audio.onended = function() {
audio.onended = null;
if (url) URL.revokeObjectURL(url);
resolve(file.name);
}
if (url) URL.revokeObjectURL(url);
url = URL.createObjectURL(file);
audio.load();
audio.src = url;
audio.play();
audioLabel.textContent = file.name;
context = context || new AudioContext();
src = src || context.createMediaElementSource(audio);
src.disconnect(context);
var analyser = context.createAnalyser();
var canvas = document.getElementById("canvas");
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
var ctx = canvas.getContext("2d");
src.connect(analyser);
analyser.connect(context.destination);
analyser.fftSize = 16384;
var bufferLength = analyser.frequencyBinCount;
console.log(bufferLength);
var dataArray = new Uint8Array(bufferLength);
var WIDTH = canvas.width;
var HEIGHT = canvas.height;
var barWidth = (WIDTH / bufferLength) * 32;
var barHeight;
var x = 0;
function renderFrame() {
requestAnimationFrame(renderFrame);
x = 0;
analyser.getByteFrequencyData(dataArray);
ctx.fillStyle = "#1b1b1b";
ctx.fillRect(0, 0, WIDTH, HEIGHT);
for (var i = 0; i < bufferLength; i++) {
barHeight = dataArray[i];
ctx.fillStyle = "rgb(5,155,45)"
ctx.fillRect(x, (((HEIGHT - barHeight - 5 % barHeight) + (20 % HEIGHT - barHeight))), barWidth, barHeight + 20 % HEIGHT);
x += barWidth + 2;
}
}
renderFrame();
})
}
input.addEventListener("change", processDirectoryUpload);
select.addEventListener("change", handleSelectedSong);
}
<canvas id="canvas" width="window.innerWidth" height="window.innerHeight"></canvas>
<div id="content">
<label class="custom-file-upload">
Select Music directory <input id="file" type="file" accept="audio/*" directory allowdirs webkitdirectory/>
<p style="color: rgb(5,195,5);">Now playing:<label for="audio"></label></p>
<p style="color: rgb(5,195,5);">Select Song</p>
<select id="select">
</select>
<audio id="audio" controls></audio>
As noted by yuriy636, you are starting a new animation for every new song, without ever stopping the previous one.
So once you have played 5 songs, you still have 5 visualization rendering loops running every frame, and 5 analysers.
The best thing to do here is to refactor your code:
- create a single analyser, and only update which stream feeds it
- make the canvas animation autonomous; declare it once at first load
- since you've got only one <canvas>, start only one rendering animation
When using a single analyser, your renderer doesn't use anything new when you change the source: it's always the same canvas, the same analyser, the same visualization.
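The other half of the fix, shown in isolation: keep a handle on the requestAnimationFrame loop so the previous loop can be cancelled before a new one starts. This guard is my illustration of the idea, not code from the proof of concept below:

// Keep a single handle on the animation loop; cancel before restarting so
// switching songs can never stack a second loop on top of the first.
var rafId = null;
function startLoop(draw) {
  if (rafId !== null) cancelAnimationFrame(rafId); // stop the previous loop
  (function frame() {
    rafId = requestAnimationFrame(frame);
    draw(); // clear the canvas and paint the bars, as in renderFrame()
  })();
}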
Here is a quick proof of concept, really dirty, but I hope you'll be able to understand what I did and why.
window.onload = function() {
var input = document.getElementById("file");
var audio = document.getElementById("audio");
var selectLabel = document.querySelector("label[for=select]");
var audioLabel = document.querySelector("label[for=audio]");
var select = document.querySelector("select");
var viz = null;
// removed all the IDK what it was meant for directory special handlers
function displayFiles() {
select.innerHTML = "";
// that's all synchronous, why Promises ?
res = Array.prototype.slice.call(input.files);
res.forEach(function(file, index) {
if (/^audio/.test(file.type)) {
var option = new Option(file.name, index);
select.appendChild(option);
}
});
if (res.length) {
var analyser = initAudioAnalyser();
viz = initVisualization(analyser);
// pre-select the first song ?
handleSelectedSong();
audio.pause();
}
}
function handleSelectedSong(event) {
if (res.length) {
var index = select.value;
var track = res[index];
playMusic(track)
.then(function(filename) {
console.log(filename + " playback completed")
})
viz.play();
} else {
console.log("No songs to play")
}
}
function playMusic(file) {
return new Promise(function(resolve) {
var url = audio.src;
audio.pause();
audio.onended = function() {
audio.onended = null;
// arguably useless here since blob URIs are just pointers to a real file on the user's system
if (url) URL.revokeObjectURL(url);
resolve(file.name);
}
if (url) URL.revokeObjectURL(url);
url = URL.createObjectURL(file);
// audio.load(); // would just set a 404 since you revoked the URL just before
audio.src = url;
audio.play();
audioLabel.textContent = file.name;
});
}
function initAudioAnalyser() {
var context = new AudioContext();
var analyser = context.createAnalyser();
analyser.fftSize = 16384;
var src = context.createMediaElementSource(audio);
src.connect(analyser);
src.connect(context.destination);
return analyser;
}
function initVisualization(analyser) {
var canvas = document.getElementById("canvas");
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
var ctx = canvas.getContext("2d");
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
var WIDTH = canvas.width;
var HEIGHT = canvas.height;
var barWidth = (WIDTH / bufferLength) * 32;
var barHeight;
var x = 0;
var paused = true;
function renderFrame() {
if (!paused) {
requestAnimationFrame(renderFrame);
} else {
return;
}
x = 0;
analyser.getByteFrequencyData(dataArray);
ctx.fillStyle = "#1b1b1b";
ctx.fillRect(0, 0, WIDTH, HEIGHT);
ctx.fillStyle = "rgb(5,155,45)"
ctx.beginPath();
for (var i = 0; i < bufferLength; i++) {
barHeight = dataArray[i];
// micro-optimisation, but concatenating all the rects in a single shape is easier for the CPU
ctx.rect(x, (((HEIGHT - barHeight - 5 % barHeight) + (20 % HEIGHT - barHeight))), barWidth, barHeight + 20 % HEIGHT);
x += barWidth + 2;
}
ctx.fill();
}
var viz = window.viz = {
play: function() {
if(paused){
paused = false;
renderFrame();
}
},
pause: function() {
paused = true;
clearTimeout(pauseTimeout);
pauseTimeout = null;
},
};
// we can even add auto pause linked to the audio element
var pauseTimeout = null;
audio.onpause = function() {
// let's really do it in 2s to keep the tear down effect
pauseTimeout = setTimeout(viz.pause, 2000);
}
audio.onplaying = function() {
clearTimeout(pauseTimeout);
// we were not playing
if(!pauseTimeout){
viz.play();
}
}
return viz;
}
input.addEventListener("change", displayFiles);
select.addEventListener("change", handleSelectedSong);
}
<canvas id="canvas" width="window.innerWidth" height="window.innerHeight"></canvas>
<div id="content">
<label class="custom-file-upload">
Select Music directory <input id="file" type="file" accept="audio/*" directory allowdirs webkitdirectory/>
<p style="color: rgb(5,195,5);">Now playing:<label for="audio"></label></p>
<p style="color: rgb(5,195,5);">Select Song</p>
<select id="select">
</select>
<audio id="audio" controls></audio>

Autoplay Background HTML5 Sound/Video on Android and iOS

I dug deep into Stack Overflow but didn't get a proper answer from any post.
First, here is my code:
(function(a, n) {
  window.onload = function() {
    var b = document.body,
      userAgent = navigator.userAgent || navigator.vendor || window.opera,
      playSound = function(file) {
        var mediaAudio = new Audio(file);
        mediaAudio.play();
      };
    if (b.id == 'fail' || b.id == 'success') {
      if (/android/gi.test(userAgent) && !window.MSStream) {
        var vm = document.createElement("video"), type, key;
        vm.autoplay = false;
        vm.controls = true;
        vm.preload = 'auto';
        vm.loop = false;
        vm.muted = true;
        vm.style.position = 'absolute';
        vm.style.top = '-9999%';
        vm.style.left = '-9999%';
        vm.style.zIndex = '-1';
        vm.id = 'video';
        if (b.id == 'fail')
          type = a;
        else
          type = n;
        for (key in type) {
          if (/video/gi.test(key) && vm.canPlayType(key) == 'probably') {
            vm.type = key;
            vm.src = type[key];
            b.appendChild(vm);
            setTimeout(function() {
              vm.muted = false;
              vm.play();
            }, 100);
            return;
          }
        }
      } else {
        var au = new Audio(), type, key;
        if (b.id == 'fail')
          type = a;
        else
          type = n;
        for (key in type) {
          if (/audio/gi.test(key) && au.canPlayType(key) == "probably") {
            playSound(type[key]);
            return;
          }
        }
      }
    }
  }
}({
  'audio/mpeg': './sfx/not_ok.mp3',
  'audio/wav': './sfx/not_ok.wav',
  'audio/ogg': './sfx/not_ok.ogg',
  'video/mp4; codecs=avc1.42E01E,mp4a.40.2': './sfx/not_ok.mp4',
}, {
  'audio/mpeg': './sfx/ok.mp3',
  'audio/wav': './sfx/ok.wav',
  'audio/ogg': './sfx/ok.ogg',
  'video/mp4; codecs=avc1.42E01E,mp4a.40.2': './sfx/ok.mp4',
}));
I'm trying to play a background sound on all devices on one special page. That page plays a fail or success sound.
All works great in desktop browsers, but when I try to play it on mobile, I get no results. In the code you see above, I added one hack where, on the Android platform, I generate a hidden video and try to autoplay it, but without success.
Is there a way I can trigger play for video or audio automatically?
Is there a way to emulate a click event on the body to automatically play the sound, or some other solution?
Thanks!
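For what it's worth, the behaviour described matches mobile autoplay policies: Android Chrome and iOS Safari only allow audible playback after a genuine user gesture, and muting-then-unmuting from a timer (as the hidden-video hack does) is exactly what those policies block. A synthetic click won't satisfy them either; the usual pattern is to start playback inside the first real touch or click handler. A sketch reusing playSound from the code above (the wiring is an assumption about where it would fit on this page):

// Play the sound on the first real user interaction with the page; a
// programmatic click cannot substitute for this on Android/iOS.
function playOnFirstGesture(file) {
  var unlock = function() {
    playSound(file); // from the snippet above
    document.body.removeEventListener('touchstart', unlock);
    document.body.removeEventListener('click', unlock);
  };
  document.body.addEventListener('touchstart', unlock);
  document.body.addEventListener('click', unlock);
}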
