How do I create a single button (the button will be displayed in index.php) that plays AND pauses an audio source node in JavaScript?
So essentially you have a button that plays an audio file, and when clicked again, it pauses the song...
EDIT: index.php
<!DOCTYPE html>
<html>
<head>
<meta name="description" content="HTML5 Audio Spectrum Visualizer">
<title>HTML5 Audio API showcase | Audio visualizer</title>
<link type="text/css" rel="stylesheet" href="style/style.css">
</head>
<body>
<div id="wrapper">
<div id="fileWrapper" class="file_wrapper">
<div id="info">
HTML5 Audio API showcase | An Audio Visualizer
</div>
<label for="uploadedFile">Drag&drop or select a file to play:
</label>
<input type="file" id="uploadedFile"></input>
</div>
<div id="visualizer_wrapper">
<canvas id='canvas' width="800" height="350"></canvas>
</div>
</div>
<footer>
<small></small>
</footer>
<script type="text/javascript" src="js/html5_audio_visualizer.js">
</script>
</body>
</html>
Here is part of the JavaScript code:
window.onload = function() {
new Visualizer().ini();
};
var Visualizer = function() {
this.file = null; //the current file
this.fileName = null; //the current file name
this.audioContext = null;
this.source = null; //the audio source
this.info = document.getElementById('info').innerHTML; //used to update the UI information
this.infoUpdateId = null; //to store the setTimeout ID and clear the interval
this.animationId = null;
this.status = 0; //flag for sound is playing 1 or stopped 0
this.forceStop = false;
this.allCapsReachBottom = false;
}
Visualizer.prototype = {
ini: function() {
this._prepareAPI();
this._addEventListener();
},
_prepareAPI: function() {
//fix browser vendor prefixes for AudioContext and requestAnimationFrame
window.AudioContext = window.AudioContext || window.webkitAudioContext
|| window.mozAudioContext || window.msAudioContext;
window.requestAnimationFrame = window.requestAnimationFrame ||
window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame ||
window.msRequestAnimationFrame;
window.cancelAnimationFrame = window.cancelAnimationFrame ||
window.webkitCancelAnimationFrame || window.mozCancelAnimationFrame ||
window.msCancelAnimationFrame;
try {
this.audioContext = new AudioContext();
} catch (e) {
this._updateInfo('!Your browser does not support AudioContext',
false);
console.log(e);
}
},
_addEventListener: function() {
var that = this,
audioInput = document.getElementById('uploadedFile'),
dropContainer = document.getElementsByTagName("canvas")[0];
//listen for the file upload
audioInput.onchange = function() {
if (that.audioContext===null) {return;};
//this check handles a cancelled file selection, because onchange fires even when the selection is cancelled
if (audioInput.files.length !== 0) {
//only process the first file
that.file = audioInput.files[0];
that.fileName = that.file.name;
if (that.status === 1) {
//the sound is still playing but another file was uploaded, so set the forceStop flag to true
that.forceStop = true;
};
document.getElementById('fileWrapper').style.opacity = 1;
that._updateInfo('Uploading', true);
//once the file is ready,start the visualizer
that._start();
};
};
//listen for drag & drop
dropContainer.addEventListener("dragenter", function() {
document.getElementById('fileWrapper').style.opacity = 1;
that._updateInfo('Drop it on the page', true);
}, false);
dropContainer.addEventListener("dragover", function(e) {
e.stopPropagation();
e.preventDefault();
//set the drop mode
e.dataTransfer.dropEffect = 'copy';
}, false);
dropContainer.addEventListener("dragleave", function() {
document.getElementById('fileWrapper').style.opacity = 0.2;
that._updateInfo(that.info, false);
}, false);
dropContainer.addEventListener("drop", function(e) {
e.stopPropagation();
e.preventDefault();
if (that.audioContext===null) {return;};
document.getElementById('fileWrapper').style.opacity = 1;
that._updateInfo('Uploading', true);
//get the dropped file
that.file = e.dataTransfer.files[0];
if (that.status === 1) {
document.getElementById('fileWrapper').style.opacity = 1;
that.forceStop = true;
};
that.fileName = that.file.name;
//once the file is ready,start the visualizer
that._start();
}, false);
},
_start: function() {
//read and decode the file into audio array buffer
var that = this,
file = this.file,
fr = new FileReader();
fr.onload = function(e) {
var fileResult = e.target.result;
var audioContext = that.audioContext;
if (audioContext === null) {
return;
};
that._updateInfo('Decoding the audio', true);
audioContext.decodeAudioData(fileResult, function(buffer) {
that._updateInfo('Decoded successfully, starting the visualizer',
true);
that._visualize(audioContext, buffer);
}, function(e) {
that._updateInfo('!Failed to decode the file', false);
console.log(e);
});
};
fr.onerror = function(e) {
that._updateInfo('!Failed to read the file', false);
console.log(e);
};
//assign the file to the reader
this._updateInfo('Starting to read the file', true);
fr.readAsArrayBuffer(file);
},
_visualize: function(audioContext, buffer) {
var audioBufferSouceNode = audioContext.createBufferSource(),
analyser = audioContext.createAnalyser(),
that = this;
//connect the source to the analyser
audioBufferSouceNode.connect(analyser);
//connect the analyser to the destination (the speakers), or we won't hear the sound
analyser.connect(audioContext.destination);
//then assign the buffer to the buffer source node
audioBufferSouceNode.buffer = buffer;
//play the source
if (!audioBufferSouceNode.start) {
audioBufferSouceNode.start = audioBufferSouceNode.noteOn; //older browsers use the noteOn method
audioBufferSouceNode.stop = audioBufferSouceNode.noteOff; //older browsers use the noteOff method
};
//stop the previous sound if any
if (this.animationId !== null) {
cancelAnimationFrame(this.animationId);
}
if (this.source !== null) {
this.source.stop(0);
}
audioBufferSouceNode.start(0);
this.status = 1;
this.source = audioBufferSouceNode;
audioBufferSouceNode.onended = function() {
that._audioEnd(that);
};
this._updateInfo('Playing ' + this.fileName, false);
this.info = 'Playing ' + this.fileName;
document.getElementById('fileWrapper').style.opacity = 0.2;
this._drawSpectrum(analyser);
},
...
How do I go about doing this? Also, is it possible to create a basic volume slider?
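One common approach (a minimal sketch, not code from the visualizer above; playPauseBtn, volumeSlider and visualizer are hypothetical names you would add yourself) is to toggle suspend()/resume() on the AudioContext for play/pause, and to route the analyser through a GainNode for volume:
// sketch only: assumes index.php gains
//   <button id="playPauseBtn">Pause</button>
//   <input type="range" id="volumeSlider" min="0" max="1" step="0.01" value="1">
// and that the Visualizer instance is kept, e.g. var visualizer = new Visualizer(); visualizer.ini();
var playPauseBtn = document.getElementById('playPauseBtn');
playPauseBtn.onclick = function() {
    var ctx = visualizer.audioContext;
    if (!ctx) return;
    if (ctx.state === 'running') {
        ctx.suspend();                      // pauses everything connected to the context
        playPauseBtn.textContent = 'Play';
    } else {
        ctx.resume();                       // resumes from where it was suspended
        playPauseBtn.textContent = 'Pause';
    }
};
// volume: create a GainNode once and, inside _visualize, connect
// analyser -> gainNode -> audioContext.destination instead of analyser -> destination
var gainNode = visualizer.audioContext.createGain();
document.getElementById('volumeSlider').oninput = function() {
    gainNode.gain.value = parseFloat(this.value);
};
suspend()/resume() pauses the whole audio graph rather than a single AudioBufferSourceNode, which is usually what you want for a simple play/pause button, since a buffer source node can only be started once.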
Related
Basically what I'm trying to do is detect faces locally while in a video conference. For this purpose I'm using RTCMultiConnection for the video conference part, this specific part to be exact, and OpenCV.js for the computer vision part, specifically this repository.
I know that I need to send the mediaStream object from the video element to the canvas element inside the OpenCV function. I was not getting that stream shown on the canvas, so I figured I would make the variable containing that mediaStream object global so I could pass it to the OpenCV function, but that is not working either.
I would be truly grateful for any insight you can provide, thank you.
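To make the intent clearer, here is a rough sketch (not working code from my page; startFaceDetection is a hypothetical helper) of how I imagine handing the video element over to the OpenCV part once it actually has frames:
// sketch: wait until the <video id="cam_input"> created in onstream has real dimensions,
// because cv.VideoCapture reads the element's width/height attributes
function startFaceDetection(videoElement) {
    videoElement.addEventListener('loadedmetadata', function() {
        videoElement.width = videoElement.videoWidth;
        videoElement.height = videoElement.videoHeight;
        openCvReady(); // the OpenCV function further down, which looks up #cam_input itself
    });
}
// e.g. called from connection.onstream, right after video.srcObject = specialEvent.stream;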
Here's the code:
<!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<title>Video Conferencing using RTCMultiConnection</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
<link rel="shortcut icon" href="/demos/logo.png">
<link rel="stylesheet" href="/demos/stylesheet.css">
<script src="/demos/menu.js"></script>
<script async src="js/opencv.js" ></script>
<script src="js/utils.js"></script>
</head>
<body>
<header>
<!-- <a class="logo" href="/"><img src="/demos/logo.png" alt="RTCMultiConnection"></a>
Menu<img src="/demos/menu-icon.png" alt="Menu">
<!-- <nav>
<li>
Home
</li>
<li>
Demos
</li>
<li>
Getting Started
</li>
<li>
FAQ
</li>
<li>
YouTube
</li>
<li>
Wiki
</li>
<li>
Github
</li>
</nav> -->
</header> -->
<h1>
Video Conferencing using RTCMultiConnection
<p class="no-mobile">
Multi-user (many-to-many) video chat using mesh networking model.
</p>
</h1>
<section class="make-center">
<div>
<label><input type="checkbox" id="record-entire-conference"> Record Entire Conference In The Browser?</label>
<span id="recording-status" style="display: none;"></span>
<button id="btn-stop-recording" style="display: none;">Stop Recording</button>
<br><br>
<input type="text" id="room-id" value="abcdef" autocorrect=off autocapitalize=off size=20>
<button id="open-room">Open Room</button>
<button id="join-room">Join Room</button>
<button id="open-or-join-room">Auto Open Or Join Room</button>
</div>
<div id="videos-container" style="margin: 20px 0;"></div>
<canvas id="canvas_output"></canvas>
<div id="room-urls" style="text-align: center;display: none;background: #F1EDED;margin: 15px -10px;border: 1px solid rgb(189, 189, 189);border-left: 0;border-right: 0;"></div>
</section>
<script src="/dist/RTCMultiConnection.min.js"></script>
<script src="/node_modules/webrtc-adapter/out/adapter.js"></script>
<script src="/socket.io/socket.io.js"></script>
<!-- <script async src="js/opencv.js" onload="openCvReady()"></script> -->
<!-- <script src="js/utils.js"></script> -->
<!-- custom layout for HTML5 audio/video elements -->
<link rel="stylesheet" href="/dev/getHTMLMediaElement.css">
<script src="/dev/getHTMLMediaElement.js"></script>
<script src="/node_modules/recordrtc/RecordRTC.js"></script>
<script>
// ......................................................
// .......................UI Code........................
// ......................................................
document.getElementById('open-room').onclick = function() {
disableInputButtons();
connection.open(document.getElementById('room-id').value, function(isRoomOpened, roomid, error) {
if(isRoomOpened === true) {
showRoomURL(connection.sessionid);
}
else {
disableInputButtons(true);
if(error === 'Room not available') {
alert('Someone already created this room. Please either join or create a separate room.');
return;
}
alert(error);
}
});
};
document.getElementById('join-room').onclick = function() {
disableInputButtons();
connection.join(document.getElementById('room-id').value, function(isJoinedRoom, roomid, error) {
if (error) {
disableInputButtons(true);
if(error === 'Room not available') {
alert('This room does not exist. Please either create it or wait for moderator to enter in the room.');
return;
}
alert(error);
}
});
};
document.getElementById('open-or-join-room').onclick = function() {
disableInputButtons();
connection.openOrJoin(document.getElementById('room-id').value, function(isRoomExist, roomid, error) {
if(error) {
disableInputButtons(true);
alert(error);
}
else if (connection.isInitiator === true) {
// if room doesn't exist, it means that current user will create the room
showRoomURL(roomid);
}
});
};
// ......................................................
// .......................UI Code ENDS........................
// ......................................................
// ***********************************************************
// ......................................................
// ..................RTCMultiConnection Code.............
// ......................................................
var connection = new RTCMultiConnection();
// by default, socket.io server is assumed to be deployed on your own URL
connection.socketURL = '/';
// comment-out below line if you do not have your own socket.io server
// connection.socketURL = 'https://rtcmulticonnection.herokuapp.com:443/';
connection.socketMessageEvent = 'video-conference-demo';
connection.session = {
audio: true,
video: true
};
connection.sdpConstraints.mandatory = {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
};
// STAR_FIX_VIDEO_AUTO_PAUSE_ISSUES
// via: https://github.com/muaz-khan/RTCMultiConnection/issues/778#issuecomment-524853468
var bitrates = 512;
var resolutions = 'Ultra-HD';
var videoConstraints = {};
if (resolutions == 'HD') {
videoConstraints = {
width: {
ideal: 1280
},
height: {
ideal: 720
},
frameRate: 30
};
}
if (resolutions == 'Ultra-HD') {
videoConstraints = {
width: {
ideal: 1920
},
height: {
ideal: 1080
},
frameRate: 30
};
}
connection.mediaConstraints = {
video: videoConstraints,
audio: true
};
var CodecsHandler = connection.CodecsHandler;
connection.processSdp = function(sdp) {
var codecs = 'vp8';
if (codecs.length) {
sdp = CodecsHandler.preferCodec(sdp, codecs.toLowerCase());
}
if (resolutions == 'HD') {
sdp = CodecsHandler.setApplicationSpecificBandwidth(sdp, {
audio: 128,
video: bitrates,
screen: bitrates
});
sdp = CodecsHandler.setVideoBitrates(sdp, {
min: bitrates * 8 * 1024,
max: bitrates * 8 * 1024,
});
}
if (resolutions == 'Ultra-HD') {
sdp = CodecsHandler.setApplicationSpecificBandwidth(sdp, {
audio: 128,
video: bitrates,
screen: bitrates
});
sdp = CodecsHandler.setVideoBitrates(sdp, {
min: bitrates * 8 * 1024,
max: bitrates * 8 * 1024,
});
}
return sdp;
};
// END_FIX_VIDEO_AUTO_PAUSE_ISSUES
// https://www.rtcmulticonnection.org/docs/iceServers/
// use your own TURN-server here!
connection.iceServers = [{
'urls': [
'stun:stun.l.google.com:19302',
'stun:stun1.l.google.com:19302',
'stun:stun2.l.google.com:19302',
'stun:stun.l.google.com:19302?transport=udp',
]
}];
connection.videosContainer = document.getElementById('videos-container');
connection.onstream = function(specialEvent) {
// it's just the usual event object, but I was debugging it in the console and renamed it to specialEvent
var existing = document.getElementById(specialEvent.streamid);
if(existing && existing.parentNode) {
existing.parentNode.removeChild(existing);
}
// specialEvent.mediaElement.removeAttribute('src');
// specialEvent.mediaElement.removeAttribute('srcObject');
specialEvent.mediaElement.muted = true;
specialEvent.mediaElement.volume = 0;
var video = document.createElement('video');
video.setAttribute("id", "cam_input");
window.video = video;
try {
video.setAttributeNode(document.createAttribute('autoplay'));
video.setAttributeNode(document.createAttribute('playsinline'));
} catch (e) {
video.setAttribute('autoplay', true);
video.setAttribute('playsinline', true);
}
if(specialEvent.type === 'local') {
video.volume = 0;
try {
video.setAttributeNode(document.createAttribute('muted'));
} catch (e) {
video.setAttribute('muted', true);
}
}
video.srcObject = specialEvent.stream;
var width = parseInt(connection.videosContainer.clientWidth / 3) - 20;
var mediaElement = getHTMLMediaElement(video, {
title: specialEvent.userid,
buttons: ['full-screen'],
width: width,
showOnMouseEnter: false
});
connection.videosContainer.appendChild(mediaElement);
setTimeout(function() {
mediaElement.media.play();
}, 5000);
mediaElement.id = specialEvent.streamid;
// to keep room-id in cache
localStorage.setItem(connection.socketMessageEvent, connection.sessionid);
chkRecordConference.parentNode.style.display = 'none';
if(chkRecordConference.checked === true) {
btnStopRecording.style.display = 'inline-block';
recordingStatus.style.display = 'inline-block';
var recorder = connection.recorder;
if(!recorder) {
recorder = RecordRTC([specialEvent.stream], {
type: 'video'
});
recorder.startRecording();
connection.recorder = recorder;
}
else {
recorder.getInternalRecorder().addStreams([specialEvent.stream]);
}
if(!connection.recorder.streams) {
connection.recorder.streams = [];
}
connection.recorder.streams.push(specialEvent.stream);
recordingStatus.innerHTML = 'Recording ' + connection.recorder.streams.length + ' streams';
}
if(specialEvent.type === 'local') {
connection.socket.on('disconnect', function() {
if(!connection.getAllParticipants().length) {
location.reload();
}
});
}
window.specialEvent = specialEvent;
};
// ******************************
//
// OPENCV CODE STARTS FROM HERE
//
// ******************************
function openCvReady() {
// cv['onRuntimeInitialized']=()=>{
// let video = document.getElementById("cam_input"); // video is the id of video tag
// navigator.mediaDevices.getUserMedia({ video: true, audio: false })
// .then(function(stream) {
// console.log(stream);
// video.srcObject = stream;
// video.play();
// })
// .catch(function(err) {
// console.log("An error occurred! " + err);
// });
// let cam_input = document.getElementById("cam_input");
let video = document.getElementById("cam_input");
let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let dst = new cv.Mat(video.height, video.width, cv.CV_8UC1);
let gray = new cv.Mat();
let cap = new cv.VideoCapture(cam_input);
let faces = new cv.RectVector();
// if (face == true){
// console.log("face shown");
// }
let classifier = new cv.CascadeClassifier();
let utils = new Utils('errorMessage');
let faceCascadeFile = 'haarcascade_frontalface_default.xml'; // path to xml
utils.createFileFromUrl(faceCascadeFile, faceCascadeFile, () => {
classifier.load(faceCascadeFile); // in the callback, load the cascade from file
});
const FPS = 24;
function processVideo() {
let begin = Date.now();
cap.read(src);
src.copyTo(dst);
cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
try{
classifier.detectMultiScale(gray, faces, 1.1, 3, 0);
console.log(faces.size());
}catch(err){
console.log(err);
}
for (let i = 0; i < faces.size(); ++i) {
let face = faces.get(i);
console.log("face:");
console.log(face);
let point1 = new cv.Point(face.x, face.y);
let point2 = new cv.Point(face.x + face.width, face.y + face.height);
cv.rectangle(dst, point1, point2, [255, 0, 0, 255]);
}
cv.imshow("canvas_output", dst);
// schedule next one.
let delay = 1000/FPS - (Date.now() - begin);
setTimeout(processVideo, delay);
}
// schedule first one.
setTimeout(processVideo, 0);
// };
}
var recordingStatus = document.getElementById('recording-status');
var chkRecordConference = document.getElementById('record-entire-conference');
var btnStopRecording = document.getElementById('btn-stop-recording');
btnStopRecording.onclick = function() {
var recorder = connection.recorder;
if(!recorder) return alert('No recorder found.');
recorder.stopRecording(function() {
var blob = recorder.getBlob();
invokeSaveAsDialog(blob);
connection.recorder = null;
btnStopRecording.style.display = 'none';
recordingStatus.style.display = 'none';
chkRecordConference.parentNode.style.display = 'inline-block';
});
};
connection.onstreamended = function(event) {
var mediaElement = document.getElementById(event.streamid);
if (mediaElement) {
mediaElement.parentNode.removeChild(mediaElement);
}
};
connection.onMediaError = function(e) {
if (e.message === 'Concurrent mic process limit.') {
if (DetectRTC.audioInputDevices.length <= 1) {
alert('Please select external microphone. Check github issue number 483.');
return;
}
var secondaryMic = DetectRTC.audioInputDevices[1].deviceId;
connection.mediaConstraints.audio = {
deviceId: secondaryMic
};
connection.join(connection.sessionid);
}
};
// ..................................
// ALL below scripts are redundant!!!
// ..................................
function disableInputButtons(enable) {
document.getElementById('room-id').onkeyup();
document.getElementById('open-or-join-room').disabled = !enable;
document.getElementById('open-room').disabled = !enable;
document.getElementById('join-room').disabled = !enable;
document.getElementById('room-id').disabled = !enable;
}
// ......................................................
// ......................Handling Room-ID................
// ......................................................
function showRoomURL(roomid) {
var roomHashURL = '#' + roomid;
var roomQueryStringURL = '?roomid=' + roomid;
var html = '<h2>Unique URL for your room:</h2><br>';
html += 'Hash URL: ' + roomHashURL + '';
html += '<br>';
html += 'QueryString URL: ' + roomQueryStringURL + '';
var roomURLsDiv = document.getElementById('room-urls');
roomURLsDiv.innerHTML = html;
roomURLsDiv.style.display = 'block';
}
(function() {
var params = {},
r = /([^&=]+)=?([^&]*)/g;
function d(s) {
return decodeURIComponent(s.replace(/\+/g, ' '));
}
var match, search = window.location.search;
while (match = r.exec(search.substring(1)))
params[d(match[1])] = d(match[2]);
window.params = params;
})();
var roomid = '';
if (localStorage.getItem(connection.socketMessageEvent)) {
roomid = localStorage.getItem(connection.socketMessageEvent);
} else {
roomid = connection.token();
}
var txtRoomId = document.getElementById('room-id');
txtRoomId.value = roomid;
txtRoomId.onkeyup = txtRoomId.oninput = txtRoomId.onpaste = function() {
localStorage.setItem(connection.socketMessageEvent, document.getElementById('room-id').value);
};
var hashString = location.hash.replace('#', '');
if (hashString.length && hashString.indexOf('comment-') == 0) {
hashString = '';
}
var roomid = params.roomid;
if (!roomid && hashString.length) {
roomid = hashString;
}
if (roomid && roomid.length) {
document.getElementById('room-id').value = roomid;
localStorage.setItem(connection.socketMessageEvent, roomid);
// auto-join-room
(function reCheckRoomPresence() {
connection.checkPresence(roomid, function(isRoomExist) {
if (isRoomExist) {
connection.join(roomid);
return;
}
setTimeout(reCheckRoomPresence, 5000);
});
})();
disableInputButtons();
}
// detect 2G
if(navigator.connection &&
navigator.connection.type === 'cellular' &&
navigator.connection.downlinkMax <= 0.115) {
alert('2G is not supported. Please use a better internet service.');
}
</script>
<footer>
<small id="send-message"></small>
</footer>
<script src="https://www.webrtc-experiment.com/common.js"></script>
</body>
</html>
I'm using a module to detect when the user is speaking, called hark. Here's some of the code:
// original source code is taken from:
// https://github.com/SimpleWebRTC/hark
// copyright goes to &yet team
// edited by Muaz Khan for RTCMultiConnection.js
function hark(stream, options) {
var audioContextType = window.webkitAudioContext || window.AudioContext;
var harker = this;
harker.events = {};
harker.on = function (event, callback) {
harker.events[event] = callback;
};
harker.emit = function () {
if (harker.events[arguments[0]]) {
harker.events[arguments[0]](arguments[1], arguments[2], arguments[3], arguments[4]);
}
};
// make it not break in non-supported browsers
if (!audioContextType) return harker;
options = options || {};
// Config
var smoothing = (options.smoothing || 0.1),
interval = (options.interval || 50),
threshold = options.threshold,
play = options.play,
history = options.history || 10,
running = true;
(...)
return harker;
}
What is this line for?
var harker = this;
When I checked in the debugger, this stores a Window object in harker. And from what I'm seeing it makes for some unexpected behavior when I call hark more than once.
Why not just do var harker;?
Full code is here:
https://www.webrtc-experiment.com/hark.js
And here's a demo where it's used:
<style>
html, body {
margin: 0!important;
padding: 0!important;
}
video {
width: auto;
max-width: 100%;
}
</style>
<title>Auto Stop RecordRTC on Silence</title>
<h1>Auto Stop RecordRTC on Silence</h1>
<br>
<button id="btn-start-recording">Start Recording</button>
<button id="btn-stop-recording" disabled style="display: none;">Stop Recording</button>
<hr>
<video controls autoplay playsinline></video>
<script src="/RecordRTC.js"></script>
<script src="https://www.webrtc-experiment.com/hark.js"></script>
<script>
var video = document.querySelector('video');
var h1 = document.querySelector('h1');
var default_h1 = h1.innerHTML;
function captureCamera(callback) {
navigator.mediaDevices.getUserMedia({ audio: true, video: true }).then(function(camera) {
callback(camera);
}).catch(function(error) {
alert('Unable to capture your camera. Please check console logs.');
console.error(error);
});
}
function stopRecordingCallback() {
video.srcObject = null;
var blob = recorder.getBlob();
video.src = URL.createObjectURL(blob);
recorder.camera.stop();
video.muted = false;
}
var recorder; // globally accessible
document.getElementById('btn-start-recording').onclick = function() {
this.disabled = true;
captureCamera(function(camera) {
video.muted = true;
video.srcObject = camera;
recorder = RecordRTC(camera, {
type: 'video'
});
recorder.startRecording();
var max_seconds = 3;
var stopped_speaking_timeout;
var speechEvents = hark(camera, {});
speechEvents.on('speaking', function() {
if(recorder.getBlob()) return;
clearTimeout(stopped_speaking_timeout);
if(recorder.getState() === 'paused') {
// recorder.resumeRecording();
}
h1.innerHTML = default_h1;
});
speechEvents.on('stopped_speaking', function() {
if(recorder.getBlob()) return;
// recorder.pauseRecording();
stopped_speaking_timeout = setTimeout(function() {
document.getElementById('btn-stop-recording').click();
h1.innerHTML = 'Recording is now stopped.';
}, max_seconds * 1000);
// just for logging purposes (you can remove the code below)
var seconds = max_seconds;
(function looper() {
h1.innerHTML = 'Recording is going to be stopped in ' + seconds + ' seconds.';
seconds--;
if(seconds <= 0) {
h1.innerHTML = default_h1;
return;
}
setTimeout(looper, 1000);
})();
});
// release camera on stopRecording
recorder.camera = camera;
document.getElementById('btn-stop-recording').disabled = false;
});
};
document.getElementById('btn-stop-recording').onclick = function() {
this.disabled = true;
recorder.stopRecording(stopRecordingCallback);
};
</script>
<footer style="margin-top: 20px;"><small id="send-message"></small></footer>
<script src="https://www.webrtc-experiment.com/common.js"></script>
The pattern of assigning the value of this to a variable is something you can read more about by searching for the "this/that pattern" (or self = this), since those are common names for a variable that "saves" a reference to this.
The reason for doing it is that this changes depending on how a function is called. If you capture this in a variable at a specific scope, you can pass it along to other functions, since they can't rely on this themselves: inside them, this might mean something completely different.
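A small illustration of the difference (hypothetical names, just to show the pattern):
function Harker() {
    var harker = this;               // capture whatever "this" refers to right now
    harker.label = 'speech detector';
    setTimeout(function () {
        // inside this callback, "this" is NOT the Harker instance
        // (it's the global object, or undefined in strict mode),
        // so the captured variable is used instead
        console.log(harker.label);   // "speech detector"
    }, 0);
}
new Harker();
Note that in the demo hark(camera, {}) is called without new, so inside hark() the value of this is the global window object. That would explain why the debugger shows a Window object in harker, and why calling hark() more than once can interfere: every call attaches its events to that same shared window. A plain var harker; alone would not work, because the function sets properties on harker and returns it, so it has to be initialised to an object; var harker = {}; would at least avoid sharing state through window.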
I'm learning Angular and I'm trying to copy an image from the clipboard and paste it into an HTML web page.
I found a solution in JavaScript that I want to convert to TypeScript, but it doesn't work when I test it. It works fine on jsfiddle: http://jsfiddle.net/KJW4E/1739/
<!DOCTYPE html>
<html>
<head>
<title>Page Title</title>
<script>
var CLIPBOARD = new CLIPBOARD_CLASS("my_canvas", true);
/**
* image pasting into canvas
*
* @param {string} canvas_id - canvas id
* @param {boolean} autoresize - if canvas will be resized
*/
function CLIPBOARD_CLASS(canvas_id, autoresize) {
var _self = this;
var canvas = document.getElementById(canvas_id);
var ctx = document.getElementById(canvas_id).getContext("2d");
var ctrl_pressed = false;
var command_pressed = false;
var paste_event_support;
var pasteCatcher;
//handlers
document.addEventListener('keydown', function (e) {
_self.on_keyboard_action(e);
}, false); //firefox fix
document.addEventListener('keyup', function (e) {
_self.on_keyboardup_action(e);
}, false); //firefox fix
document.addEventListener('paste', function (e) {
_self.paste_auto(e);
}, false); //official paste handler
//constructor - we ignore security checks here
this.init = function () {
pasteCatcher = document.createElement("div");
pasteCatcher.setAttribute("id", "paste_ff");
pasteCatcher.setAttribute("contenteditable", "");
pasteCatcher.style.cssText = 'opacity:0;position:fixed;top:0px;left:0px;width:10px;margin-left:-20px;';
document.body.appendChild(pasteCatcher);
// create an observer instance
var observer = new MutationObserver(function(mutations) {
mutations.forEach(function(mutation) {
if (paste_event_support === true || ctrl_pressed == false || mutation.type != 'childList'){
//we already got data in paste_auto()
return true;
}
//if paste handle failed - capture pasted object manually
if(mutation.addedNodes.length == 1) {
if (mutation.addedNodes[0].src != undefined) {
//image
_self.paste_createImage(mutation.addedNodes[0].src);
}
//register cleanup after some time.
setTimeout(function () {
pasteCatcher.innerHTML = '';
}, 20);
}
});
});
var target = document.getElementById('paste_ff');
var config = { attributes: true, childList: true, characterData: true };
observer.observe(target, config);
}();
//default paste action
this.paste_auto = function (e) {
paste_event_support = false;
if(pasteCatcher != undefined){
pasteCatcher.innerHTML = '';
}
if (e.clipboardData) {
var items = e.clipboardData.items;
if (items) {
paste_event_support = true;
//access data directly
for (var i = 0; i < items.length; i++) {
if (items[i].type.indexOf("image") !== -1) {
//image
var blob = items[i].getAsFile();
var URLObj = window.URL || window.webkitURL;
var source = URLObj.createObjectURL(blob);
this.paste_createImage(source);
}
}
e.preventDefault();
}
else {
//wait for DOMSubtreeModified event
//https://bugzilla.mozilla.org/show_bug.cgi?id=891247
}
}
};
//on keyboard press
this.on_keyboard_action = function (event) {
k = event.keyCode;
//ctrl
if (k == 17 || event.metaKey || event.ctrlKey) {
if (ctrl_pressed == false)
ctrl_pressed = true;
}
//v
if (k == 86) {
if (document.activeElement != undefined && document.activeElement.type == 'text') {
//let user paste into some input
return false;
}
if (ctrl_pressed == true && pasteCatcher != undefined){
pasteCatcher.focus();
}
}
};
//on keyboard release
this.on_keyboardup_action = function (event) {
//ctrl
if (event.ctrlKey == false && ctrl_pressed == true) {
ctrl_pressed = false;
}
//command
else if(event.metaKey == false && command_pressed == true){
command_pressed = false;
ctrl_pressed = false;
}
};
//draw pasted image to canvas
this.paste_createImage = function (source) {
var pastedImage = new Image();
pastedImage.onload = function () {
if(autoresize == true){
//resize
canvas.width = pastedImage.width;
canvas.height = pastedImage.height;
}
else{
//clear canvas
ctx.clearRect(0, 0, canvas.width, canvas.height);
}
ctx.drawImage(pastedImage, 0, 0);
};
pastedImage.src = source;
};
}
</script>
</head>
<body>
1. Copy image data into clipboard or press Print Screen <br>
2. Press Ctrl+V (page/iframe must be focused):
<br /><br />
<button onclick="alert('Hello! I am an alert box!!');"></button>
<canvas style="border:1px solid grey;" id="my_canvas" width="300" height="300"></canvas>
</body>
</html>
I am using Google Chrome Version 70.0.3538.77
Thanks in advance
From the comments above, it sounds like you were trying to execute your bindings in the constructor of your component instead of within ngOnInit. This is a classic gotcha, as the constructor runs (in most cases) well before the content on screen has finished rendering. ngOnInit is called as the component finalises its instantiation process.
https://angular-6ckpxd.stackblitz.io
The errors you receive are because TypeScript is strongly typed where JavaScript is not. You are pasting JavaScript code in (which is fine in most cases), but it will need some casting to any if you want to eliminate most of the errors.
I'm using this function to create a sound, which works well on desktop and Android, and works initially on iOS when I use a touch event to start it. I need to replace the sound later with another sound file; however, on iOS the new sound doesn't start - I'm assuming because it needs another user interaction to play.
This is a VR app in a headset so this kind of user interaction isn't possible. Is there another way of replacing the sound or another non-click user interaction I can use like movement?
I've seen this http://matt-harrison.com/perfect-web-audio-on-ios-devices-with-the-web-audio-api/
Which seems to have another solution, but I don't want to pre-load all of the files (they're reasonably big and there are 10 of them), which seems to be a requirement here - plus I use the pause function in the code I have. Are there any easy ways around this?
var AudioContext = AudioContext || webkitAudioContext, context = new AudioContext();
function createSound(filename) {
console.log('createSound()');
var url = cdnPrefix + '/' + filename;
var buffer;
context = new AudioContext();
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
// Decode asynchronously
request.onload = function() {
context.decodeAudioData(request.response, function(b) {
buffer = b;
play();
});
}
request.send();
var sourceNode = null,
startedAt = 0,
pausedAt = 0,
playing = false,
volume = context.createGain();
var play = function() {
if(playing || !buffer)
return;
var offset = pausedAt;
sourceNode = context.createBufferSource();
sourceNode.connect(context.destination);
sourceNode.connect(volume);
volume.gain.value = 1;
sourceNode.buffer = buffer;
sourceNode.start(0, offset);
sourceNode.onended = onEnded;
sourceNode.onstatechange = onStateChange;
sourceNode.onloaded = onLoaded;
//sourceNode.loop = true;
startedAt = context.currentTime - offset;
pausedAt = 0;
playing = true;
$(document).trigger("voiceoverPlay");
if(isPaused == true)
pause();
};
function onEnded(event){
$(document).trigger("voiceoverEnded");
play();
}
function onStateChange(event){
console.log('onStateChange',event);
}
function onLoaded(event){
console.log('onLoaded',event);
}
var pause = function() {
var elapsed = context.currentTime - startedAt;
stop();
pausedAt = elapsed;
$(document).trigger("voiceoverPause");
};
var stop = function() {
if (sourceNode) {
sourceNode.disconnect();
if(playing === true)
sourceNode.stop(0);
sourceNode = null;
}
pausedAt = 0;
startedAt = 0;
playing = false;
};
var getPlaying = function() {
return playing;
};
var getCurrentTime = function() {
if(pausedAt) {
return pausedAt;
}
if(startedAt) {
return context.currentTime - startedAt;
}
return 0;
};
var setCurrentTime = function(time) {
pausedAt = time;
};
var getDuration = function() {
return buffer.duration;
};
return {
getCurrentTime: getCurrentTime,
setCurrentTime: setCurrentTime,
getDuration: getDuration,
getPlaying: getPlaying,
play: play,
pause: pause,
stop: stop
};
}
You need a touch event for each sound.
I ended up using SoundJS, which is much better.
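One common workaround (a sketch, assuming the whole page can share a single AudioContext rather than calling new AudioContext() inside createSound() each time): unlock one context with a silent buffer on the first touch, then reuse that context for every file, so later sounds don't need another gesture.
// sketch of the usual iOS "unlock once" pattern
var AudioCtx = window.AudioContext || window.webkitAudioContext;
var sharedContext = new AudioCtx();
document.body.addEventListener('touchstart', function unlock() {
    // play a short silent buffer so iOS treats the context as user-activated
    var silent = sharedContext.createBuffer(1, 1, 22050);
    var source = sharedContext.createBufferSource();
    source.buffer = silent;
    source.connect(sharedContext.destination);
    source.start(0);
    document.body.removeEventListener('touchstart', unlock);
}, false);
// createSound(filename) would then use sharedContext instead of creating a new
// AudioContext on every call; decodeAudioData and the play()/pause() logic stay the same.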
I found some GitHub code for saving a file from JavaScript/HTML. We had some problems using it, because I thought it was made for Node.js. I found some demo.js code that saves the file from demo.html itself, but it is not much use to me because I can't edit it: it seems tied to the demo.html page, and it crashes if I reference it from another HTML file.
This is the FileSaver.js code:
/* FileSaver.js
 * A saveAs() FileSaver implementation.
 * 1.3.2
 * 2016-06-16 18:25:19
 *
 * By Eli Grey, http://eligrey.com
 * License: MIT
 * See https://github.com/eligrey/FileSaver.js/blob/master/LICENSE.md
 */
/*global self */ /*jslint bitwise: true, indent: 4, laxbreak: true, laxcomma: true, smarttabs: true, plusplus: true */
/*! @source http://purl.eligrey.com/github/FileSaver.js/blob/master/FileSaver.js
*/
var saveAs = saveAs || (function(view) { "use strict"; // IE <10 is explicitly unsupported if (typeof view === "undefined" || typeof navigator !== "undefined" && /MSIE [1-9]\./.test(navigator.userAgent)) { return; } var
doc = view.document
// only get URL when necessary in case Blob.js hasn't overridden it yet , get_URL = function() { return view.URL || view.webkitURL || view; } , save_link = doc.createElementNS("http://www.w3.org/1999/xhtml", "a") , can_use_save_link = "download" in save_link , click = function(node) { var event = new MouseEvent("click"); node.dispatchEvent(event); } , is_safari = /constructor/i.test(view.HTMLElement) || view.safari , is_chrome_ios
=/CriOS\/[\d]+/.test(navigator.userAgent) , throw_outside = function(ex) { (view.setImmediate || view.setTimeout)(function() {
throw ex; }, 0); } , force_saveable_type = "application/octet-stream" // the Blob API is fundamentally broken as there is no "downloadfinished" event to subscribe to , arbitrary_revoke_timeout = 1000 * 40 // in ms , revoke = function(file) { var revoker = function() {
if (typeof file === "string") { // file is an object URL
get_URL().revokeObjectURL(file);
} else { // file is a File
file.remove();
} }; setTimeout(revoker, arbitrary_revoke_timeout); } , dispatch = function(filesaver, event_types, event) { event_types = [].concat(event_types); var i = event_types.length; while (i--) {
var listener = filesaver["on" + event_types[i]];
if (typeof listener === "function") {
try {
listener.call(filesaver, event || filesaver);
} catch (ex) {
throw_outside(ex);
}
} } } , auto_bom = function(blob) { // prepend BOM for UTF-8 XML and text/* types (including HTML) // note: your browser will automatically convert UTF-16 U+FEFF to EF BB BF if (/^\s*(?:text\/\S*|application\/xml|\S*\/\S*\+xml)\s*;.*charset\s*=\s*utf-8/i.test(blob.type)) {
return new Blob([String.fromCharCode(0xFEFF), blob], {type: blob.type}); } return blob; } , FileSaver = function(blob, name, no_auto_bom) { if (!no_auto_bom) {
blob = auto_bom(blob); } // First try a.download, then web filesystem, then object URLs var
filesaver = this
, type = blob.type
, force = type === force_saveable_type
, object_url
, dispatch_all = function() {
dispatch(filesaver, "writestart progress write writeend".split(" "));
}
// on any filesys errors revert to saving with object URLs
, fs_error = function() {
if ((is_chrome_ios || (force && is_safari)) && view.FileReader) {
// Safari doesn't allow downloading of blob urls
var reader = new FileReader();
reader.onloadend = function() {
var url = is_chrome_ios ? reader.result : reader.result.replace(/^data:[^;]*;/, 'data:attachment/file;');
var popup = view.open(url, '_blank');
if(!popup) view.location.href = url;
url=undefined; // release reference before dispatching
filesaver.readyState = filesaver.DONE;
dispatch_all();
};
reader.readAsDataURL(blob);
filesaver.readyState = filesaver.INIT;
return;
}
// don't create more object URLs than needed
if (!object_url) {
object_url = get_URL().createObjectURL(blob);
}
if (force) {
view.location.href = object_url;
} else {
var opened = view.open(object_url, "_blank");
if (!opened) {
// Apple does not allow window.open, see https://developer.apple.com/library/safari/documentation/Tools/Conceptual/SafariExtensionGuide/WorkingwithWindowsandTabs/WorkingwithWindowsandTabs.html
view.location.href = object_url;
}
}
filesaver.readyState = filesaver.DONE;
dispatch_all();
revoke(object_url);
} ; filesaver.readyState = filesaver.INIT;
if (can_use_save_link) {
object_url = get_URL().createObjectURL(blob);
setTimeout(function() {
save_link.href = object_url;
save_link.download = name;
click(save_link);
dispatch_all();
revoke(object_url);
filesaver.readyState = filesaver.DONE;
});
return; }
fs_error(); } , FS_proto = FileSaver.prototype , saveAs = function(blob, name, no_auto_bom) { return new FileSaver(blob, name || blob.name || "download", no_auto_bom); } ; // IE 10+ (native saveAs) if (typeof navigator !== "undefined" && navigator.msSaveOrOpenBlob) { return function(blob, name, no_auto_bom) { name = name || blob.name || "download";
if (!no_auto_bom) {
blob = auto_bom(blob); } return navigator.msSaveOrOpenBlob(blob, name); }; }
FS_proto.abort = function(){}; FS_proto.readyState = FS_proto.INIT = 0; FS_proto.WRITING = 1; FS_proto.DONE = 2;
FS_proto.error = FS_proto.onwritestart = FS_proto.onprogress = FS_proto.onwrite = FS_proto.onabort = FS_proto.onerror = FS_proto.onwriteend = null;
return saveAs; }(
typeof self !== "undefined" && self || typeof window !== "undefined" && window || this.content )); // `self` is undefined in Firefox for Android content script context // while `this` is nsIContentFrameMessageManager // with an attribute `content` that corresponds to the window
if (typeof module !== "undefined" && module.exports) { module.exports.saveAs = saveAs; } else if ((typeof define !== "undefined" && define !== null) && (define.amd !== null)) { define("FileSaver.js", function() {
return saveAs; }); }
And the demo.js
/*! FileSaver.js demo script
* 2016-05-26
*
* By Eli Grey, http://eligrey.com
* License: MIT
* See LICENSE.md
*/
/*! @source http://purl.eligrey.com/github/FileSaver.js/blob/master/demo/demo.js */
/*jshint laxbreak: true, laxcomma: true, smarttabs: true*/
/*global saveAs, self*/
(function(view) {
"use strict";
// The canvas drawing portion of the demo is based off the demo at
// http://www.williammalone.com/articles/create-html5-canvas-javascript-drawing-app/
var
document = view.document
, $ = function(id) {
return document.getElementById(id);
}
, session = view.sessionStorage
// only get URL when necessary in case Blob.js hasn't defined it yet
, get_blob = function() {
return view.Blob;
}
, canvas = $("canvas")
, canvas_options_form = $("canvas-options")
, canvas_filename = $("canvas-filename")
, canvas_clear_button = $("canvas-clear")
, text = $("text")
, text_options_form = $("text-options")
, text_filename = $("text-filename")
, html = $("html")
, html_options_form = $("html-options")
, html_filename = $("html-filename")
, ctx = canvas.getContext("2d")
, drawing = false
, x_points = session.x_points || []
, y_points = session.y_points || []
, drag_points = session.drag_points || []
, add_point = function(x, y, dragging) {
x_points.push(x);
y_points.push(y);
drag_points.push(dragging);
}
, draw = function(){
canvas.width = canvas.width;
ctx.lineWidth = 6;
ctx.lineJoin = "round";
ctx.strokeStyle = "#000000";
var
i = 0
, len = x_points.length
;
for(; i < len; i++) {
ctx.beginPath();
if (i && drag_points[i]) {
ctx.moveTo(x_points[i-1], y_points[i-1]);
} else {
ctx.moveTo(x_points[i]-1, y_points[i]);
}
ctx.lineTo(x_points[i], y_points[i]);
ctx.closePath();
ctx.stroke();
}
}
, stop_drawing = function() {
drawing = false;
}
// Title guesser and document creator available at https://gist.github.com/1059648
, guess_title = function(doc) {
var
h = "h6 h5 h4 h3 h2 h1".split(" ")
, i = h.length
, headers
, header_text
;
while (i--) {
headers = doc.getElementsByTagName(h[i]);
for (var j = 0, len = headers.length; j < len; j++) {
header_text = headers[j].textContent.trim();
if (header_text) {
return header_text;
}
}
}
}
, doc_impl = document.implementation
, create_html_doc = function(html) {
var
dt = doc_impl.createDocumentType('html', null, null)
, doc = doc_impl.createDocument("http://www.w3.org/1999/xhtml", "html", dt)
, doc_el = doc.documentElement
, head = doc_el.appendChild(doc.createElement("head"))
, charset_meta = head.appendChild(doc.createElement("meta"))
, title = head.appendChild(doc.createElement("title"))
, body = doc_el.appendChild(doc.createElement("body"))
, i = 0
, len = html.childNodes.length
;
charset_meta.setAttribute("charset", html.ownerDocument.characterSet);
for (; i < len; i++) {
body.appendChild(doc.importNode(html.childNodes.item(i), true));
}
var title_text = guess_title(doc);
if (title_text) {
title.appendChild(doc.createTextNode(title_text));
}
return doc;
}
;
canvas.width = 500;
canvas.height = 300;
if (typeof x_points === "string") {
x_points = JSON.parse(x_points);
} if (typeof y_points === "string") {
y_points = JSON.parse(y_points);
} if (typeof drag_points === "string") {
drag_points = JSON.parse(drag_points);
} if (session.canvas_filename) {
canvas_filename.value = session.canvas_filename;
} if (session.text) {
text.value = session.text;
} if (session.text_filename) {
text_filename.value = session.text_filename;
} if (session.html) {
html.innerHTML = session.html;
} if (session.html_filename) {
html_filename.value = session.html_filename;
}
drawing = true;
draw();
drawing = false;
canvas_clear_button.addEventListener("click", function() {
canvas.width = canvas.width;
x_points.length =
y_points.length =
drag_points.length =
0;
}, false);
canvas.addEventListener("mousedown", function(event) {
event.preventDefault();
drawing = true;
add_point(event.pageX - canvas.offsetLeft, event.pageY - canvas.offsetTop, false);
draw();
}, false);
canvas.addEventListener("mousemove", function(event) {
if (drawing) {
add_point(event.pageX - canvas.offsetLeft, event.pageY - canvas.offsetTop, true);
draw();
}
}, false);
canvas.addEventListener("mouseup", stop_drawing, false);
canvas.addEventListener("mouseout", stop_drawing, false);
canvas_options_form.addEventListener("submit", function(event) {
event.preventDefault();
canvas.toBlobHD(function(blob) {
saveAs(
blob
, (canvas_filename.value || canvas_filename.placeholder) + ".png"
);
}, "image/png");
}, false);
text_options_form.addEventListener("submit", function(event) {
event.preventDefault();
var BB = get_blob();
saveAs(
new BB(
[text.value || text.placeholder]
, {type: "text/plain;charset=" + document.characterSet}
)
, (text_filename.value || text_filename.placeholder) + ".txt"
);
}, false);
html_options_form.addEventListener("submit", function(event) {
event.preventDefault();
var
BB = get_blob()
, xml_serializer = new XMLSerializer()
, doc = create_html_doc(html)
;
saveAs(
new BB(
[xml_serializer.serializeToString(doc)]
, {type: "application/xhtml+xml;charset=" + document.characterSet}
)
, (html_filename.value || html_filename.placeholder) + ".xhtml"
);
}, false);
view.addEventListener("unload", function() {
session.x_points = JSON.stringify(x_points);
session.y_points = JSON.stringify(y_points);
session.drag_points = JSON.stringify(drag_points);
session.canvas_filename = canvas_filename.value;
session.text = text.value;
session.text_filename = text_filename.value;
session.html = html.innerHTML;
session.html_filename = html_filename.value;
}, false);
}(self));
The html
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="en-US-x-Hixie">
<head>
<meta charset="utf-8"/>
<title>FileSaver.js demo</title>
<link rel="stylesheet" type="text/css" href="https://cdn.rawgit.com/eligrey/FileSaver.js/702cd2e820b680f88a0f299e33085c196806fc52/demo/demo.css"/>
</head>
<body>
<h1>FileSaver.js demo</h1>
<p>
The following examples demonstrate how it is possible to generate and save any type of data right in the browser using the <code>saveAs()</code> FileSaver interface, without contacting any servers.
</p>
<section id="image-demo">
<h2>Saving an image</h2>
<canvas class="input" id="canvas" width="500" height="300"/>
<form id="canvas-options">
<label>Filename: <input type="text" class="filename" id="canvas-filename" placeholder="doodle"/>.png</label>
<input type="submit" value="Save"/>
<input type="button" id="canvas-clear" value="Clear"/>
</form>
</section>
<section id="text-demo">
<h2>Saving text</h2>
<textarea class="input" id="text" placeholder="Once upon a time..."/>
<form id="text-options">
<label>Filename: <input type="text" class="filename" id="text-filename" placeholder="a plain document"/>.txt</label>
<input type="submit" value="Save"/>
</form>
</section>
<section id="html-demo">
<h2>Saving rich text</h2>
<div class="input" id="html" contenteditable="">
<h3>Some example rich text</h3>
<ul>
<li><del>Plain</del> <ins>Boring</ins> text.</li>
<li><em>Emphasized text!</em></li>
<li><strong>Strong text!</strong></li>
<li>
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="70" height="70">
<circle cx="35" cy="35" r="35" fill="red"/>
<text x="10" y="40">image</text>
</svg>
</li>
<li>A link.</li>
</ul>
</div>
<form id="html-options">
<label>Filename: <input type="text" class="filename" id="html-filename" placeholder="a rich document"/>.xhtml</label>
<input type="submit" value="Save"/>
</form>
</section>
<script async="" src="https://cdn.rawgit.com/eligrey/Blob.js/0cef2746414269b16834878a8abc52eb9d53e6bd/Blob.js"/>
<script async="" src="https://cdn.rawgit.com/eligrey/canvas-toBlob.js/f1a01896135ab378aa5c0118eadd81da55e698d8/canvas-toBlob.js"/>
<script async="" src="https://cdn.rawgit.com/eligrey/FileSaver.js/e9d941381475b5df8b7d7691013401e171014e89/FileSaver.min.js"/>
<script async="" src="https://cdn.rawgit.com/eligrey/FileSaver.js/597b6cd0207ce408a6d34890b5b2826b13450714/demo/demo.js"/>
</body>
</html>
Firstly, nothing in the code you've shown is related to Node.js, so using FileSaver.js without Node.js is not an issue.
The simplest example I can come up with to demonstrate how to use it, with a Blob and a File, is as follows:
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<!-- you'll need to host fileSaver.js on your own host -->
<script src="fileSaver.js"></script>
</head>
<body>
<div id="source">This is some text</div>
<input id="saveFile" type="button" value="saveFile" />
<script>
document.getElementById('saveFile').addEventListener('click', function(e) {
var text = document.getElementById('source').innerHTML;
var file = new File([text], "hello world.txt", {type: "text/plain;charset=utf-8"});
// save it
saveAs(file);
});
</script>
</body>
</html>
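For completeness, the Blob variant mentioned above works the same way; the filename is then passed as the second argument to saveAs():
// same click-handler body, but with a plain Blob instead of a File
var text = document.getElementById('source').innerHTML;
var blob = new Blob([text], {type: "text/plain;charset=utf-8"});
saveAs(blob, "hello world.txt");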