How can I upload a file to a GitHub repo in JavaScript?

I have an audio file which is generated by a JS script integrated into my Streamlit web app with components.html, like this:
components.html(
"""
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
<!-- Set up your HTML here -->
<center>
<p><button id="record">Record</button></p>
<div id="sound-clip"></div>
</center>
<script src="https://code.jquery.com/jquery-3.3.1.min.js"
integrity="sha256-FgpCb/KJQlLNfOu91ta32o/NMZxltwRo8QtmkMRdAu8=" crossorigin="anonymous"></script>
<script>
// Set up the AudioContext.
const audioCtx = new AudioContext();
// Top-level variable keeps track of whether we are recording or not.
let recording = false;
// Ask user for access to the microphone.
if (navigator.mediaDevices) {
navigator.mediaDevices.getUserMedia({ "audio": true }).then((stream) => {
// Instantiate the media recorder.
const mediaRecorder = new MediaRecorder(stream);
// Create a buffer to store the incoming data.
let chunks = [];
mediaRecorder.ondataavailable = (event) => {
chunks.push(event.data);
}
// When you stop the recorder, create an empty audio clip.
mediaRecorder.onstop = (event) => {
const audio = new Audio();
audio.setAttribute("controls", "");
$("#sound-clip").append(audio);
$("#sound-clip").append("<br />");
// Combine the audio chunks into a blob, then point the empty audio clip to that blob.
const blob = new Blob(chunks, { "type": "audio/wav; codecs=0" });
audio.src = window.URL.createObjectURL(blob);
// Clear the `chunks` buffer so that you can record again.
chunks = [];
};
mediaRecorder.start();
recording = true;
$("#record").html("Stop");
// Set up event handler for the "Record" button.
$("#record").on("click", () => {
if (recording) {
mediaRecorder.stop();
recording = false;
$("#record").html("Record");
} else {
$("#record").html("Stop");
}
});
}).catch((err) => {
// Throw alert when the browser is unable to access the microphone.
alert("Oh no! Your browser cannot access your computer's microphone.");
});
} else {
// Throw alert when the browser cannot access any media devices.
alert("Oh no! Your browser cannot access your computer's microphone. Please update your browser.");
}
</script>
</body>
</html>
"""
)
Since I'm using Streamlit, I need to upload the generated file to a bucket (I was thinking of using a simple GitHub repo for now), but I have trouble understanding how to do it given that the script is wrapped inside components.html. Is it possible to upload the file and later retrieve it to use inside my Python script for some calculations?
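One possible direction (just a rough sketch, not something from this thread): have the recorder's onstop handler base64-encode the blob and PUT it to the GitHub Contents API, then read the file back from Python with a plain HTTP request. The owner, repo, path and token below are placeholders, and note that any token embedded in client-side JS is visible to every visitor, so a small server-side proxy or a pre-signed bucket URL is usually safer:
// Rough sketch: upload the recorded blob to a GitHub repo via the Contents API.
// OWNER, REPO and YOUR_TOKEN are placeholders.
function uploadBlobToGitHub(blob, path) {
    const reader = new FileReader();
    reader.onloadend = () => {
        // reader.result is a data URL; keep only the base64 payload after the comma.
        const base64 = reader.result.split(",")[1];
        fetch("https://api.github.com/repos/OWNER/REPO/contents/" + path, {
            method: "PUT",
            headers: {
                "Authorization": "token YOUR_TOKEN",
                "Accept": "application/vnd.github+json"
            },
            body: JSON.stringify({
                message: "Add recorded audio clip",
                content: base64
            })
        }).then(r => r.json()).then(console.log).catch(console.error);
    };
    reader.readAsDataURL(blob);
}
// Example: inside mediaRecorder.onstop, after building the blob:
// uploadBlobToGitHub(blob, "clips/" + Date.now() + ".wav");
On the Python side the file could then be fetched from the repo (raw URL or the same API) for your calculations.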

Related

JavaScript code runs fine on CodeSandbox but not locally or on a web server

I tried to read a QR code using JavaScript code found in this tutorial.
The code provided by this tutorial works inside the CodeSandbox linked in the tutorial; however, it doesn't work when I tried the exact same code on my laptop or on my remote web server. I've literally copied and pasted the code with the same file configuration, filenames, etc., but I'm getting the following JS error in my browser:
SyntaxError: Identifier 'qrcode' has already been declared (at qrCodeScanner.js:1:1)
Since I run the exact same code, I don't understand what is going on. Is there something needed on the server side, not mentioned in the tutorial, in order to make the code work?
If you want to see the code used and see it in action, you can test the CodeSandbox instance there.
EDIT
Here's the code I use :
(HTML)
<!DOCTYPE html>
<html>
<head>
<title>QR Code Scanner</title>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width; initial-scale=1.0, maximum-scale=1.0; user-scalable=0;" />
<link rel="stylesheet" href="./src/style.css" />
<script src="https://rawgit.com/sitepoint-editors/jsqrcode/master/src/qr_packed.js"></script>
</head>
<body>
<div id="container">
<h1>QR Code Scanner</h1>
<a id="btn-scan-qr">
<img src="https://dab1nmslvvntp.cloudfront.net/wp-content/uploads/2017/07/1499401426qr_icon.svg">
</a>
<canvas hidden="" id="qr-canvas"></canvas>
<div id="qr-result" hidden="">
<b>Data:</b> <span id="outputData"></span>
</div>
</div>
<script src="./src/qrCodeScanner.js"></script>
</body>
</html>
(Javascript)
const qrcode = window.qrcode;
const video = document.createElement("video");
const canvasElement = document.getElementById("qr-canvas");
const canvas = canvasElement.getContext("2d");
const qrResult = document.getElementById("qr-result");
const outputData = document.getElementById("outputData");
const btnScanQR = document.getElementById("btn-scan-qr");
let scanning = false;
qrcode.callback = res => {
if (res) {
outputData.innerText = res;
scanning = false;
video.srcObject.getTracks().forEach(track => {
track.stop();
});
qrResult.hidden = false;
canvasElement.hidden = true;
btnScanQR.hidden = false;
}
};
btnScanQR.onclick = () => {
navigator.mediaDevices
.getUserMedia({ video: { facingMode: "environment" } })
.then(function(stream) {
scanning = true;
qrResult.hidden = true;
btnScanQR.hidden = true;
canvasElement.hidden = false;
video.setAttribute("playsinline", true); // required to tell iOS safari we don't want fullscreen
video.srcObject = stream;
video.play();
tick();
scan();
});
};
function tick() {
canvasElement.height = video.videoHeight;
canvasElement.width = video.videoWidth;
canvas.drawImage(video, 0, 0, canvasElement.width, canvasElement.height);
scanning && requestAnimationFrame(tick);
}
function scan() {
try {
qrcode.decode();
} catch (e) {
setTimeout(scan, 300);
}
}
Problem
The problem is that you are probably using a live server or just opening the HTML file directly, whereas the sandbox uses parcel-bundler. The var qrcode declared by the library collides with your const qrcode.
Solutions
Type module
Replace
<script src="./src/qrCodeScanner.js"></script>
with
<script type="module" src="./src/qrCodeScanner.js"></script>
Rename
Change your variable to something else, like const myQrcode (see the sketch after this list)
Use a bundler
You can use parcel-bundler as in the sandbox, or any other bundler that will resolve the variable collision for you
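For example, the rename option might look like this (an illustrative sketch of the first few lines only):
// Use a top-level name that cannot collide with the var qrcode declared by qr_packed.js.
const myQrcode = window.qrcode;
myQrcode.callback = res => {
    // ...same handler body as before...
};
// ...and later call myQrcode.decode() instead of qrcode.decode() inside scan().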

Continuously play overlapping audio loop

I want to continuously play audio in a loop, but each repetition is supposed to overlap with the previous one. Another button should be able to stop the loop.
This is what I tried:
var audio = new Audio(...): create the audio.
audio.ontimeupdate = ...: use the ontimeupdate event to check if the audio is almost over.
if (event.target.duration - event.target.currentTime <= 1 && audio == event.target): check that the overlap is 1 second and make sure the event is triggered by the current audio (because audio will be overwritten, this is only supposed to trigger once per repetition).
audio = new Audio(): discard the reference to the "old" audio (currently still playing its last second) and create a "new" audio instead, copying the relevant properties src and ontimeupdate.
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Audio loop</title>
</head>
<body>
<button id="start">Start</button>
<button id="stop">Stop</button>
<script>
var audio = new Audio("data:audio/mpeg;base64,/+NIxAAAAAAAAAAAAFhpbmcAAAAPAAAALAAAFEAABwcPDxISFhYWGhoeHiEhJSUlKSktLTAwNDQ0ODg8PEtLS2FhZWVpaW1tbXBwdHR4eHx8fICAg4OHh4eLi4+Pnp60tLS4uLy8wMDDw8PHx8vLz8/P0tLW1tra3t7e4eHw8P//AAAAB0xBTUUzLjEwMAMoAAAAAAAAAAAUCCQDACEAAZoAABRAjMaAawAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/+MoxAAdUIqgf1gYAApHJbuABG4bf9/3/h+NxuXxiMP45DDF2OI2Ms+bnG5RmAXEUEa4/kYpLdenp6enzD6gQBB0uD5/KBiU5QP9Hv1g4cxAD76wcDGQB/gR0EAQB8HwfB8HAQBAEAQB8HwfB8CAg7B/o+D4Pg+BAQBA5f/4PvQUBBUJAB6QCgUDgYAAQ05S/+MoxA0agdq2WZpQAOZ0f/5j/+ql6IYJqXKYL7mIJntMP3Hgo6O0YV6YYKwJYywzXN/7Qv/wkgFQAoggbgWv/xAg2gsisIoLohET//4iSIfD4hHo9OHw+///yEej0iHw+NHpKgNf/iU6Hf/rRjqEpARABjUDamQPwNBuaNFIb1XvL86qYCEMQy8yUEV3P7TV/+MYxCYFUF6oAdQAAUJVC/+DPCndgLnCEGHoiZxBLXKcDIr5bqTb0EIujyawPB9MkC80ttC652zLkTQCkxdVDNIWUFf6M0wN/+MYxEsEUGKcAAY4IUxBTUUzLjEwMFVVVVVVVVVVVVVV/+PCnlDthRAGmYhoxnYIM7lFfVRMQU1FMy4xMDCqqqqqqqqqqqqq/+MYxHQE6FqcoA44Iaqqqqqqqqqqqqqqqqqqqqqqqqqq8j1XjjdxweQDEWmL9JEBPLFrNpVMQU1FMy4xMDCqqqqqqqqqqqqq/+MYxJsEoGKYAAB2Aaqqqqqqqqqqqqqqqqqqqqqqqqqqqu8tQ6FwGBxkG9mig6ik5MapQmpMQU1FMy4xMDCqqqqqqqqqqqqq/+MYxMMEeGKYAAY4Iaqqqqqqqqqqqqqqqqqqqqqqqqqq/+DOklbsBU4CoYYqZmkJsoldTdNMQU1FMy4xMDBVVVVVVVVVVVVV/+MYxNgE+GKYAA44IVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVd6rxwhOxEzAJKQpisis9VVMQU1FMy4xMDBVVVVVVVVVVVVV/+MYxNgFAGaUAAB2AVVVVVVVVVVVVVVVVVVVVVVVVVVVVeZWYeCxQC8yNajTYaQVa9GaYGVMQU1FMy4xMDBVVVVVVVVVVVVV/+MYxNcEkGKUAAY4IVVVVVVVVVVVVVVVVVVVVVVVVVVVVfjwzpH/LmAYFmC5cZKBjhzF2lVySXbfgAGEQiYPBoKBaqRgYIGF/+MYxNgE+GKYAA44IYLGEgQniBQOYaDhh4NAYQDIJMQjMx+PTGorMFkIQo492BD9xrN1x85C6zTJjMtlc/20ajGMN1k1Sy/a/+MYxNQD4GKkAAY2IVoYhBkDBADsoSzkeNpAWEC4poqmuiCk19Gph9yc1pNhQhukcnA5CjpjKblGYDHC55nSaSl62iFlDOUF/+MYxNcEoGKUAAY4IQ3OQwMAgERKxo5d8BELsPanOWXLRqxt0RULRopwpWwuQXgYI8aY5cRMCEq3oS0H1L26IBAMNBeFJyFs/+MYxNcEkF6cAAB4AQtotRu6K5aBFCEq3lsy8a13oLkGUJcd7kqAAABATUbGXfARC7D5JVgUJZtS9sCEgBDQBvmpQWULMKWP/+NIxP91nG58fuZwaIorlmEAD2LTLZlt1L3qSELNoA3nWgXALaJ0QCmOW0QcbxkZd8tunXASgBbdB9t2QF/C5CdEMqBlyEHG0bmX/Llp1xpYQuWg+09uCEguQnRGVhy8CDisDMwSEwhLtv0l4YQlm1L2eFnDAAuw8qgYAAWYUsbGWrMIS173KYAEIGGle2QtQYAFrHMVvAAAEBHx4S1ZhGWjaGuQAhAQ0N3iLcGIQKAzRgYAAYAF8H1LdmMYGGvdgABCYxl23aLcGQgCIrxaZgIZiFvG4gUZnSBltXR4MpTKFDdsSivku/AAZiCAZiBpjhoCIs4AIwypgy5IvyFRBlzxozxkwRCHOE9N5HOYRMQCDBRqmDcknkaOGXYlDwDm/+NoxDttdG54XtdyQCPnVObzYkEIgTINMocui8BaowQC0TyrSLvIPNOglTFFUFAvclSWVLisFbshKLlIpSBW5CUkU6zwqAoqqaz7KVAUUW1dJIZAMoNLFzIrKDPM6KgKYqxZeylQFYsCwEsMqZiVhlSpmJRaAVAUVVNZewFMVTWBXSUCTGXVSMqUCWNA0ArCqCrptspVKsWBYCWGVMxKw4SwzEoGiLDVSsRtuCu1iMCwEoEisptOMqTGUGdaAVMUxVNaNpKgKgrqwUsMmMsaiaUoEqZ1o6l6XdRRkTAUJKDrBYKS+QDIpQlgSEpB5g0GpeoBUUYGZChJQdYLHE5kAyKUCtKLZFmk6oDQSgAEsy2y0S2JZlK19EEwBALNNNXkWyLNI/Q2hiAASzLTmQlwSzKPsOI9AEAs0wVmRcIs0gtD6PIABAQLBmelwSyKCsCJVAEABBK5Z8XCLJF2nvSpAAZhAqbNhLUmACXFexG4wRDFCTpYUBQDDCLfO+XtMIUxAVrLAgQEwAb+YUKqULSRkCBSVmbDQX1dKO3B6tX/AyiQxlnRhCmf0II0wNO3+PWg/+MYxCgFCGqQAAP4IZlE5xiZhQxHWAuNAx54pbsEav5yHrDNASHcKfxwyuhg6cor4CaZ8GRiZO0kaYTdH4nZfJ3abGpMQU1F/+MYxE4EOG6gAAJ2IDMuMTAwqqqqqqqqqqqqqqqqqqqqqqr3xoqRRZoIYMh9xlASbNJ7GZZMQU1FMy4xMDCqqqqqqqqqqqqq/+MYxHgE0G6YAAJ4IaqqqqqqqqqqqqqqqqqqqqqqqqqqqvoDBPWm1VDBK1OAjgum7kbpMx1MQU1FMy4xMDBVVVVVVVVVVVVV/+MYxJ8EsG6UAAP2IVVVVVVVVVVVVVVVVVVVVVVVVVX+YUKqULSRkCAyUmfjQXldKO3B6qpMQU1FMy4xMDCqqqqqqqqqqqqq/+MYxMcEUGqYAAM2IKqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq/wMkSGMs6MITz/A5CGBp2/xMQU1FMy4xMDCqqqqqqqqqqqqq/+MYxNYEgG6YAAJ4Iaqqqqqqqqqqqqqqqqqqqqqqqqqqqqr1obKXYa+YOLBygJjQAeectgFMQU1FMy4xMDBVVVVVVVVVVVVV/+MYxNcEuG6YAAL4IVVVVVVVVVVVVVVVVVVVVVVVVVVVVf5yLaGaA
kXwtADPRXRseOYr4CVMQU1FMy4xMDBVVVVVVVVVVVVV/+MYxNkFCGqQAAP4IVVVVVVVVVVVVVVVVVVVVVVVVVVVVVX/620ZjTtJomE3B/J2Xyd0KlWv5bvwADPHTGMhASUAKICjERhq/+MYxNUEQG6gAAJ2IBSMpsKBAJqvj05oSHIIZdZ8whSYxfFMwBGIyyXg69UwxkE0zTQkzzLUx5EcwJBAxFDMwFAIwWBYmA0w/+MYxNYEeGqcAAJ4ISwXCAfBgFAIGE3zCQFAoAJguC40BRgUCgcDAFA0DA2gwbRp7GYyfJhOheFBBgE9zYBMkznTWMBkLAaU/+MYxNcEsG6UAAP4ITDpGApa2jABgoFCnGW4QoVXCAMJLbsuLgLcQTpJrQL5tGLwOKX/YujwrAwNDRsKD7jImNAL3o5qqFv2/+MYxNYEUGqYAAbMJDRbRlpcNb6GCai0y+DPS8bjF/GKI9prpoA462iyDBQSFUZcxChVcHAYSWnZMWcVAXvQrVULfsyLSMlL/+NIxP95VG58Xs9wTIao0BCOC8y3jMS5bNi6ikEb0c18F225FyGal/1fo2I4MzLsNJLvtCQSK8LzhA1kAY7AgABboJCiOXkCAMLAxGGgEK4gYBERCeW/YwBhsOAgFGAaFCtEAt41UDAYiZUotgRAYgQlAU1tGkqmxiQRAApwh4iEAoLJNBFLTGkeGCDA5gqM0hXkZyp9GBA0AGnBzxgBoAwEzGT5MKRY4MMCoDoTSNcxnOmcYECSAqcDTIAGgS7TMhJkwrDlhQwCoShNZVjGlKLRgUGMCqTaEqCNiFNTUwiCZXh0wQwBcFTm86qxtWkkZGCzFRrcu/AAT1LIqcF7kkkQS1qbRihLQMMRSYAHHgQaQZ5BVTOZkRBKYdAmYQh8/+NoxCxnbG58Xs94QGCIFjoYGn2hHD0QYFC5iQRl4zCAVFgyDQAhEgJQGraLxOgis66gLesCac8KRLxpivwsM+bKWmukup6FBn/WFfJczTm4rpg1dsYYdC2suq6TLpIzqNtZkDhQNAK6YNXbGGHSNlLquky6SM6jbOZA1p+n1a7JWuxhypG4MCwE16SNelbkzjtQNALEYNYjGGdSNrLquky6SM6jbOZM4T9QC12Ss5jDWpOylprcljQQsaH2GwcyppzcViwWu2HGHQplLTXiWNHF3RdhsJZUwZmKYMBpivwpk3y5WCs+TCgBMaB1MW9YEwZuqYMBpivIsM3zAV0t2TGgBMZv0TV+qVJXMJLwumgFaAj0vZSlH1jReJwEAzU0eV6qrI/KdlyXbLutWR6ViVVR9aIXKdgu81JHlWFYJFJo5clyy7rFUqlLlgUUWeFsncLvMWSpTVWCQWZ+WlccuCs1KpK5YFACvgsk1ws8t5BKkinsXaUfLItbLYsdQTL87EbUwN7iD5G3llJXmML4ca0y8QwCmS85jgG7V3tmXWrvLcUWOdCOJh2aRenqR8cj/+MYxDEDsH6sAAayQNCyqzUEkoEYJpGAgEA1OX7N5UxBTeZHjfkDzoQHRlguKIluJEI+9FxMQU1FMy4xMDBVVVVVVVVVVVVV/+MYxF0EcH6kAAB2IFVVVVVVVVVVVVVVVVVVVVVVVVVVVVX6xrUL4FgJKnyVQd6JXbvUd/hMQU1FMy4xMDCqqqqqqqqqqqqq/+MYxIYDuH6oAAaCRKqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqI8yy5fnW2OMKJuzZp+vcod1MQU1FMy4xMDBVVVVVVVVVVVVV/+MYxLIEoH6ooAB2IFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVXW7kFodDA7Ysw+9JYt2BlMQU1FMy4xMDBVVVVVVVVVVVVV/+MYxNcEwH6YAAB2IFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVSPIOOWNprp2zj5MKsa3W0pMQU1FMy4xMDCqqqqqqqqqqqqq/+MYxNYEeIKkAAB2IKqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3cm4AHQExzHO4XIjdPYp7dVMQU1FMy4xMDBVVVVVVVVVVVVV/+MYxNUEKH6soAB0IFVVVVVVVVVVVVVVVVVVVVVVVVVVVfMVqljTklqjeroDZyzZHdmoBpVMQU1FMy4xMDBVVVVVVVVVVVVV/+MYxNMDsH6sAAbERFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVdZ1H3QQGY2QHMa/GKSnpLAa3LfwAEDwoBOkHDEAkfQccLET/+MYxNQD2H60oAByIgQhgNSXCB1AC8EkB6DXsEmMJwDMGg4MDg3MYwRMgC+MJ41KRcMhxjCoXCxBmEQPEACjwjmBwIDAEjwL/+MYxNUECH6oAAbwQJgIAyHpMwwhSeRYLTp3KEIB0/isJaRKVLMuQn6oigEUbKAlk0Ck2C06BSiBctWRLIvAomoygEUTUZQc/+MYxNcEsH6cAAB2IFO1GEV1GlOEV1GlOEx1Ek2S5CsqhZdxWVWtAIo6oQgHUeVoQlqPK0IrsaVrRUUdW+kIwlb6QjPVWJDs/+MYxNQD+H6oAAbyQCluJDsKW4oGwpJsu4oqnGX8WSnGX8XynAX/WUnAhLWUqBCWzJONExeKk0TEtUIyziqo0AtWnshAWrT2/+NIxP92XG54XsdwSEIC1a0hoZahSlA8twpSgeXUXKgYXXUqRELrqVIiF71zIHl7FbUKy9imKFZexhpfgvepkhQgLWGQoQFsSQrQELCoVoCFhUK0BDEUKEJ5bYDYBLS2wGwCWl8gN8EMLagLYMYWtAWwYxAaAsA1pa4BYBrS3wCwFWl+gFsKMCAm0IUYEBNoQswWCbABdoYI2CC7QwxsEF2jRjaMQMDim0YgYHJNpRAweKbCCNoko2EEbRJxsMI2kSjacROEmm9IicJRN6RE4iicECOIdI5IC8wiRyMFZiSjmUKQCEnEtSvku/AAKqIyTqTJOaiEUkESS8bg1GoZgAr5XQl0WxFRICyIAOEFhqHqEqDA6PgjDyaCUXjcxTLz/+NIxDhMJG6IXmYYRNTPiKXTQnFU8LRVUE5MrPTpeenT5ilcPiqsPjlQZJlZ6dLz06fMUrh8lWHyVw+TPnqZ89OlZilUHxysPkqgyTKz1M+epnzFKoPkrh8liMkys9TPnqaMxSxHyVwySojJMjMUz5imRmKVEZJXDJKiMkJGYpnzEvIIklNAJxdUE4poAlFJBJqEjMSkbCSU0AnIaIyKZqJRSNhBKSCJJSIwIieRRCL5qJRfIoJEAjCCXDYSS4PwIkMfRCL5qIRfH0EiAPwglwjCCVB+BEhi0QiuPohFcfQSIAuEEqD8IJUFwIkMWiEVx9EIri0BRAFwSlQfhBHgXAJIYtEKNUxBTUUzLjEwMFVVVVVVVVVVVVVVVVVVVVVV");
audio.ontimeupdate = function(event) {
if (event.target.duration - event.target.currentTime <= 1 && audio == event.target) {
audio = new Audio();
audio.src = event.target.src;
audio.ontimeupdate = event.target.ontimeupdate;
audio.play();
}
};
document.getElementById("start").onclick = function() {
audio.play();
};
document.getElementById("stop").onclick = function() {
audio.pause();
};
</script>
</body>
</html>

Play loaded audio file with Tone.js web audio framework

Hi, I'm creating a simple upload page where users load their local files and play them. For now, I'm not uploading to the server; I just want to create a file input and then play the audio file with Tone.js, if that's possible.
For this test I'm using the p5.js library and the Tone.js audio framework; the code is below:
let inputFile, bttPlayFile;
let files;
window.AudioContext = window.AudioContext || window.webkitAudioContext;
let context = new Tone.Context();
let source;
function setup() {
inputFile = createFileInput(uploadTrack);
bttPlayFile = createButton('play');
bttPlayFile.mousePressed(tooglePlay);
}
function playSound(arraybuffer) {
context.decodeAudioData(arraybuffer, function(buf) {
source = new Tone.BufferSource();
source.connect(context).toDestinattion();
source.Tone.Buffer = buf;
source.start(0);
});
}
function selectedFile(evt) {
files = evt.target.files;
playFile(files[0]);
}
function playFile(file) {
let reader = new FileReader();
reader.onload = function(e) {
console.log(e.target.result);
playSound(e.target.result);
}
reader.readAsArrayBuffer(file);
}
function uploadTrack() {
inputFile.changed(selectedFile, false);
}
function tooglePlay() {
Tone.Transport.start();
}
<!DOCTYPE html>
<html lang="en">
<head>
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.4.0/p5.js"></script>
<link rel="stylesheet" type="text/css" href="style.css">
<meta charset="utf-8" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/tone/14.8.32/Tone.js" integrity="sha512-USKCQh+O8BX/a2K06xPNTwduhmQvN/m9FhkR7PRysCRlPoqIItl7Qz3xVTZC/oIHe6g5XvnLHDUgGpRMZZTmFQ==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
</head>
<body>
<script src="sketch.js"></script>
</body>
</html>
At the moment, nothing happens and I want to understand why and how to proceed with this. Thanks!
If you open the console of your browser you should see a warning.
The AudioContext is "suspended". Invoke Tone.start() from a user
action to start the audio.
I think it will work if you call Tone.start() in the click handler of your "play" button before you load and decode the selected audio file.
It's unrelated to your question but it should be safe to remove window.AudioContext = window.AudioContext || window.webkitAudioContext from your code. Tone.js uses standardized-audio-context internally which should handle this for you.
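A minimal sketch of that suggestion (assuming Tone.js v14 as loaded in the question; everything else stays the same):
function tooglePlay() {
    // Tone.start() must be called from a user gesture; it returns a promise that
    // resolves once the underlying AudioContext has been resumed.
    Tone.start().then(() => {
        Tone.Transport.start();
    });
}
Note that toDestinattion() in playSound also looks like a typo; the Tone.js method is spelled toDestination().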

How can I get a list of video cameras attached in my computer using JavaScript?

I want to display a list of video cameras attached to the user's computer, and when they select one, display streaming video from that camera in an HTML5 <video> tag.
How can I get a list of the video cameras attached to the user's computer?
This only works in Chrome and Edge:
<script>
navigator.mediaDevices.enumerateDevices().then(function (devices) {
for(var i = 0; i < devices.length; i ++){
var device = devices[i];
if (device.kind === 'videoinput') {
var option = document.createElement('option');
option.value = device.deviceId;
option.text = device.label || 'camera ' + (i + 1);
document.querySelector('select#videoSource').appendChild(option);
}
};
});
</script>
<select id="videoSource"></select>
Perhaps Navigator.getUserMedia() (uses WebRTC under the hood) is what you're looking for, though I don't see anything that will directly tell you what devices are available (the list of devices isn't exposed to your code; it's presented to the user when asking for permission to access available hardware).
Also note the browser support: Chrome 21+, Firefox 20+, Opera 12+, no support for IE and possibly Safari.
Try this out:
<!DOCTYPE html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="author" content="Victor Stan">
<meta name="description" content="Get multiple video streams on one page. Adapted from code by Muaz Khan">
<title>Video Camera</title>
<script src="//ajax.googleapis.com/ajax/libs/jquery/2.0.3/jquery.min.js" ></script>
<style type="text/css" media="screen">
video {
border:1px solid gray;
}
</style>
</head>
<body>
<script>
if (!MediaStreamTrack) document.body.innerHTML = '<h1>Incompatible Browser Detected. Try <strong style="color:red;">Chrome Canary</strong> instead.</h1>';
var videoSources = [];
MediaStreamTrack.getSources(function(media_sources) {
console.log(media_sources);
alert('media_sources : '+media_sources);
media_sources.forEach(function(media_source){
if (media_source.kind === 'video') {
videoSources.push(media_source);
}
});
getMediaSource(videoSources);
});
var get_and_show_media = function(id) {
var constraints = {};
constraints.video = {
optional: [{ sourceId: id}]
};
navigator.webkitGetUserMedia(constraints, function(stream) {
console.log('webkitGetUserMedia');
console.log(constraints);
console.log(stream);
var mediaElement = document.createElement('video');
mediaElement.src = window.URL.createObjectURL(stream);
document.body.appendChild(mediaElement);
mediaElement.controls = true;
mediaElement.play();
}, function (e)
{
alert('Hii');
document.body.appendChild(document.createElement('hr'));
var strong = document.createElement('strong');
strong.innerHTML = JSON.stringify(e);
alert('strong.innerHTML : '+strong.innerHTML);
document.body.appendChild(strong);
});
};
var getMediaSource = function(media) {
console.log(media);
media.forEach(function(media_source) {
if (!media_source) return;
if (media_source.kind === 'video')
{
// add buttons for each media item
var button = $('<input/>', {id: media_source.id, value:media_source.id, type:'submit'});
$("body").append(button);
// show video on click
$(document).on("click", "#"+media_source.id, function(e){
console.log(e);
console.log(media_source.id);
get_and_show_media(media_source.id);
});
}
});
}
</script>
</body>
</html>
JavaScript cannot access your cameras to return a list. You will need to use a Flash SWF to get the camera information and pass it back to your page's JavaScript.
EDIT:
To those who downvoted: these methods will not give him a dropdown list of available cameras. If they do, please post a link or code. At the current date, the only way to get a list of cameras (which is what his question was about) is to use Flash (or possibly Silverlight, but Flash has much broader install coverage). I've edited my answer to be a little more specific in terms of getting the list versus accessing a camera.

How to record webcam and audio using webRTC and a server-based Peer connection

I would like to record the users webcam and audio and save it to a file on the server. These files would then be able to be served up to other users.
I have no problems with playback, however I'm having problems getting the content to record.
My understanding is that the getUserMedia .record() function has not yet been written - only a proposal has been made for it so far.
I would like to create a peer connection on my server using the PeerConnectionAPI. I understand this is a bit hacky, but I'm thinking it should be possible to create a peer on the server and record what the client-peer sends.
If this is possible, I should then be able to save this data to flv or any other video format.
My preference is actually to record the webcam + audio client-side, to allow the client to re-record videos if they didn't like their first attempt before uploading. This would also allow for interruptions in network connections. I've seen some code which allows recording of individual 'images' from the webcam by sending the data to the canvas - that's cool, but I need the audio too.
Here's the client side code I have so far:
<video autoplay></video>
<script language="javascript" type="text/javascript">
function onVideoFail(e) {
console.log('webcam fail!', e);
};
function hasGetUserMedia() {
// Note: Opera is unprefixed.
return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia || navigator.msGetUserMedia);
}
if (hasGetUserMedia()) {
// Good to go!
} else {
alert('getUserMedia() is not supported in your browser');
}
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia || navigator.msGetUserMedia;
var video = document.querySelector('video');
var streamRecorder;
var webcamstream;
if (navigator.getUserMedia) {
navigator.getUserMedia({audio: true, video: true}, function(stream) {
video.src = window.URL.createObjectURL(stream);
webcamstream = stream;
// streamrecorder = webcamstream.record();
}, onVideoFail);
} else {
alert ('failed');
}
function startRecording() {
streamRecorder = webcamstream.record();
setTimeout(stopRecording, 10000);
}
function stopRecording() {
streamRecorder.getRecordedData(postVideoToServer);
}
function postVideoToServer(videoblob) {
/* var x = new XMLHttpRequest();
x.open('POST', 'uploadMessage');
x.send(videoblob);
*/
var data = {};
data.video = videoblob;
data.metadata = 'test metadata';
data.action = "upload_video";
jQuery.post("http://www.foundthru.co.uk/uploadvideo.php", data, onUploadSuccess);
}
function onUploadSuccess() {
alert ('video uploaded');
}
</script>
<div id="webcamcontrols">
<a class="recordbutton" href="javascript:startRecording();">RECORD</a>
</div>
You should definitely have a look at Kurento. It provides a WebRTC server infrastructure that allows you to record from a WebRTC feed and much more. You can also find some examples for the application you are planning here. It is really easy to add recording capabilities to that demo, and store the media file in a URI (local disk or wherever).
The project is licensed under Apache 2.0 (previously LGPL).
EDIT 1
Since this post, we've added a new tutorial that shows how to add the recorder in a couple of scenarios
kurento-hello-world-recording: simple recording tutorial, showing the different capabilities of the recording endpoint.
kurento-one2one-recording: How to record a one-to-one communication in the media server.
kurento-hello-world-repository: use an external repository to record the file.
Disclaimer: I'm part of the team that develops Kurento.
I believe using Kurento or other MCUs just for recording videos would be a bit of overkill, especially considering the fact that Chrome has had MediaRecorder API support since v47 and Firefox since v25. So at this juncture, you might not even need an external JS library to do the job; try this demo I made to record video/audio using MediaRecorder:
Demo: works in Chrome and Firefox (I intentionally left out the code for pushing the blob to the server)
GitHub source code
If you are running Firefox, you can test it right here (Chrome needs HTTPS):
'use strict'
let log = console.log.bind(console),
id = val => document.getElementById(val),
ul = id('ul'),
gUMbtn = id('gUMbtn'),
start = id('start'),
stop = id('stop'),
stream,
recorder,
counter = 1,
chunks,
media;
gUMbtn.onclick = e => {
let mv = id('mediaVideo'),
mediaOptions = {
video: {
tag: 'video',
type: 'video/webm',
ext: '.mp4',
gUM: {
video: true,
audio: true
}
},
audio: {
tag: 'audio',
type: 'audio/ogg',
ext: '.ogg',
gUM: {
audio: true
}
}
};
media = mv.checked ? mediaOptions.video : mediaOptions.audio;
navigator.mediaDevices.getUserMedia(media.gUM).then(_stream => {
stream = _stream;
id('gUMArea').style.display = 'none';
id('btns').style.display = 'inherit';
start.removeAttribute('disabled');
recorder = new MediaRecorder(stream);
recorder.ondataavailable = e => {
chunks.push(e.data);
if (recorder.state == 'inactive') makeLink();
};
log('got media successfully');
}).catch(log);
}
start.onclick = e => {
start.disabled = true;
stop.removeAttribute('disabled');
chunks = [];
recorder.start();
}
stop.onclick = e => {
stop.disabled = true;
recorder.stop();
start.removeAttribute('disabled');
}
function makeLink() {
let blob = new Blob(chunks, {
type: media.type
}),
url = URL.createObjectURL(blob),
li = document.createElement('li'),
mt = document.createElement(media.tag),
hf = document.createElement('a');
mt.controls = true;
mt.src = url;
hf.href = url;
hf.download = `${counter++}${media.ext}`;
hf.innerHTML = `download ${hf.download}`;
li.appendChild(mt);
li.appendChild(hf);
ul.appendChild(li);
}
button {
margin: 10px 5px;
}
li {
margin: 10px;
}
body {
width: 90%;
max-width: 960px;
margin: 0px auto;
}
#btns {
display: none;
}
h1 {
margin-bottom: 100px;
}
<link type="text/css" rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">
<h1> MediaRecorder API example</h1>
<p>For now it is supported only in Firefox(v25+) and Chrome(v47+)</p>
<div id='gUMArea'>
<div>
Record:
<input type="radio" name="media" value="video" checked id='mediaVideo'>Video
<input type="radio" name="media" value="audio">audio
</div>
<button class="btn btn-default" id='gUMbtn'>Request Stream</button>
</div>
<div id='btns'>
<button class="btn btn-default" id='start'>Start</button>
<button class="btn btn-default" id='stop'>Stop</button>
</div>
<div>
<ul class="list-unstyled" id='ul'></ul>
</div>
<script src="https://code.jquery.com/jquery-2.2.0.min.js"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/js/bootstrap.min.js"></script>
Please check out RecordRTC.
RecordRTC is MIT licensed on GitHub.
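For reference, basic RecordRTC usage looks roughly like this (a sketch based on its documented API; the 10-second cutoff is arbitrary):
// Sketch: record webcam + microphone for 10 seconds with RecordRTC, then get a blob.
navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(function (stream) {
    var recorder = RecordRTC(stream, { type: 'video' });
    recorder.startRecording();
    setTimeout(function () {
        recorder.stopRecording(function () {
            var blob = recorder.getBlob();
            console.log('Recorded', blob.size, 'bytes');
            // The blob can now be played back, downloaded, or uploaded to a server.
        });
    }, 10000);
});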
Yes, as you understood, MediaStreamRecorder is currently unimplemented.
MediaStreamRecorder is a WebRTC API for recording getUserMedia() streams. It allows web apps to create a file from a live audio/video session.
Alternatively, you can do something like this: http://ericbidelman.tumblr.com/post/31486670538/creating-webm-video-from-getusermedia, but the audio part is missing.
You can use RecordRTC-together, which is based on RecordRTC.
It supports recording video and audio at the same time, into separate files. You will need a tool like ffmpeg to merge the two files into one on the server (see the sketch below).
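For example, the merge step on a Node.js backend could spawn ffmpeg like this (a sketch; ffmpeg must be installed on the server, and the file names are placeholders):
// Sketch: mux a video-only WebM and an audio-only WAV into a single WebM file.
const { execFile } = require('child_process');

execFile('ffmpeg', [
    '-i', 'video.webm',    // video-only recording
    '-i', 'audio.wav',     // audio-only recording
    '-c:v', 'copy',        // keep the VP8/VP9 video stream as-is
    '-c:a', 'libopus',     // encode the audio to Opus
    'output.webm'
], (err) => {
    if (err) console.error('ffmpeg failed:', err);
    else console.log('Merged into output.webm');
});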
Web Call Server 4 can record WebRTC audio and video to a WebM container.
The recording is done using the Vorbis codec for audio and the VP8 codec for video.
The initial WebRTC codecs are Opus or G.711 and VP8, so server-side recording requires either Opus/G.711-to-Vorbis transcoding on the server, or VP8-to-H.264 transcoding if it is necessary to use another container, e.g. AVI.
For the record, I also don't have enough knowledge about this, but I found this on GitHub:
<!DOCTYPE html>
<html>
<head>
<title>XSockets.WebRTC Client example</title>
<meta charset="utf-8" />
<style>
body {
}
.localvideo {
position: absolute;
right: 10px;
top: 10px;
}
.localvideo video {
max-width: 240px;
width:100%;
margin-right:auto;
margin-left:auto;
border: 2px solid #333;
}
.remotevideos {
height:120px;
background:#dadada;
padding:10px;
}
.remotevideos video{
max-height:120px;
float:left;
}
</style>
</head>
<body>
<h1>XSockets.WebRTC Client example </h1>
<div class="localvideo">
<video autoplay></video>
</div>
<h2>Remote videos</h2>
<div class="remotevideos">
</div>
<h2>Recordings ( Click on your camera stream to start record)</h2>
<ul></ul>
<h2>Trace</h2>
<div id="immediate"></div>
<script src="XSockets.latest.js"></script>
<script src="adapter.js"></script>
<script src="bobBinder.js"></script>
<script src="xsocketWebRTC.js"></script>
<script>
var $ = function (selector, el) {
if (!el) el = document;
return el.querySelector(selector);
}
var trace = function (what, obj) {
var pre = document.createElement("pre");
pre.textContent = JSON.stringify(what) + " - " + JSON.stringify(obj || "");
$("#immediate").appendChild(pre);
};
var main = (function () {
var broker;
var rtc;
trace("Ready");
trace("Try connect the connectionBroker");
var ws = new XSockets.WebSocket("wss://rtcplaygrouund.azurewebsites.net:443", ["connectionbroker"], {
ctx: '23fbc61c-541a-4c0d-b46e-1a1f6473720a'
});
var onError = function (err) {
trace("error", arguments);
};
var recordMediaStream = function (stream) {
if ("MediaRecorder" in window === false) {
trace("Recorder not started MediaRecorder not available in this browser. ");
return;
}
var recorder = new XSockets.MediaRecorder(stream);
recorder.start();
trace("Recorder started.. ");
recorder.oncompleted = function (blob, blobUrl) {
trace("Recorder completed.. ");
var li = document.createElement("li");
var download = document.createElement("a");
download.textContent = new Date();
download.setAttribute("download", XSockets.Utils.randomString(8) + ".webm");
download.setAttribute("href", blobUrl);
li.appendChild(download);
$("ul").appendChild(li);
};
};
var addRemoteVideo = function (peerId, mediaStream) {
var remoteVideo = document.createElement("video");
remoteVideo.setAttribute("autoplay", "autoplay");
remoteVideo.setAttribute("rel", peerId);
attachMediaStream(remoteVideo, mediaStream);
$(".remotevideos").appendChild(remoteVideo);
};
var onConnectionLost = function (remotePeer) {
trace("onconnectionlost", arguments);
var peerId = remotePeer.PeerId;
var videoToRemove = $("video[rel='" + peerId + "']");
$(".remotevideos").removeChild(videoToRemove);
};
var oncConnectionCreated = function () {
console.log(arguments, rtc);
trace("oncconnectioncreated", arguments);
};
var onGetUerMedia = function (stream) {
trace("Successfully got some userMedia , hopefully a goat will appear..");
rtc.connectToContext(); // connect to the current context?
};
var onRemoteStream = function (remotePeer) {
addRemoteVideo(remotePeer.PeerId, remotePeer.stream);
trace("Opps, we got a remote stream. lets see if its a goat..");
};
var onLocalStream = function (mediaStream) {
trace("Got a localStream", mediaStream.id);
attachMediaStream($(".localvideo video "), mediaStream);
// if user click, video , call the recorder
$(".localvideo video ").addEventListener("click", function () {
recordMediaStream(rtc.getLocalStreams()[0]);
});
};
var onContextCreated = function (ctx) {
trace("RTC object created, and a context is created - ", ctx);
rtc.getUserMedia(rtc.userMediaConstraints.hd(false), onGetUerMedia, onError);
};
var onOpen = function () {
trace("Connected to the brokerController - 'connectionBroker'");
rtc = new XSockets.WebRTC(this);
rtc.onlocalstream = onLocalStream;
rtc.oncontextcreated = onContextCreated;
rtc.onconnectioncreated = oncConnectionCreated;
rtc.onconnectionlost = onConnectionLost;
rtc.onremotestream = onRemoteStream;
rtc.onanswer = function (event) {
};
rtc.onoffer = function (event) {
};
};
var onConnected = function () {
trace("connection to the 'broker' server is established");
trace("Try get the broker controller form server..");
broker = ws.controller("connectionbroker");
broker.onopen = onOpen;
};
ws.onconnected = onConnected;
});
document.addEventListener("DOMContentLoaded", main);
</script>
On line 89 in my case, the oncompleted handler actually appends a link to the recorded file; if you click on that link it will start the download, and you can save that path to your server as a file.
The recording code looks something like this:
recorder.oncompleted = function (blob, blobUrl) {
trace("Recorder completed.. ");
var li = document.createElement("li");
var download = document.createElement("a");
download.textContent = new Date();
download.setAttribute("download", XSockets.Utils.randomString(8) + ".webm");
download.setAttribute("href", blobUrl);
li.appendChild(download);
$("ul").appendChild(li);
};
The blobUrl holds the path. I solved my problem with this; I hope someone will find this useful.
Currently, browsers support recording on the client side:
https://webrtc.github.io/samples/
One can push the recorded file to the server after the connection has ended by uploading it through an HTTP request:
https://webrtc.github.io/samples/src/content/getusermedia/record/
https://github.com/webrtc/samples/tree/gh-pages/src/content/getusermedia/record
This has a drawback: if the user just closes the tab and these operations are not also run on the backend side, the file may not be fully uploaded to the server.
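A minimal sketch of that upload step (the /upload endpoint name is a placeholder):
// Sketch: push the recorded blob to the server once recording has finished.
function uploadRecording(blob) {
    var form = new FormData();
    form.append('video', blob, 'recording.webm');
    return fetch('/upload', { method: 'POST', body: form })
        .then(function (res) { return res.text(); });
}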
As a more stable solution, Ant Media Server can record the stream on the server side; recording functionality is one of its basic features.
antmedia.io
Note: I'm a member of Ant Media team.
Technically, you can use FFmpeg on the backend to mix the video and audio.
