Django issue playing audio files - javascript

I am using JavaScript in a Django project to make an audio player/visualiser, but I am having a problem getting the audio to play when the link is clicked. Having tested a few small functions in app.js, I can see it is working in the HTML file, but for some reason when I try to play the audio it says no supported source was found and that my audio file was not found. The JavaScript works fine in another project, but I cannot get it to work within Django. I am new to Django, so any help would be brilliant, thanks.
export default class AudioPlayer {
constructor(selector = '.audioPlayer', audio = []) {
this.playerElement = document.querySelector(selector);
this.audio = audio;
this.currentAudio = null;
this.createPlayerElements();
this.audioContext = null;
}
createVisualiser() {
this.audioContext = new AudioContext();
const src = this.audioContext.createMediaElementSource(this.audioElement);
const analyser = this.audioContext.createAnalyser();
const canvas = this.visualiserElement;
const ctx = canvas.getContext('2d');
src.connect(analyser);
analyser.connect(this.audioContext.destination);
analyser.fftSize = 128;
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
const barWidth = (canvas.width / bufferLength) * 2.5;
let barHeight;
let bar;
function renderFrame() {
requestAnimationFrame(renderFrame);
bar = 0;
analyser.getByteFrequencyData(dataArray);
ctx.fillStyle = '#000';
ctx.fillRect(0, 0, canvas.width, canvas.height);
for (let i = 0; i < bufferLength; i++) {
barHeight = dataArray[i] - 100;
const r = barHeight + (50 * (i / bufferLength));
ctx.fillStyle = `rgb(${r}, 100, 50)`;
ctx.fillRect(bar, canvas.height - barHeight, barWidth, barHeight);
bar += barWidth + 2;
}
}
renderFrame();
}
createPlayerElements() {
this.audioElement = document.createElement('audio');
this.audioElement.crossOrigin = "anonymous";
const playlistElement = document.createElement('div');
playlistElement.classList.add('playlist');
this.visualiserElement = document.createElement('canvas');
this.playerElement.appendChild(this.audioElement);
this.playerElement.appendChild(playlistElement);
this.playerElement.appendChild(this.visualiserElement);
this.createPlaylistElement(playlistElement);
}
createPlaylistElement(playlistElement) {
this.audio.forEach(audio => {
const audioItem = document.createElement('a');
audioItem.classList.add('musicA');
audioItem.href = audio.url;
audioItem.innerHTML = `<i class="fa fa-play"></i>${audio.name}`;
this.setupEventListener(audioItem);
playlistElement.appendChild(audioItem);
});
}
setupEventListener(audioItem) {
audioItem.addEventListener('click', (e) => {
e.preventDefault();
if (!this.audioContext) {
this.createVisualiser();
}
const isCurrentAudio = audioItem.getAttribute('href') == (this.currentAudio && this.currentAudio.getAttribute('href'));
if (isCurrentAudio && !this.audioElement.paused) {
this.setPlayIcon(this.currentAudio);
this.audioElement.pause();
console.log('paused');
}
else if (isCurrentAudio && this.audioElement.paused) {
this.setPuaseIcon(this.currentAudio);
this.audioElement.play();
}
else {
if (this.currentAudio) {
this.setPlayIcon(this.currentAudio);
}
this.currentAudio = audioItem;
this.setPuaseIcon(this.currentAudio);
this.audioElement.src = this.currentAudio.getAttribute('href');
this.audioElement.play();
}
});
}
setPlayIcon(element) {
const icon = element.querySelector('i');
icon.classList.remove('fa-pause');
icon.classList.add('fa-play');
}
setPuaseIcon(element) {
const icon = element.querySelector('i');
icon.classList.remove('fa-play');
icon.classList.add('fa-pause');
}
}
I have then created a new AudioPlayer in my app.js:
import AudioPlayer from './AudioPlayer.js';
const audioPlayer = new AudioPlayer('.audioPlayer', [
{ url: "musicPlayer/static/songs/song1.mp3", name: "abc" },
]);
The musicPlayer.urls:
urlpatterns = [
path('', views.home, name="home")
]
And the project urls:
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('musicPlayer.urls')),
]
The musicPlayer app's directory structure:
musicPlayer
|
|-__pycache__
|-migrations
|-static
| |
| |-css
| |
| |-javascript
| | |
| | |-app.js
| | |-AudioPlayer.js
| |-songs
|-templates
|-urls.py
|-views.py
| ...

I'm not fully sure because I can't see your project structure, but are the audioPlayer urls inside the musicPlayer app, or not?
It looks like you haven't included the audioPlayer urls anywhere: they're not in musicPlayer.urls or the project urls, so effectively you are trying to go to a URL that the project can't find, because you haven't included it properly.
You either need to include the audioPlayer urls in the musicPlayer urls if the audioPlayer is part of that app, or, if it's its own app, include them in the project urls.
Hopefully that solves the 404. You might also have to make a view and then connect the view to the URL; I'm not entirely sure how the pure JS functions work with URLs.
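As a side note, if song1.mp3 is meant to be served by Django's staticfiles app, the href the player requests usually has to be the URL Django exposes under STATIC_URL, not the on-disk app path. A minimal sketch, assuming STATIC_URL = '/static/' and django.contrib.staticfiles enabled, with the file at musicPlayer/static/songs/song1.mp3:

// Sketch only; assumes the same AudioPlayer import as in app.js above.
// "/static/songs/song1.mp3" is the URL Django serves, not the filesystem path.
const audioPlayer = new AudioPlayer('.audioPlayer', [
    { url: "/static/songs/song1.mp3", name: "abc" },
]);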

Related

Recording video from PIXI canvas with CCapture.js not working

I'm stuck on one task, maybe someone has experience to help with it.
For recording video I am using CCapture.js, but when I start it, everything stops with no errors.
My basic code is below.
I also created another example and you can see it stops at the beginning too:
https://codepen.io/fjtwmjzf-the-lessful/pen/zYZJwvQ
const
    videoContainer = document.getElementById('videoContainer'),
    vw = 1280,
    vh = 720,
    videoUrl = 'assets/video/landscape.mp4';
PIXI.settings.RESOLUTION = 2;
app = new PIXI.Application({
width: vw,
height: vh,
backgroundColor: bgColor,
});
const videoBg = PIXI.Texture.from(videoUrl);
const videoSprite = new PIXI.Sprite(videoBg);
const videoControler = videoSprite._texture.baseTexture.resource.source;
videoContainer.appendChild(app.view);
let capturer = new CCapture( { format: 'webm' } );
let n = 0;
app.ticker.add(() => {
if(n == 0){
capturer.start();
}
capturer.capture(app.view);
if(videoControler.currentTime >= videoControler.duration){
capturer.stop();
capturer.save();
app.ticker.destroy();
}
n += 1;
});
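For reference, CCapture's own examples drive the recording from a plain requestAnimationFrame loop and call capture(canvas) right after each explicitly rendered frame, rather than from inside PIXI's ticker. A sketch only, reusing the app and videoControler variables from the code above; the framerate value is arbitrary and this is not a confirmed fix:

// Sketch: canonical CCapture loop, rendering one PIXI frame explicitly
// before handing the canvas to the capturer.
let done = false;
const capturer2 = new CCapture({ format: 'webm', framerate: 30 });
capturer2.start();

function captureLoop() {
    if (done) return;
    requestAnimationFrame(captureLoop);
    app.renderer.render(app.stage);   // draw one frame
    capturer2.capture(app.view);      // record that frame
    if (videoControler.currentTime >= videoControler.duration) {
        done = true;
        capturer2.stop();
        capturer2.save();
    }
}
captureLoop();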

How to add image to Mxgraph mxGeometry

I am currently working on a project with mxGraph, and the components that I would like to render have been created using the mxGeometry method. I have seen many answers on Stack Overflow that suggest adding an image attribute to the insertVertex or addVertex methods, but I have not used such methods.
Here is a snippet from my code:
class HyComp extends Component {
constructor() {
var w = 50;
var h = 50;
super(new mxGeometry(0, 0, w, h));
this.style += "strokeColor=black;fillColor=white;";
}
createChildren(name) {
let w = this.geometry.width;
let h = this.geometry.height;
this.createLabel(name);
var c_in = new mxCell("", new mxGeometry(-10, 5, 10, 10));
c_in.vertex = true;
c_in.connectable = true;
c_in.connectionType = "hydro";
c_in.style =
"editable=0;movable=0;strokeColor=blue;fillColor=white;portConstraint=west;perimeter=none;";
this.insert(c_in);
var c_out = new mxCell("", new mxGeometry(-10, h - 10 - 5, 10, 10));
c_out.vertex = true;
c_out.connectable = true;
c_out.connectionType = "hydro";
c_out.style =
"editable=0;movable=0;strokeColor=blue;fillColor=white;portConstraint=west;perimeter=none;";
this.insert(c_out);
this.setDimensions(10, 0, w + c_in.geometry.width, h);
}
}
And this is what the class Component looks like:
class Component extends mxCell {
constructor(geometry) {
super("", geometry);
this.vertex = true;
this.connectable = false;
this.isComponent = true;
this.componentName = this.constructor.name;
this.userName = "";
this.setNewId();
this.style = "editable=0;movable=1;";
this.setDimensions(0, 0, 0, 0);
}
I have been trying for several days now to insert an image as the background for these cells, but without success.
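For what it's worth, in mxGraph an image is normally attached through the cell's style string (shape=image;image=<url>) rather than through insertVertex arguments, so a cell built directly from mxCell/mxGeometry can carry one as well. A minimal sketch under that assumption; the image path here is a hypothetical placeholder, not something from the project above:

// Sketch: select the image shape in the style and point it at a file.
var imgCell = new mxCell("", new mxGeometry(0, 0, 50, 50));
imgCell.vertex = true;
// shape=image switches the renderer to an image shape; image=<url> names the picture to draw.
imgCell.style = "shape=image;image=img/pump.png;verticalLabelPosition=bottom;";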

Uncaught TypeError: Cannot read property 'getContext' of null in react render

I'm trying to implement WebSockets in my React frontend to stream some frames to it with a canvas.
At first the code was working fine and I could receive the data very well, but when I needed to add a function call from the parent component, I got an error:
videoSocket.js:51 Uncaught TypeError: Cannot read property 'getContext' of null
This is the code:
import React, { Component } from "react";
import "./canvas.css";
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(",")[0]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: "image/jpeg" });
} /*
ws.onmessage = function(event) {
var img=document.getElementById("video_frames");
var urlObject = URL.createObjectURL(event.data);
img.src = urlObject;
document.body.appendChild(img);
}*/
class VideoSocket extends Component {
state = {
websocket: null,
};
constructor(props) {
super(props);
var ws = new WebSocket("ws://" + props.link);
this.state = {
websocket: ws,
};
}
render() {
const self = this;
this.state.websocket.onmessage = function (event) {
var js = JSON.parse(atob(event.data.split(",")[0]));
var data = b64toBlob(js["data"]);
var personinfo = { infos: js["infos"] };
console.log(self.props);
self.props.setdata(personinfo);
var urlObject = URL.createObjectURL(data);
var canvas = document.getElementById("tools_sketch");
/*
canvas.width = window.innerWidth; // equals window dimension
canvas.height = window.innerHeight;*/
var ctx = canvas.getContext("2d");
var image = new Image();
image.onload = function () {
ctx.drawImage(
image,
0,
0,
image.width,
image.height, // source rectangle
0,
0,
canvas.width,
canvas.height
);
};
image.src = urlObject;
};
return (
<div>
<canvas
id="tools_sketch"
width={this.props.width}
height={this.props.height}
>
Sorry, your browser doesn't support the <canvas> element.
</canvas>
</div>
);
}
}
export default VideoSocket;
When I remove
self.props.setdata(personinfo);
it works fine, but when I add it, nothing I have tried makes it work.
I tried moving the onmessage handler to componentDidMount as below, but I get the same error.
import React, { Component } from "react";
import "./canvas.css";
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(",")[0]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: "image/jpeg" });
} /*
ws.onmessage = function(event) {
var img=document.getElementById("video_frames");
var urlObject = URL.createObjectURL(event.data);
img.src = urlObject;
document.body.appendChild(img);
}*/
class VideoSocket extends Component {
state = {
websocket: null,
};
constructor(props) {
super(props);
var ws = new WebSocket("ws://" + props.link);
this.state = {
websocket: ws,
};
}
componentDidMount() {
const self = this;
this.state.websocket.onmessage = function (event) {
var js = JSON.parse(atob(event.data.split(",")[0]));
var data = b64toBlob(js["data"]);
var personinfo = { infos: js["infos"] };
console.log(self.props);
self.props.setdata(personinfo);
var urlObject = URL.createObjectURL(data);
var canvas = document.getElementById("tools_sketch");
/*
canvas.width = window.innerWidth; // equals window dimension
canvas.height = window.innerHeight;*/
var ctx = canvas.getContext("2d");
var image = new Image();
image.onload = function () {
ctx.drawImage(
image,
0,
0,
image.width,
image.height, // source rectangle
0,
0,
canvas.width,
canvas.height
);
};
image.src = urlObject;
};
}
render() {
return (
<div>
<canvas
id="tools_sketch"
width={this.props.width}
height={this.props.height}
>
Sorry, your browser doesn't support the <canvas> element.
</canvas>
</div>
);
}
}
export default VideoSocket;
This is the server-side code in Python:
#!/usr/bin/env python
import random
import websockets
import asyncio
import os
import numpy as np
import cv2
import json
import base64
#!/usr/bin/env python
# WS server that sends messages at random intervals
class VideoCamera(object):
def __init__(self):
self.video = cv2.VideoCapture('video2.mp4')
def __del__(self):
self.video.release()
def get_frame(self):
while True:
ret, image = self.video.read()
ret, jpeg = cv2.imencode('.jpg', image)
base6 = base64.b64encode(jpeg.tobytes())
yield base6.decode('utf-8')
def gen(camera):
while True:
image = next(camera.get_frame())
yield(image)
async def time(websocket, path):
camera = VideoCamera()
i = 0
while True:
i = i+1
data = next(gen(camera))
js = {'data': data, "infos": [
{'id': 1, 'name': 'name1'}, {'id': 2, 'name': 'name2'}]}
res_bytes = json.dumps(js).encode('utf-8')
base6 = base64.b64encode(res_bytes)
message = base6.decode('utf-8')
await websocket.send(message)
start_server = websockets.serve(time, "127.0.0.1", 5678)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
Edit: the parent component code:
import React, { Component, useState } from "react";
import "antd/dist/antd.css";
import VideoSocket from "../components/videoSocket";
import MyTable from "../components/table";
class Home extends Component {
state = { personinfo: {} };
constructor(props) {
super(props);
let links = {
1: "127.0.0.1:5678",
2: "127.0.0.1:5679",
3: "127.0.0.1:5680",
4: "127.0.0.1:5681",
};
const CameraId = props.match.params.cameraId;
console.log(CameraId);
let link = links[CameraId];
if (link == null) {
link = "127.0.0.1:5678";
}
let curent = link;
this.state = {
links: links,
curent: curent,
};
console.log(link);
}
setData = (personinfo) => {
this.setState({
personinfo: personinfo,
});
};
render() {
return (
<div className=".container-fluid overflow-auto" width="100%">
<div></div>
<VideoSocket
className=""
width={(window.innerWidth * 75) / 100}
height="400px"
link={this.state.curent}
setdata={this.setData}
key="1"
></VideoSocket>
<MyTable personinfo={this.state.personinfo}></MyTable>
</div>
);
}
}
export default Home;
Edit: I tried to add
shouldComponentUpdate() {
return false;
}
to the VideoSocket component, but it still has the same problem
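Not a confirmed fix, but one way to make the canvas lookup robust regardless of re-renders is to hold the canvas in a React ref and guard against it being null when a frame arrives, instead of calling document.getElementById inside the handler. A sketch, assuming the same imports as above; the drawing logic is elided:

// Sketch: ref-based canvas access with a null guard.
class VideoSocketSketch extends Component {
  canvasRef = React.createRef();
  componentDidMount() {
    this.ws = new WebSocket("ws://" + this.props.link);
    this.ws.onmessage = (event) => {
      const canvas = this.canvasRef.current;
      if (!canvas) return;              // not mounted yet (or already unmounted)
      const ctx = canvas.getContext("2d");
      // ...decode event.data and draw onto ctx as in the code above...
    };
  }
  componentWillUnmount() {
    if (this.ws) this.ws.close();
  }
  render() {
    return (
      <canvas
        ref={this.canvasRef}
        width={this.props.width}
        height={this.props.height}
      />
    );
  }
}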

Import class from another file in pure JavaScript [duplicate]

I have a problem with importing JS scripts from code. I have a canvas game and want to import my classes rather than define them in HTML. But when I run my code in Edge, it throws an error.
init.js
import {Tram} from "./tram/tram"
var body;
var curScreen = "menu";
var canvas = document.createElement("canvas");
canvas.width = 1920;
canvas.height = 1080;
var ctx = canvas.getContext("2d");
var tex = {};
var corrHeight =
loadTextures();
window.onload = function () {
body = document.body;
body.appendChild(canvas);
window.onclick = function () {
if (body.requestFullscreen) {
body.requestFullscreen();
}
else if (body.msRequestFullscreen) {
body.msRequestFullscreen();
}
else if (body.mozRequestFullScreen) {
body.mozRequestFullScreen();
}
else if (body.webkitRequestFullscreen) {
body.webkitRequestFullscreen();
}
};
mainLoop();
};
function updateCanvasRect() {
canvas.width = brect.width;
canvas.height = brect.height;
var prop = 16 / 9;
tex.sky.img.height = brect.height;
tex.sky.img.width = brect.height * prop;
tex.sky.patt = ctx.createPattern(tex.sky.img, "repeat-x");
}
function loadTextures() {
tex.sky = importTexture("base/sky");
}
I have the following folder structure:
ts2d/
img/
...
base/
...
tram/
tram.js
...
init.js
load.js
main.js
index.html
Solved! The problem was in the HTML script declaration:
I used this code:
<script src="base/init.js"></script>
But I didn't know that I needed to specify the type attribute; with the code below the import works perfectly!
<script src="base/init.js" type="module"></script>
UPD: variables are module-scoped; to use them in another file, export them:
var myVar = "string";
export {myVar};
and then in your file
import {myVar} from "<file>";
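For completeness, a class export works the same way, and note that native browser modules resolve import paths literally, so the specifier usually needs the .js extension. A sketch of what tram/tram.js would export; the class body is only illustrative:

// tram/tram.js -- sketch; only the export shape matters here
export class Tram {
    constructor(x, y) {
        this.x = x;
        this.y = y;
    }
}

// init.js
import { Tram } from "./tram/tram.js"; // keep the .js extension for browser module resolution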

How to lower mic input volume using navigator.mediaDevices.getUserMedia?

I'm creating an audio recording app using navigator.mediaDevices.getUserMedia() and it records every sound around me, even very quiet sounds 10 m away from me. I DO NOT play this sound, I only visualise it depending on volume, so I only need fairly loud sounds or ones that are close to the mic, because there's too much interference.
Also, if I enable playback to hear my mic input and start making a quiet noise like tapping on the table, I can't hear that sound in the playback, but I see it in the visualiser, and this is exactly what I don't want.
Here's my code:
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this.audioContext = new AudioContext();
this.sourceNode = this.audioContext.createMediaStreamSource(stream);
this.analyserNode = this.audioContext.createAnalyser();
this.sourceNode.connect(this.analyserNode);
const data = new Float32Array(this.analyserNode.fftSize);
this.analyserNode.getFloatTimeDomainData(data);
So how can I lower the mic sensitivity using the Web Audio API, lower the mic input volume, or maybe transform the data from the analyser? I've read about AudioContext.createGain() and gain.volume, but it's used for the output audio volume, not the input one.
I've read about AudioContext.createGain() and gain.volume, but it's used for the output audio volume, not the input one
No, it is used to control the volume of the audio that goes through it.
You have to see your audio context nodes as a chain, then you might understand that you can indeed use a GainNode to control the input volume of the next node to which it is connected.
Like, e.g., if we declare something like
gainNode.gain.value = 0.5;
input.connect(gainNode);
gainNode.connect(analyserNode);
input.connect(audioContext.destination);
it can be seen as
Input [mic] ===> GainNode ===> AnalyserNode
    100%     ||     50%            50%
             ||
              ===> AudioContext Output
                        100%
So your gainNode here did lower the volume of your AnalyserNode, but not that of the context output.
But this is not really what you want.
Indeed, the AnalyserNode API has minDecibels and maxDecibels properties which will do exactly what you want (filter out sounds outside that dB range).
But these properties make sense only for frequency data (getXXXFrequencyData) since waveform doesn't take volume into account.
However, it is still possible to check if this frequency data is in our required bounds before deciding if we should draw our waveform or not.
polyfill();
(async() => {
const ctx = new AudioContext();
const input = await loadFileAsBufferNode(ctx);
const analyser = ctx.createAnalyser();
analyser.minDecibels = -90;
analyser.maxDecibels = -10;
analyser.fftSize = 512;
input.connect(analyser);
const gainNode = ctx.createGain();
input.connect(gainNode);
const bufferLength = analyser.frequencyBinCount;
const freqArray = new Uint8Array(bufferLength);
const waveArray = new Uint8Array(bufferLength);
const canvasCtx = canvas.getContext('2d');
const WIDTH = canvas.width;
const HEIGHT = canvas.height;
canvasCtx.lineWidth = 2;
draw();
// taken from https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/maxDecibels#Example
function draw() {
requestAnimationFrame(draw);
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
analyser.getByteFrequencyData(freqArray);
gainNode.gain.value = 1;
analyser.getByteTimeDomainData(waveArray);
var barWidth = (WIDTH / bufferLength) * 2.5;
var barHeight;
var x = 0;
for (var i = 0; i < bufferLength; i++) {
barHeight = freqArray[i];
canvasCtx.fillStyle = 'rgb(' + (barHeight + 100) + ',50,50)';
canvasCtx.fillRect(x, HEIGHT - barHeight / 2, barWidth, barHeight / 2);
x += barWidth + 1;
}
// here we check if the volume is in bounds
if (freqArray.some(isTooHigh) || !freqArray.some(hasValue)) {
canvasCtx.fillRect(0, HEIGHT / 2, WIDTH, 1);
gainNode.gain.value = 0;
return;
}
canvasCtx.beginPath();
var sliceWidth = WIDTH * 1.0 / bufferLength;
var x = 0;
for (var i = 0; i < bufferLength; i++) {
var v = waveArray[i] / 128.0;
var y = v * HEIGHT / 2;
if (i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
}
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
};
function isTooHigh(val) {
return val === 255;
}
function hasValue(val) {
return val;
}
// DOM
maxDB.oninput = e => {
const max = +maxDB.value;
if (+minDB.value >= max) minDB.value = analyser.minDecibels = max - 1;
analyser.maxDecibels = max;
}
minDB.oninput = e => {
const min = +minDB.value;
if (+maxDB.value <= min) maxDB.value = analyser.maxDecibels = min + 1;
analyser.minDecibels = min;
}
out.onchange = e => {
if (out.checked)
gainNode.connect(ctx.destination);
else
gainNode.disconnect(ctx.destination);
};
})();
function loadFileAsBufferNode(ctx, url = 'https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3') {
return fetch(url)
.then(r => r.arrayBuffer())
.then(buf => ctx.decodeAudioData(buf))
.then(bufferNode => {
const source = ctx.createBufferSource();
source.buffer = bufferNode;
source.loop = true;
source.start(0);
return source;
});
};
/* for Safari */
function polyfill() {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
try {
const prom = new AudioContext().decodeAudioData(new ArrayBuffer()).catch(e => {});
} catch (e) {
const prev = AudioContext.prototype.decodeAudioData;
Object.defineProperty(AudioContext.prototype, 'decodeAudioData', {
get: () => asPromise
});
function asPromise(audioBuffer, done, failed) {
return new Promise((res, rej) => {
prev.apply(this, [audioBuffer, onsuccess, onerror]);
function onsuccess(buf) {
if (typeof done === 'function') done(buf);
res(buf);
}
function onerror(err) {
if (typeof failed === 'function') failed(err);
rej(err);
}
});
}
}
}
<label>min<input type="range" id="minDB" min="-100" max="-1" value="-90"></label>
<label>max<input type="range" id="maxDB" min="-99" max="0" value="-10"></label>
<label>output audio<input type="checkbox" id="out"></label>
<canvas id="canvas"></canvas>
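Carried back to the question's getUserMedia chain, the relevant part is just the decibel window on the analyser. A minimal sketch, with the -45 dB floor as an arbitrary starting point to tune:

// Sketch: same chain as in the question, but with the analyser's dB window
// narrowed so quiet, far-away sounds fall below minDecibels and read as 0
// in getByteFrequencyData.
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const audioContext = new AudioContext();
const sourceNode = audioContext.createMediaStreamSource(stream);
const analyserNode = audioContext.createAnalyser();
analyserNode.minDecibels = -45;   // raise this floor to ignore more of the quiet background
analyserNode.maxDecibels = -10;
sourceNode.connect(analyserNode);
const freqData = new Uint8Array(analyserNode.frequencyBinCount);
analyserNode.getByteFrequencyData(freqData);   // bins below minDecibels come out as 0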
