I'm having an issue with ScriptProcessorNode and lag. Even when doing an incredibly simple process that just takes the sample and does x %= 0.35. Why does this simple ScriptProcessorNode (included below) have so much latency?
The MDN says nothing about latency: https://developer.mozilla.org/en-US/docs/Web/API/ScriptProcessorNode
Also, I know that ScriptProcessorNode is to be deprecated soon, but audioWorklets aren't yet implemented, so this is the best there is.
// Base pitch (220 Hz = A3) and the shared AudioContext (webkit prefix
// for older Safari).
var baseFreq = 220;
var audioCtx = new(window.AudioContext || window.webkitAudioContext)();
// Continuously running sine oscillator; only its frequency is changed.
var osc = audioCtx.createOscillator();
osc.type = 'sine';
osc.frequency.value = baseFreq;
osc.start();
// Just-intonation frequency ratios stepped through on each keypress.
var intervals = [1, 1.125, 1.25, 1.5, 1.6666666, 1.875, 2, 4]
var index = 0;
// Advance to the next interval ratio (wrapping around) and retune the
// oscillator relative to the base frequency.
function newTone() {
  index = (index + 1) % intervals.length;
  osc.frequency.value = baseFreq * intervals[index];
}
// ScriptProcessorNode that wraps each sample with the % operator.
// BUG FIX (the reported lag): createScriptProcessor() with no arguments
// lets the browser pick the buffer size — commonly 4096 frames, which is
// ~93 ms of buffering at 44.1 kHz and is audible as latency.  Request a
// small power-of-two buffer (256 frames ≈ 6 ms) to keep latency low; if
// you hear glitches on a slow machine, step up to 512 or 1024.
var modulus = audioCtx.createScriptProcessor(256);
modulus.onaudioprocess = function(audioProcessingEvent) {
  var inputBuffer = audioProcessingEvent.inputBuffer;
  var outputBuffer = audioProcessingEvent.outputBuffer;
  // Per channel, per sample: out = in % moduAmount (moduAmount is the
  // module-level toggle defined below; `var` hoisting makes it visible).
  for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++) {
    var inputData = inputBuffer.getChannelData(channel);
    var outputData = outputBuffer.getChannelData(channel);
    for (var sample = 0; sample < inputBuffer.length; sample++) {
      outputData[sample] = inputData[sample] % moduAmount;
    }
  }
};
// Modulus presets: 0.35 wraps the waveform audibly; 2 leaves a
// [-1, 1] signal unchanged by the % operator (effectively "off").
var moduLow = 0.35;
var moduHigh = 2;
var moduAmount = moduLow;

// Toggle between the two modulus presets.
function turnModulusOnOff() {
  moduAmount = moduAmount === moduLow ? moduHigh : moduLow;
}
// Master gain at 5% feeding the speakers.
var gain = audioCtx.createGain();
gain.gain.value = 0.05;
gain.connect(audioCtx.destination);
// The oscillator is routed both through the modulus effect and
// directly to the gain, so the dry tone and the effect are mixed.
modulus.connect(gain)
osc.connect(modulus);
osc.connect(gain);
// Any keypress advances to the next interval.
document.body.addEventListener("keydown", newTone);
html,
body {
height: 100%;
width: 100%;
font-family: arial;
}
<button onclick="newTone()">Next Tone</button>
<button onclick="turnModulusOnOff()">Mute/Unmute Modulus</button>
<br>
To test, click anywhere and then press any key to change the tone. Notice when Modulus is muted how responsive the tone change is. Notice the lag between the sine frequency change and the modulus effect.
<br>
<b>For Extra Fun: Go crazy on your keyboard</b>
Related
I'm creating audio record app using navigator.mediaDevices.getUserMedia() and it records every sound around me even very quiet and which is 10m away from me. I DO NOT play this sound, I only visualize it depending on volume, so I need only quite loud sounds or which are close to mic, cause there's too much interference.
Also, if I enable playback to hear my mic input and start making a quiet noise like tapping on the table, I can't hear this sound in the playback, but I do see it in the visualizer — and this is exactly what I don't want.
Here's my code:
// Ask for microphone access (prompts the user), then wire it into an
// analyser graph: mic stream -> MediaStreamSource -> AnalyserNode.
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this.audioContext = new AudioContext();
this.sourceNode = this.audioContext.createMediaStreamSource(stream);
this.analyserNode = this.audioContext.createAnalyser();
this.sourceNode.connect(this.analyserNode);
// Snapshot of the current waveform (time-domain samples).
const data = new Float32Array(this.analyserNode.fftSize);
this.analyserNode.getFloatTimeDomainData(data);
So how can I lower mic sensitivity using Web Audio API or lower mic input volume or maybe transform data from analyser? I've read about AudioContext.createGain(), gain.volume, but it's used for output audio volume, not input one
I've read about AudioContext.createGain(), gain.volume, but it's used for output audio volume, not input one
No, it is used to control the volume of the audio that goes through it.
You have to see your audio context nodes as a chain, then you might understand that you can indeed use a GainNode to control the input volume of the next node to which it is connected.
Like e.g if we declare something like
gainNode.gain.value = 0.5;
input.connect(gainNode);
gainNode.connect(analyserNode);
input.connect(audioContext.destination);
it can be seen as
Input [mic] ===> GainNode ===> AnalyserNode
100% || 50% 50%
||
===> AudioContext Output
100%
So your gainNode here did lower the volume of your AnalyserNode, but not the one of the context output.
But this is not really what you want.
Indeed, the AnalyserNode API has minDecibels and maxDecibels properties which will do exactly what you want (filter out sounds that fall outside the dB range).
But these properties make sense only for frequency data (getXXXFrequencyData) since waveform doesn't take volume into account.
However, it is still possible to check if this frequency data is in our required bounds before deciding if we should draw our waveform or not.
// Demo: route a decoded music file through an AnalyserNode and draw a
// frequency-bar + waveform visualisation, gating the waveform (and the
// optional audible output) on whether the signal lies inside the
// analyser's minDecibels/maxDecibels window.
polyfill();
(async() => {
const ctx = new AudioContext();
const input = await loadFileAsBufferNode(ctx);
const analyser = ctx.createAnalyser();
// dB window used by getByteFrequencyData when scaling to 0-255.
analyser.minDecibels = -90;
analyser.maxDecibels = -10;
analyser.fftSize = 512;
input.connect(analyser);
// Separate gain path so audio can be muted without stopping analysis.
const gainNode = ctx.createGain();
input.connect(gainNode);
const bufferLength = analyser.frequencyBinCount;
const freqArray = new Uint8Array(bufferLength);
const waveArray = new Uint8Array(bufferLength);
// NOTE(review): `canvas`, `maxDB`, `minDB` and `out` rely on browser
// id-to-global mapping of the elements in the accompanying HTML.
const canvasCtx = canvas.getContext('2d');
const WIDTH = canvas.width;
const HEIGHT = canvas.height;
canvasCtx.lineWidth = 2;
draw();
// taken from https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/maxDecibels#Example
// Per-frame renderer: bars from frequency data, then either a waveform
// (signal in bounds) or a flat line with the audible path muted.
function draw() {
requestAnimationFrame(draw);
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
analyser.getByteFrequencyData(freqArray);
gainNode.gain.value = 1;
analyser.getByteTimeDomainData(waveArray);
var barWidth = (WIDTH / bufferLength) * 2.5;
var barHeight;
var x = 0;
for (var i = 0; i < bufferLength; i++) {
barHeight = freqArray[i];
canvasCtx.fillStyle = 'rgb(' + (barHeight + 100) + ',50,50)';
canvasCtx.fillRect(x, HEIGHT - barHeight / 2, barWidth, barHeight / 2);
x += barWidth + 1;
}
// here we check if the volume is in bounds
// Clipped (any bin at 255) or fully silent frame: draw a flat line,
// mute the audible path, and skip the waveform.
if (freqArray.some(isTooHigh) || !freqArray.some(hasValue)) {
canvasCtx.fillRect(0, HEIGHT / 2, WIDTH, 1);
gainNode.gain.value = 0;
return;
}
canvasCtx.beginPath();
var sliceWidth = WIDTH * 1.0 / bufferLength;
var x = 0;
for (var i = 0; i < bufferLength; i++) {
var v = waveArray[i] / 128.0;
var y = v * HEIGHT / 2;
if (i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
}
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
};
// A byte value of 255 means the bin reached maxDecibels.
function isTooHigh(val) {
return val === 255;
}
// Truthy for any non-zero bin.
function hasValue(val) {
return val;
}
// DOM
// Keep the two dB sliders mutually consistent (min strictly below max).
maxDB.oninput = e => {
const max = +maxDB.value;
if (+minDB.value >= max) minDB.value = analyser.minDecibels = max - 1;
analyser.maxDecibels = max;
}
minDB.oninput = e => {
const min = +minDB.value;
if (+maxDB.value <= min) maxDB.value = analyser.maxDecibels = min + 1;
analyser.minDecibels = min;
}
// Toggle audible output by (dis)connecting the gain path.
out.onchange = e => {
if (out.checked)
gainNode.connect(ctx.destination);
else
gainNode.disconnect(ctx.destination);
};
})();
// Fetch an mp3, decode it with the given AudioContext, and resolve with
// a started, looping AudioBufferSourceNode ready to be connect()ed.
function loadFileAsBufferNode(ctx, url = 'https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3') {
return fetch(url)
.then(r => r.arrayBuffer())
.then(buf => ctx.decodeAudioData(buf))
.then(audioBuffer => {
const source = ctx.createBufferSource();
source.buffer = audioBuffer;
// BUG FIX: AudioBufferSourceNode has no `repeat` property — the
// original assignment was a silent no-op; the looping flag is `loop`.
source.loop = true;
source.start(0);
return source;
});
};
/* for Safari */
// Safari's webkitAudioContext historically only implemented the
// callback form of decodeAudioData.  This shim detects that case and
// replaces the method with a Promise-returning wrapper that still
// honours optional success/error callbacks.
function polyfill() {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
try {
// Probe: if the promise form exists this resolves/rejects harmlessly.
const prom = new AudioContext().decodeAudioData(new ArrayBuffer()).catch(e => {});
} catch (e) {
const prev = AudioContext.prototype.decodeAudioData;
Object.defineProperty(AudioContext.prototype, 'decodeAudioData', {
get: () => asPromise
});
function asPromise(audioBuffer, done, failed) {
return new Promise((res, rej) => {
// The executor is an arrow function, so `this` here is asPromise's
// receiver — the AudioContext when called as ctx.decodeAudioData().
prev.apply(this, [audioBuffer, onsuccess, onerror]);
function onsuccess(buf) {
if (typeof done === 'function') done(buf);
res(buf);
}
function onerror(err) {
if (typeof failed === 'function') failed(err);
rej(err);
}
});
}
}
}
<label>min<input type="range" id="minDB" min="-100" max="-1" value="-90"></label>
<label>max<input type="range" id="maxDB" min="-99" max="0" value="-10"></label>
<label>output audio<input type="checkbox" id="out"></label>
<canvas id="canvas"></canvas>
I'm creating a audio player with visualizer.
But currently when I press the input to start the audio player my debug console returns:
Uncaught (in promise) DOMException: Failed to load because no
supported source was found.
What I'm currently doing is setting the whole audio element up in JS / jQuery:
// Tuning constants and shared mutable state for the stream player +
// frequency-bar visualizer below.
var bins = 512; // requested bar count; analyser fftSize is bins * 4 (see setupAudioApi)
var backgroundColour = "#2C2E3B";
var barColour = "#EC1A55";
var floorLevel = 32; // byte magnitude subtracted from every bar as a noise floor
var audioContext;
var audioBuffer;
var audioAnalyserNode;
var initialized = false;
var songText = "";
var textSize;
var freqLookup = []; // bar index -> analyser bin (built by initFreqLookupTable)
var canvasContext;
var isStream = true;
var canvasWidth;
var canvasHeight;
var src; // MediaElementSource; created at most once in setupAudioApi
var audioElement;
var isPlaying = false;
var volume = 1;
// Create the <audio> element, point it at the stream, start playback
// and bring up the visualizer canvas.
function play() {
audioElement = document.createElement('audio');
// Opus support check stuff
var streamEndpoint = 'http://**.**.**.**:8003/stream';
// NOTE(review): canPlayOpus is computed but never used.
var canPlayOpus = (typeof audioElement.canPlayType === "function" && audioElement.canPlayType('audio/ogg; codecs="opus"') !== "");
// Accept 0-100 style volume values by scaling them into [0, 1].
if(volume > 1) {
volume = volume / 100;
}
// NOTE(review): an http:// stream loaded from an https page is blocked
// as mixed content, which surfaces as "no supported source was found"
// — confirm the page and stream schemes match.
audioElement.src = streamEndpoint;
audioElement.crossOrigin = 'anonymous';
audioElement.volume = volume;
audioElement.play();
isPlaying = true;
setUpCanvas(audioElement);
}
// Stop playback: mark state stopped, halt the element, rewind, and
// release the stream URL so the connection is dropped.
function pause() {
  isPlaying = false;
  audioElement.pause();
  audioElement.currentTime = 0;
  audioElement.src = '';
}
// Initialise the canvas and (lazily, once) the AudioContext, then hook
// the playing element into the analyser graph.  Errors are only logged.
function setUpCanvas(audioElement){
try {
initCanvas(document.getElementById("canvas"));
// Reuse one AudioContext across play/pause cycles.
if(typeof audioContext === 'undefined') {
audioContext = new AudioContext();
}
if (audioElement) {
isStream = true;
setupAudioApi(true, audioElement);
}
} catch(e) {
console.log(e);
}
}
// Wire the <audio> element into the Web Audio graph (at most once) and
// build the bar->bin frequency lookup table.
// isStream: true when analysing the streaming element.
// audioElement: the <audio> tag to analyse.
function setupAudioApi(isStream, audioElement) {
//var src;
if (isStream){
// Create the graph only once: createMediaElementSource() throws if
// called twice for the same element.
if(typeof src === 'undefined'){
src = audioContext.createMediaElementSource(audioElement);
// BUG FIX: the original set audioContext.crossOrigin = "anonymous",
// but AudioContext has no such property — CORS is configured on the
// media element itself (already done in play()).
audioAnalyserNode = audioContext.createAnalyser();
audioAnalyserNode.fftSize = bins * 4;
src.connect(audioAnalyserNode);
audioAnalyserNode.connect(audioContext.destination);
}
}
if (!isStream) {
// NOTE(review): src is a MediaElementSource here, which has no
// start() method — this branch looks like leftover buffer-source
// code; verify before relying on it.
src.start();
}
initialized = true;
initFreqLookupTable();
}
// Size the canvas backing store to match its CSS layout box, cache the
// 2d context and dimensions in the module globals, and kick off the
// paint loop.
function initCanvas(canvasElement) {
  const { clientWidth, clientHeight } = canvasElement;
  canvasContext = canvasElement.getContext('2d');
  canvasElement.width = clientWidth;
  canvasElement.height = clientHeight;
  canvasWidth = canvasElement.width;
  canvasHeight = canvasElement.height;
  requestAnimationFrame(paint);
}
// Map bin index n (0 .. binCount-1) onto a logarithmic frequency scale
// running from `start` Hz up to `stop` Hz.
function getFreqPoint(start, stop, n, binCount) {
  const ratio = stop / start;
  return start * ratio ** (n / (binCount - 1));
}
// Precompute, for each drawn bar, which analyser bin to sample so the
// bars follow a logarithmic (perceptual) frequency distribution.
// Indices are forced strictly increasing so no bar reuses a bin.
function initFreqLookupTable() {
  const binCount = audioAnalyserNode.frequencyBinCount;
  let previous = 0;
  for (let bar = 0; bar < binCount / 2; bar++) {
    //Scale to perceived frequency distribution
    const freq = getFreqPoint(20, 20000, bar * 2, binCount);
    let bin = Math.floor(binCount * freq / 20000);
    // Skip past any bin already used by an earlier bar.
    while (bin <= previous) {
      bin++;
    }
    previous = bin;
    freqLookup.push(bin);
  }
}
//Render some fancy bars
// Per-frame renderer: clears and repaints the background, then draws
// one bar per loop index using the analyser's byte frequency data.
function paint() {
requestAnimationFrame(paint);
if(!initialized) {
// NOTE(review): this fires on every frame until initialization; a
// blocking alert() inside a rAF loop will re-trigger repeatedly.
alert('Er is iets fout gegaan');
return false;
}
canvasContext.clearRect(0, 0, canvasWidth, canvasHeight);
canvasContext.fillStyle = backgroundColour;
canvasContext.fillRect(0, 0, canvasWidth, canvasHeight);
var bins = audioAnalyserNode.frequencyBinCount;
var data = new Uint8Array(bins);
audioAnalyserNode.getByteFrequencyData(data);
canvasContext.fillStyle = barColour;
for(var i = 0; i < bins; i++) {
// NOTE(review): initFreqLookupTable only fills bins / 2 entries, so
// for the upper half `point` is undefined and the bar height computes
// to NaN — the loop bound and table size should probably agree.
var point = freqLookup[i];
//Pretty much any volume will push it over 128 so we set that as the bottom threshold
//I suspect I should be doing a logarithmic space for the volume as well
var height = Math.max(0, (data[point] - floorLevel));
//Scale to the height of the bar
//Since we change the base level in the previous operations, 256 should be changed to 160 (i think) if we want it to go all the way to the top
height = (height / (256 - floorLevel)) * canvasHeight * 0.8;
var width = Math.ceil(canvasWidth / ((bins / 2) - 1));
canvasContext.fillRect(i * width, canvasHeight - height, width, height);
}
}
The stream is in audio/mpeg format, it does load when I simply create an audio element in HTML with a src.
Can someone help me clarify and find the solution to the DOMException I'm getting. I have been searching other cases of this error but the fixes there didn't resolve the problem.
Try creating the audio tag like this:
var audio = new Audio('audio_file.mp3');
And try setting the type:
audio.type = "audio/mpeg";
I think that will fix your problem.
This creates an element, identical to the one you use in your code.
I suggest you put an extension on your stream.
I know this way works, and I don't know why the other way doesn't.
I'm using setInterval to call a function that animates a fractal on a HTML5 canvas. There is also a slider to allow the user to change the quality of the fractal. Everything works fine until I start changing the slider. When I change it, the fractal animation becomes choppy, and eventually the "drawFractal" function stops being called.
Here is the slider HTML:
<input type="range" id="qualitySlider" min="1" max="10"></input>
Here is the javascript (it just generates a fractal):
var count = 0.5; // animation phase driving the Julia constant
var slider = document.getElementById("qualitySlider");
var g = document.getElementById("drawingCanvas").getContext("2d");
// Draw one animation frame of the Julia-set fractal.  Reads the
// quality slider (cell size in px, higher = coarser/faster) and the
// module-global `count`, which animates the Julia constant.
function drawFractal() {
var cellSize = Math.ceil(slider.value);
//canvas is 700 by 400
g.fillStyle = "black";
g.clearRect(0, 0, 700, 400);
//Eveything from here to the end of this function generates the fractal
// Animated Julia constant c = (realC, imagC).
var imagC = Math.cos(count)*0.8;
var realC = Math.sin(count)*0.5;
// BUG FIX: x, y and i were implicit globals (missing `var`), which
// leaks state between calls and breaks under strict mode.
for (var x = 0; x < 700; x+=cellSize) {
for (var y = 0; y < 400; y+=cellSize) {
var yCoord = (x / 700.0 - 0.5)*3;
var xCoord = (y / 400.0 - 0.5)*3;
var real = xCoord;
var imag = yCoord;
var broken = false;
// Iterate z -> z^2 + c, bailing out once |z|^2 >= 4.
for (var i = 0; i < 8; i++) {
var temp = real*real - imag*imag + realC;
imag = 2*imag*real + imagC;
real = temp;
if (real*real + imag*imag >= 4) {
broken = true;
break;
}
}
// Cells that never escaped are considered inside the set.
if (!broken) {
g.fillRect(x, y, cellSize, cellSize);
}
}
}
count = count + 0.04;
}
// Kick off the animation: attempt a redraw every 60 ms.
setInterval(drawFractal, 60);
I just need the "drawFractal" function to be called reliably every 60 milliseconds.
This is my improved code. I just used requestAnimationFrame to recursively call the "drawFractal" function. I also restricted the animation to 24 frames/sec with the setTimeout function.
var count = 0.5; // animation phase driving the Julia constant
var qualitySlider = document.getElementById("qualitySlider");
var g = document.getElementById("drawingCanvas").getContext("2d");
// Improved frame renderer: identical fractal maths, but it reschedules
// itself via setTimeout + requestAnimationFrame (~24 fps cap) instead
// of relying on an external setInterval.
function drawFractal() {
var cellSize = Math.ceil(qualitySlider.value);
//canvas is 700 by 400
g.fillStyle = "black";
g.clearRect(0, 0, 700, 400);
// Animated Julia constant c = (realC, imagC).
var imagC = Math.cos(count)*0.8;
var realC = Math.sin(count)*0.5;
// BUG FIX: x, y and i were implicit globals (missing `var`), which
// leaks state between calls and breaks under strict mode.
for (var x = 0; x < 700; x+=cellSize) {
for (var y = 0; y < 400; y+=cellSize) {
var yCoord = (x / 700.0 - 0.5)*3;
var xCoord = (y / 400.0 - 0.5)*3;
var real = xCoord;
var imag = yCoord;
var broken = false;
// Iterate z -> z^2 + c, bailing out once |z|^2 >= 4.
for (var i = 0; i < 8; i++) {
var temp = real*real - imag*imag + realC;
imag = 2*imag*real + imagC;
real = temp;
if (real*real + imag*imag >= 4) {
broken = true;
break;
}
}
if (!broken) {
g.fillRect(x, y, cellSize, cellSize);
}
}
}
count = count + 0.04;
// Cap the frame rate: wait ~41 ms, then sync with the display.
setTimeout(function() {
requestAnimationFrame(drawFractal);
}, 41);
}
// Start the self-scheduling render loop.
drawFractal();
You are using setInterval() to call drawFractal every 60 ms, and then every time drawFractal is executed, you're calling setInterval() again, which is unnecessary. You now have two timers attempting to draw fractals every 60 ms... then you'll have 4, then 8, etc.
You need to either (1) call setInterval() once at the start of program execution and not call it again, or (2) switch to using setTimeout(), and call it at the end of each drawFractal().
I'd use the second option, just in case your fractal ever takes more than 60 ms to draw.
Spent 6 hours today building this from a tutorial, modifying it for my own elements. I've been trying for hours to get it to work and I just can't :/
Here is a link to the tutorial i was following :
http://gamedevelopment.tutsplus.com/tutorials/create-a-simple-space-shooter-game-in-html5-with-easeljs--active-10944
Zip of all the files including the gifs etc. :
http://www.filedropper.com/shooter_1
Raw Javascript Code:
/* Variable Declarations */
// Global game state for the EaselJS space-shooter tutorial.
var Lives = new Container(); //stores the lives gfx
var Bullets = new Container(); //stores the bullets gfx
var Enemies = new Container(); //stores the enemies gfx
var Boss_Health = 20;
var Score; // Text display node; its .text holds the numeric score as a string
var Gfx_Loaded = 0; //used as a preloader, counts the already loaded items
var Center_X = 960;
var Center_Y = 540;
var Ticker_Listener = new Object(); //used as a Ticker listener
var Timer_Source; //references a setInterval method
/* Define Canvas*/
var canvas;
var stage;
/* Background */
var BG_Img = new Image();
var BG;
var BG2_Img = new Image();
var BG2;
/* Characters */
var Player1_Img = new Image();
var Player1;
/* Enemies */
var Enemy1_Img = new Image();
/* Boss */
var Boss1_Img = new Image();
var Boss1;
/* Lives */
var Life_Img = new Image();
/* Bullets */
var Bullet_Img1 = new Image();
/* Alerts */
var Win_Img = new Image ();
var Lose_Img = new Image ();
var Win;
var Lose;
/*Initiation Function*/
// Entry point (called from <body onload>): grabs the canvas, builds the
// EaselJS stage, registers sounds, starts the image preload chain and
// the ticker.
function Main () {
// BUG FIX: the DOM method is getElementById (capital B);
// getElementbyId is undefined and throws immediately.
canvas = document.getElementById('Shooter');
// BUG FIX: `new stage(canvas)` invoked the very variable being
// assigned.  With the CreateJS builds this page loads, the
// constructor lives on the createjs namespace.
stage = new createjs.Stage(canvas);
stage.mouseEventsEnabled = true;
/*Sounds*/
// NOTE(review): SoundJS 0.6.x replaced SoundJS.addBatch with
// createjs.Sound.registerSounds — confirm against the version loaded
// in the HTML before shipping.
SoundJS.addBatch([
{name:'Boss', src:'Boss.mp3', instances:1},
{name:'Shuriken_Hit', src:'Shuriken_Hit.mp3', instances:10},
{name:'Kunai_Throw', src:'Kunai_Throw.mp3', instances:10}]);
/* Load GFX */
// Each image fires loadGfx on load; loadGfx counts to 9 before the
// game view is assembled.
BG_Img.src = 'BG_Img.jpg';
BG_Img.name = 'BG';
BG_Img.onload = loadGfx;
BG2_Img.src = 'BG2_Img.jpg';
BG2_Img.name = 'BG2';
BG2_Img.onload = loadGfx;
Player1_Img.src = 'Naruto_Idle.gif';
Player1_Img.name = 'Player1';
Player1_Img.onload = loadGfx;
Enemy1_Img.src = 'Basic_Enemy.gif';
Enemy1_Img.name = 'Enemy1';
Enemy1_Img.onload = loadGfx;
Boss1_Img.src = 'Akatsuki_Boss.gif';
Boss1_Img.name = 'Boss1';
Boss1_Img.onload = loadGfx;
Life_Img.src = 'Life.gif';
Life_Img.name = 'Life';
Life_Img.onload = loadGfx;
Bullet_Img1.src = 'Rasengan_Mid_Air.gif';
Bullet_Img1.name = 'Bullet';
Bullet_Img1.onload = loadGfx;
Win_Img.src = 'Game_Win.gif';
Win_Img.name = 'Game_Win';
Win_Img.onload = loadGfx;
Lose_Img.src = 'Game_Over.gif';
Lose_Img.name = 'Game_Over';
Lose_Img.onload = loadGfx;
/* Ticker */
// BUG FIX: in 2015-era CreateJS the Ticker lives on the createjs
// namespace and uses the EventDispatcher API (addEventListener)
// instead of the removed addListener.
createjs.Ticker.setFPS(30);
createjs.Ticker.addEventListener("tick", stage);
}
/*Preload Function */
// Runs once per loaded image: builds the Bitmaps that must exist before
// layout, counts loads, and fires addGameView() once all 9 assets are in.
function loadGfx(e)
{
// BUG FIX: the original used `=` (assignment) instead of `===`, so
// every branch ran and e.target.name was clobbered on the way through.
if(e.target.name === 'BG'){BG = new Bitmap(BG_Img);}
if(e.target.name === 'BG2'){BG2 = new Bitmap(BG2_Img);}
if(e.target.name === 'Player1'){Player1 = new Bitmap(Player1_Img);}
// BUG FIX: the counter declared at the top of the file is Gfx_Loaded;
// `gfxLoaded` was an undeclared typo, so the 9-asset gate never fired.
Gfx_Loaded++;
if(Gfx_Loaded === 9)
{
addGameView();
}
}
/* Game View Function */
// Lays out the initial scene (player, lives, score text, scrolling
// backgrounds), then tweens the player in and starts the game.
function addGameView()
{
Player1.x = Center_X - 18.5;
Player1.y = 480 + 34; // start just below the 480px play area
/* Add Lives */
for(var i = 0; i < 3; i++)
{
var l = new Bitmap(Life_Img);
l.x = 248 + (25 * i);
l.y = 463;
Lives.addChild(l);
stage.update();
}
/* Score Text */
Score = new Text('0', 'bold 14px Courier New', '#FFFFFF');
Score.maxWidth = 1000; //fix for Chrome 17
Score.x = 2;
Score.y = 476;
/* Second Background */
BG2.y = -480; // parked above the viewport for seamless scrolling
/* Add gfx to stage and Tween Ship */
stage.addChild(BG, BG2, Player1, Enemies, Bullets, Lives, Score);
Tween.get(Player1).to({y:425}, 1000).call(Start_Game);
}
/*Move the Player*/
// Mouse handler: keep the player sprite horizontally centred under the
// cursor (18.5 px is half the sprite width).
function Move_Player1(e)
{
var halfSpriteWidth = 18.5;
Player1.x = e.stageX - halfSpriteWidth;
}
/* Shooting */
// Spawn a bullet just above the player's current position and play the
// throw sound.
function Shoot()
{
var b = new Bitmap(Bullet_Img1);
b.x = Player1.x + 13;
b.y = Player1.y - 20;
Bullets.addChild(b);
stage.update();
SoundJS.play('Kunai_Throw');
}
/* Adding Enemies */
// Spawn an enemy at a random x just above the top of the stage.
// NOTE(review): scheduled via setInterval in Start_Game, which as
// posted refers to it by the wrong name ('addEnemy()').
function Add_Enemy ()
{
var e = new Bitmap(Enemy1_Img);
e.x = Math.floor(Math.random() * (320-50))
e.y = -50
Enemies.addChild(e);
stage.update();
}
// Begin gameplay: hook mouse/shoot input, register the per-frame
// update() callback, and start the 1 s enemy-spawn timer.
function Start_Game()
{
stage.onMouseMove = Move_Player1;
BG.onPress = Shoot;
BG2.onPress = Shoot;
// NOTE(review): 2015-era CreateJS replaced Ticker.addListener with
// createjs.Ticker.on("tick", ...); left as-is to match the file.
Ticker.addListener(Ticker_Listener, false);
Ticker_Listener.tick = update;
// BUG FIX: the original passed the *string* 'addEnemy()' to
// setInterval (implicit eval) and misspelled the function, which is
// named Add_Enemy — pass the function reference directly.
Timer_Source = setInterval(Add_Enemy, 1000);
}
// Per-tick game loop body: scroll the two backgrounds and advance /
// cull the bullets.
function update()
{
/* Move Background */
// Scroll both backgrounds down; wrap whichever has left the viewport.
BG.y += 5;
BG2.y += 5;
if(BG.y >= 480)
{
BG.y = -480;
}
else if(BG2.y >= 480)
{
BG2.y = -480;
}
/* Move Bullets */
// BUG FIX: the off-stage check sat *outside* the for loop, indexing
// Bullets.children[i] with i already past the end (TypeError).
// Iterate backwards so removeChildAt() doesn't skip elements.
for(var i = Bullets.children.length - 1; i >= 0; i--)
{
Bullets.children[i].y -= 10;
/* Clear offstage Bullets */
if(Bullets.children[i].y < - 20)
{
Bullets.removeChildAt(i);
}
}
}
/* Show Boss */
// NOTE(review): everything from here down to the alert() definition
// sits *outside* update() as posted — update()'s closing brace comes
// too early, so this code (which still uses the loop variables j and
// k) is orphaned at top level and runs only once, at load.
if(parseInt(Score.text) >= 1000 && Boss1 == null)
{
Boss1 = new Bitmap(Boss1_Img);
SoundJS.play('Boss');
Boss1.x = Center_X - 90;
Boss1.y = -183;
stage.addChild(boss1); // NOTE(review): typo — the variable is Boss1
Tween.get(boss1).to({y:40}, 2000) //tween the boss onto the play area
}
/* Move Enemies */
for(var j = 0; j < Enemies.children.length; j++)
{
eEemies.children[j].y += 5; // NOTE(review): typo — should be Enemies
/* Remove Offstage Enemies */
if(Enemies.children[j].y > 480 + 50)
{
Enemies.removeChildAt(j);
}
}
/* Bullet - Enemy Collision */
for(var k = 0; k < Bullets.children.length; k++)
{
// NOTE(review): j has already run past the end of Enemies.children
// here, so Enemies.children[j] is undefined.
if(Bullets.children[k].x >= Enemies.children[j].x && Bullets.children[k].x + 11 < Enemies.children[j].x + 49 && Bullets.children[k].y < Enemies.children[j].y + 40)
{
Bullets.removeChildAt(k);
Enemies.removeChildAt(j);
stage.update();
SoundJS.play('Shuriken_Hit');
Score.text = parseFloat(Score.text + 50); // NOTE(review): string concat — '50' + 50 is '5050'
}
/* Bullet - Boss Collision */
if(Boss1 != null && bullets.children[k].x >= Boss1.x && Bullets.children[k].x + 11 < Boss1.x + 183 && Bullets.children[k].y < Boss1.y + 162) // NOTE(review): typo — `bullets` should be Bullets
{
Bullets.removeChildAt(k);
Boss_Health--;
stage.update();
SoundJS.play('Shuriken_Hit');
Score.text = parseInt(Score.text + 50); // NOTE(review): same string-concat issue
}
}
/* Player1 - Enemy Collision */
if(Enemies.hitTest(Player1.x, Player1.y) || Enemies.hitTest(Player1.x + 37, Player1.y))
{
Enemies.removeChildAt(j);
Lives.removeChildAt(Lives.length); // NOTE(review): Container children count is Lives.children.length; index should likely be length - 1
Player1.y = 480 + 34;
Tween.get(Player1).to({y:425}, 500)
SoundJS.play('Shuriken_Hit');
}
/* Check for win */
if(Boss1 != null && Boss_Health <= 0)
{
alert('Win');
}
/* Check for lose */
if(Lives.children.length <= 0)
{
alert('Lose');
}
// Game-over handler (named `alert`, deliberately shadowing
// window.alert in this file): tears down input and tick listeners,
// stops enemy spawning, and shows the win or lose splash.
// e: 'Win' or anything else (treated as lose).
function alert(e)
{
/* Remove Listeners */
stage.onMouseMove = null;
BG.onPress = null;
BG2.onPress = null;
Ticker.removeListener(Ticker_Listener);
Ticker_Listener = null;
// BUG FIX: the spawn timer must be cleared, not just nulled —
// otherwise Add_Enemy keeps firing after the game has ended.
clearInterval(Timer_Source);
Timer_Source = null;
/* Display Correct Message */
if(e == 'Win')
{
Win = new Bitmap(Win_Img);
// BUG FIX: centerX/centerY were never declared; the globals defined
// at the top of the file are Center_X / Center_Y.
Win.x = Center_X - 64;
Win.y = Center_Y - 23;
stage.addChild(Win);
stage.removeChild(Enemies, Boss1);
}
else
{
Lose = new Bitmap(Lose_Img);
Lose.x = Center_X - 64;
Lose.y = Center_Y - 23;
stage.addChild(Lose);
stage.removeChild(Enemies, Player1);
}
// Any further press restarts the game via a full page reload.
BG.onPress = function(){window.location.reload();};
BG2.onPress = function(){window.location.reload();};
stage.update();
}
Raw HTML Code:
<!doctype html>
<html>
<head>
<title>Scrolling Shooter</title>
<meta charset="utf-8">
<style>*{-webkit-tap-highlight-color: rgba(0, 0, 0, 0);}</style>
<link rel="stylesheet" href="Week11.css">
<script src="https://code.createjs.com/easeljs-0.8.1.min.js"></script>
<script src="https://code.createjs.com/tweenjs-0.6.1.min.js"></script>
<script src="https://code.createjs.com/soundjs-0.6.1.min.js"></script>
</head>
<body onload="Main();">
<div id="stage">
<canvas id="Shooter" width="1920" height="1080"></canvas>
</div>
<script src="Main.js"> </script>
</body>
</html>
The tutorial you are using is sorely out-of-date (it uses a version of CreateJS that is 3 or 4 years old). The version of EaselJS you are using in your sample was published in May 2015.
To start with, all CreateJS classes (such as Container) need to use the createjs namespace:
Player1 = new createjs.Bitmap(Player1_Img);
Score = new createjs.Text('0', 'bold 14px Courier New', '#FFFFFF');
The Ticker API has also changed:
createjs.Ticker.addEventListener("tick", stage);
// OR
createjs.Ticker.on("tick", stage);
There are also other APIs that have changed (BitmapAnimation became Sprite for example).
I would start there, and then follow up here once you have made some progress. Use your console to determine what errors are happening, and solve them one at a time.
Cheers,
This is an issue with seemingly just the AnalyserNode in JS (or it's between the keyboard and the chair...).
Basically, I have a properly setup audio file (streams under normal circumstances). However, once I try to hook up the analyzer node between it and the destination, everything goes haywire. The audio does not output and the node always gets 0 or undefined in the dataArray. I've been researching this for the past 7-10 hours (with varying amounts of luck) and noticed most people solved it after firing a "canplay" event, but that did not work for me. Any help would be lovely!
As background, this is an IceCast stream, though I don't think it will hurt/change anything.
// Build the <audio> element for the IceCast stream and start the
// analyser once the element reports it can play.
$(document).ready(function() {
var audio = new Audio();
audio.id = "audioStream";
audio.src = ; // Snipped our streaming link.  NOTE(review): invalid as posted — a URL string must be supplied here.
audio.autoplay = true;
audio.preload = "none";
document.body.appendChild(audio);
audio.addEventListener("canplay", function() {
initAudio(audio);
});
});
// Wire the <audio> element into an AnalyserNode and drive a bar-per-bin
// DOM visualisation from its byte frequency data.
function initAudio(audio) {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
window.requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame ||
window.webkitRequestAnimationFrame || window.msRequestAnimationFrame;
var context = new AudioContext();
var analyser = context.createAnalyser();
analyser.smoothingTimeConstant = 0.85;
// NOTE(review): for a cross-origin stream the element needs
// crossOrigin = "anonymous" plus CORS headers on the server, otherwise
// the analyser reads only zeros (see the answer below).
var source = context.createMediaElementSource(audio);
source.connect(analyser);
analyser.connect(context.destination);
analyser.fftSize = 32; // yields frequencyBinCount = 16 bars
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
var visualisation = document.getElementById("bounceAnimation");
var barSpacingPercent = 100 / bufferLength;
// One absolutely-positioned bar element per frequency bin.
for (var i = 0; i < bufferLength; i++) {
var div = document.createElement('div');
div.style["left"] = i * barSpacingPercent + "%";
div.style["width"] = barSpacingPercent + "%";
div.style["height"] = "5%";
div.className = "lineDance";
visualisation.appendChild(div);
}
var bars = visualisation.children;
function draw(){
requestAnimationFrame(draw);
analyser.getByteFrequencyData(dataArray);
for (i = 0; i < bars.length; i++) {
// NOTE(review): a negative percentage height is invalid CSS — the
// * -1 looks unintended.
bars[i].style["height"] = (dataArray[i] * -1) + '%';
if(dataArray[i] != 0) {
alert(dataArray[i]); // NOTE(review): blocking debug alert inside the rAF loop
}
};
}
draw();
}
You should add:
audio.crossOrigin = "anonymous";
and serve your stream with CORS headers.