Issue with HTML5 Audio Frequency Animation?

This issue seems to involve just the AnalyserNode in JS (or it sits between the keyboard and the chair...).
Basically, I have a properly set-up audio file (it streams under normal circumstances). However, once I try to hook the analyser node up between it and the destination, everything goes haywire: the audio does not play, and the analyser always reads 0 or undefined into the dataArray. I've been researching this for the past 7-10 hours (with varying amounts of luck) and noticed most people solved it by waiting for a "canplay" event, but that did not work for me. Any help would be lovely!
As background, this is an IceCast stream, though I don't think that hurts or changes anything.
$(document).ready(function() {
    var audio = new Audio();
    audio.id = "audioStream";
    audio.src = ; // Snipped our streaming link.
    audio.autoplay = true;
    audio.preload = "none";
    document.body.appendChild(audio);

    audio.addEventListener("canplay", function() {
        initAudio(audio);
    });
});
function initAudio(audio) {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    window.requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame ||
        window.webkitRequestAnimationFrame || window.msRequestAnimationFrame;

    var context = new AudioContext();
    var analyser = context.createAnalyser();
    analyser.smoothingTimeConstant = 0.85;

    var source = context.createMediaElementSource(audio);
    source.connect(analyser);
    analyser.connect(context.destination);
    analyser.fftSize = 32;

    var bufferLength = analyser.frequencyBinCount;
    var dataArray = new Uint8Array(bufferLength);
    var visualisation = document.getElementById("bounceAnimation");
    var barSpacingPercent = 100 / bufferLength;

    for (var i = 0; i < bufferLength; i++) {
        var div = document.createElement('div');
        div.style["left"] = i * barSpacingPercent + "%";
        div.style["width"] = barSpacingPercent + "%";
        div.style["height"] = "5%";
        div.className = "lineDance";
        visualisation.appendChild(div);
    }

    var bars = visualisation.children;

    function draw() {
        requestAnimationFrame(draw);
        analyser.getByteFrequencyData(dataArray);
        for (i = 0; i < bars.length; i++) {
            bars[i].style["height"] = (dataArray[i] * -1) + '%';
            if (dataArray[i] != 0) {
                alert(dataArray[i]);
            }
        }
    }
    draw();
}

You should add:

    audio.crossOrigin = "anonymous";

and serve your stream with CORS headers. Without CORS approval, createMediaElementSource treats the cross-origin stream as tainted: the graph outputs silence and the analyser only ever reads zeros, which matches exactly what you're seeing.
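A minimal sketch of the fix applied to the setup above (the stream URL here is just a placeholder for the snipped IceCast link, and the IceCast server must send an Access-Control-Allow-Origin header):

    var audio = new Audio();
    audio.id = "audioStream";
    audio.crossOrigin = "anonymous"; // set before src so the request is made in CORS mode
    audio.src = "https://example.com/stream"; // placeholder for the snipped streaming link
    document.body.appendChild(audio);
    audio.addEventListener("canplay", function() {
        initAudio(audio); // same initAudio as above; the analyser should now receive real data
    });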

Related

getUserMedia image capture resolution vs. html video element size

Is there a way to capture a higher-resolution image than the actual width of my on-screen video element, which is showing the webcam image I intend to capture?
Currently I have set the width in getUserMedia to 1280, but my video element is constrained to 640px. I'd like to still store images 1280px wide so that I get higher-quality images.
Code:
<div id="video-container">
<h3 id="webcam-title">Add Photos</h3>
<video id="video" autoplay playsinline></video>
<select id="videoSource"></select>
<div id="take-photo-button" onclick="takeSnapshot();">TAKE PHOTO <div class="overlay"></div></div>
<canvas id="myCanvas" style="display:none;"></canvas>
<div id="snapshot-container"></div>
<div id="approval-form-submit">SAVE ORDER</div>
</div>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<script>
$(function() {
    var video = document.querySelector('video');
    var videoSelect = document.querySelector('select#videoSource');
    var initialized = false;

    // Obtain media object from any browser
    navigator.getUserMedia = (navigator.getUserMedia ||
        navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia ||
        navigator.msGetUserMedia);

    var video_height, snapshot_height;
    //var video_width = 1280;
    var video_width = 640;
    var container_width = 800;
    var snapshot_margin = 10;
    var snapshot_width = (container_width - snapshot_margin*6)/3;
    //var snapshot_width = 1280;

    function fillSelectWithDevices(deviceInfos) {
        var value = videoSelect.value;
        $(videoSelect).empty();
        for (let i = 0; i !== deviceInfos.length; ++i) {
            var deviceInfo = deviceInfos[i];
            if (deviceInfo.kind === 'videoinput') {
                var option = document.createElement('option');
                option.value = deviceInfo.deviceId;
                option.text = deviceInfo.label || `camera ${videoSelect.length + 1}`;
                videoSelect.appendChild(option);
                if (!initialized && deviceInfo.label === 'Back Camera') {
                    value = deviceInfo.deviceId;
                    initialized = true;
                }
            }
            if (Array.prototype.slice.call(videoSelect.childNodes).some(n => n.value === value)) {
                videoSelect.value = value;
            }
        }
    }

    function gotStream(stream) {
        window.stream = stream; // make stream available to console
        video.srcObject = stream;
        video.addEventListener('canplay', function(ev) {
            video_height = video.videoHeight * (video_width/video.videoWidth);
            snapshot_height = video.videoHeight * (snapshot_width/video.videoWidth);
            initCanvas();
            // Firefox currently has a bug where the height can't be read from
            // the video, so we will make assumptions if this happens.
            if (isNaN(video_height)) {
                video_height = video_width * (3/4);
                console.log("Can't read video height. Assuming 4:3 aspect ratio");
            }
            //video_width=640;
            //video_height=480;
            video.setAttribute('width', video_width);
            video.setAttribute('height', video_height);
            canvas.setAttribute('width', video_width);
            canvas.setAttribute('height', video_height);
        }, false);
        return navigator.mediaDevices.enumerateDevices();
    }

    function handleError(error) {
        console.log('navigator.getUserMedia error: ', error);
    }

    function start() {
        if (window.stream) {
            window.stream.getTracks().forEach(track => {
                track.stop();
            });
        }
        var videoSource = videoSelect.value;
        var constraints = {
            video: {
                deviceId: videoSource ? {exact: videoSource} : undefined,
                facingMode: "environment",
                width: 1280
            },
            audio: false
        };
        navigator.mediaDevices.getUserMedia(constraints).then(gotStream).then(fillSelectWithDevices).catch(handleError);
    }

    videoSelect.onchange = start;
    start();

    var canvas, ctx, container;

    function initCanvas() {
        canvas = document.getElementById("myCanvas");
        ctx = canvas.getContext('2d');
        container = document.getElementById("snapshot-container");
        // Reconstitute snapshots from form URI after failed submit
        var image_list_field = $('#image-list-field'),
            URI_array = image_list_field.val().split(','),
            dataURI;
        for (var i = 0; i < URI_array.length; i++) {
            if (URI_array[i]) {
                dataURI = "data:image/png;base64," + URI_array[i];
                displaySnapshot(dataURI);
            }
        }
    }

    // Capture a photo by fetching the current contents of the video
    // and drawing it into a canvas, then converting that to a PNG
    // format data URL. By drawing it on an offscreen canvas and then
    // drawing that to the screen, we can change its size and/or apply
    // other changes before drawing it.
    takeSnapshot = function() {
        alert(video_width + " " + video_height);
        ctx.drawImage(video, 0, 0, video_width, video_height);
        var data = canvas.toDataURL('image/png');
        displaySnapshot(data);
    }

    function displaySnapshot(data) {
        var photo = document.createElement('img'),
            snapshot_div = document.createElement('div'),
            delete_text = document.createElement('p');
        photo.setAttribute('src', data);
        $(photo).css({"width": snapshot_width + "px"});
        $(photo).addClass("snapshot-img");
        $(snapshot_div).css({"width": snapshot_width + "px", "height": snapshot_height + 25 + "px"});
        $(delete_text).text("Delete Photo");
        $(snapshot_div).append(photo).append(delete_text);
        $(delete_text).on('click', function() { $(this).closest('div').remove() });
        container.append(snapshot_div);
    }

    $('#approval-form-submit').on('click', function(e) {
        var form = $('#approval-form'),
            image_list_field = $('#image-list-field'),
            imageURI;
        image_list_field.val("");
        $('.snapshot-img').each(function(i, d) {
            imageURI = d.src.split(',')[1] + ',';
            image_list_field.val(image_list_field.val() + imageURI);
        });
        form.submit();
    })
});
</script>
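One way to store full-resolution captures while keeping the 640px display is to size the hidden canvas from video.videoWidth/videoHeight rather than from the displayed dimensions; a minimal sketch against the elements above (takeFullResSnapshot is a hypothetical variant of takeSnapshot):

    // Sketch: capture at the stream's native resolution regardless of display size.
    // Assumes the same #video and #myCanvas elements as above.
    function takeFullResSnapshot() {
        var video = document.getElementById('video');
        var canvas = document.getElementById('myCanvas');
        canvas.width = video.videoWidth;   // e.g. 1280, from the getUserMedia constraint
        canvas.height = video.videoHeight; // native height, independent of the 640px display
        canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
        return canvas.toDataURL('image/png'); // full-resolution PNG data URL
    }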

How to lower mic input volume using navigator.mediaDevices.getUserMedia?

I'm creating an audio recording app using navigator.mediaDevices.getUserMedia(), and it records every sound around me, even very quiet ones 10 m away. I do NOT play this sound back, I only visualize it depending on volume, so I only need fairly loud sounds, or ones close to the mic; otherwise there's too much interference.
Also, if I enable playback to hear my mic input and make a quiet noise like tapping on the table, I can't hear that sound in the playback, but I do see it in the visualizer, and that is exactly what I don't want.
Here's my code:
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
this.audioContext = new AudioContext();
this.sourceNode = this.audioContext.createMediaStreamSource(stream);
this.analyserNode = this.audioContext.createAnalyser();
this.sourceNode.connect(this.analyserNode);
const data = new Float32Array(this.analyserNode.fftSize);
this.analyserNode.getFloatTimeDomainData(data);
So how can I lower mic sensitivity using the Web Audio API, or lower the mic input volume, or maybe transform the data from the analyser? I've read about AudioContext.createGain() and gain.volume, but it's used for output audio volume, not input.
"I've read about AudioContext.createGain(), gain.volume, but it's used for output audio volume, not input one"
No, it is used to control the volume of the audio that goes through it.
You have to see your audio context nodes as a chain; then you might understand that you can indeed use a GainNode to control the input volume of the next node to which it is connected.
For example, if we declare something like

    gainNode.gain.value = 0.5; // note: the AudioParam property is gain.value, not gain.volume
    input.connect(gainNode);
    gainNode.connect(analyserNode);
    input.connect(audioContext.destination);
it can be seen as
    Input [mic] ===> GainNode ===> AnalyserNode
       100%      ||     50%            50%
                 ||
                  =====> AudioContext Output
                               100%
So your gainNode here did lower the volume of your AnalyserNode, but not the one of the context output.
But this is not really what you want.
Indeed, the AnalyserNode API has minDecibels and maxDecibels properties which will do exactly what you want (filter out sounds outside a given dB range).
But these properties make sense only for frequency data (getXXXFrequencyData) since waveform doesn't take volume into account.
However, it is still possible to check if this frequency data is in our required bounds before deciding if we should draw our waveform or not.
polyfill();

(async() => {
    const ctx = new AudioContext();
    const input = await loadFileAsBufferNode(ctx);
    const analyser = ctx.createAnalyser();
    analyser.minDecibels = -90;
    analyser.maxDecibels = -10;
    analyser.fftSize = 512;
    input.connect(analyser);
    const gainNode = ctx.createGain();
    input.connect(gainNode);
    const bufferLength = analyser.frequencyBinCount;
    const freqArray = new Uint8Array(bufferLength);
    const waveArray = new Uint8Array(bufferLength);
    const canvasCtx = canvas.getContext('2d');
    const WIDTH = canvas.width;
    const HEIGHT = canvas.height;
    canvasCtx.lineWidth = 2;
    draw();

    // taken from https://developer.mozilla.org/en-US/docs/Web/API/AnalyserNode/maxDecibels#Example
    function draw() {
        requestAnimationFrame(draw);
        canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
        analyser.getByteFrequencyData(freqArray);
        gainNode.gain.value = 1;
        analyser.getByteTimeDomainData(waveArray);
        var barWidth = (WIDTH / bufferLength) * 2.5;
        var barHeight;
        var x = 0;
        for (var i = 0; i < bufferLength; i++) {
            barHeight = freqArray[i];
            canvasCtx.fillStyle = 'rgb(' + (barHeight + 100) + ',50,50)';
            canvasCtx.fillRect(x, HEIGHT - barHeight / 2, barWidth, barHeight / 2);
            x += barWidth + 1;
        }
        // here we check if the volume is in bounds
        if (freqArray.some(isTooHigh) || !freqArray.some(hasValue)) {
            canvasCtx.fillRect(0, HEIGHT / 2, WIDTH, 1);
            gainNode.gain.value = 0;
            return;
        }
        canvasCtx.beginPath();
        var sliceWidth = WIDTH * 1.0 / bufferLength;
        var x = 0;
        for (var i = 0; i < bufferLength; i++) {
            var v = waveArray[i] / 128.0;
            var y = v * HEIGHT / 2;
            if (i === 0) {
                canvasCtx.moveTo(x, y);
            } else {
                canvasCtx.lineTo(x, y);
            }
            x += sliceWidth;
        }
        canvasCtx.lineTo(canvas.width, canvas.height / 2);
        canvasCtx.stroke();
    }

    function isTooHigh(val) {
        return val === 255;
    }
    function hasValue(val) {
        return val;
    }

    // DOM
    maxDB.oninput = e => {
        const max = +maxDB.value;
        if (+minDB.value >= max) minDB.value = analyser.minDecibels = max - 1;
        analyser.maxDecibels = max;
    }
    minDB.oninput = e => {
        const min = +minDB.value;
        if (+maxDB.value <= min) maxDB.value = analyser.maxDecibels = min + 1;
        analyser.minDecibels = min;
    }
    out.onchange = e => {
        if (out.checked)
            gainNode.connect(ctx.destination);
        else
            gainNode.disconnect(ctx.destination);
    };
})();

function loadFileAsBufferNode(ctx, url = 'https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3') {
    return fetch(url)
        .then(r => r.arrayBuffer())
        .then(buf => ctx.decodeAudioData(buf))
        .then(audioBuffer => {
            const source = ctx.createBufferSource();
            source.buffer = audioBuffer;
            source.loop = true; // AudioBufferSourceNode has no 'repeat' property; 'loop' is the correct one
            source.start(0);
            return source;
        });
}

/* for Safari */
function polyfill() {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    try {
        const prom = new AudioContext().decodeAudioData(new ArrayBuffer()).catch(e => {});
    } catch (e) {
        const prev = AudioContext.prototype.decodeAudioData;
        Object.defineProperty(AudioContext.prototype, 'decodeAudioData', {
            get: () => asPromise
        });
        function asPromise(audioBuffer, done, failed) {
            return new Promise((res, rej) => {
                prev.apply(this, [audioBuffer, onsuccess, onerror]);
                function onsuccess(buf) {
                    if (typeof done === 'function') done(buf);
                    res(buf);
                }
                function onerror(err) {
                    if (typeof failed === 'function') failed(err);
                    rej(err);
                }
            });
        }
    }
}
<label>min<input type="range" id="minDB" min="-100" max="-1" value="-90"></label>
<label>max<input type="range" id="maxDB" min="-99" max="0" value="-10"></label>
<label>output audio<input type="checkbox" id="out"></label>
<canvas id="canvas"></canvas>

audio element not valid

I'm creating an audio player with a visualizer.
But currently, when I press the input to start the audio player, my debug console returns:
Uncaught (in promise) DOMException: Failed to load because no
supported source was found.
What I'm currently doing is setting the whole audio element up in JS / jQuery:
var bins = 512;
var backgroundColour = "#2C2E3B";
var barColour = "#EC1A55";
var floorLevel = 32;
var audioContext;
var audioBuffer;
var audioAnalyserNode;
var initialized = false;
var songText = "";
var textSize;
var freqLookup = [];
var canvasContext;
var isStream = true;
var canvasWidth;
var canvasHeight;
var src;
var audioElement;
var isPlaying = false;
var volume = 1;

function play() {
    audioElement = document.createElement('audio');
    // Opus support check stuff
    var streamEndpoint = 'http://**.**.**.**:8003/stream';
    var canPlayOpus = (typeof audioElement.canPlayType === "function" && audioElement.canPlayType('audio/ogg; codecs="opus"') !== "");
    if (volume > 1) {
        volume = volume / 100;
    }
    audioElement.src = streamEndpoint;
    audioElement.crossOrigin = 'anonymous';
    audioElement.volume = volume;
    audioElement.play();
    isPlaying = true;
    setUpCanvas(audioElement);
}

function pause() {
    audioElement.pause();
    audioElement.currentTime = 0;
    audioElement.src = '';
    isPlaying = false;
}

function setUpCanvas(audioElement) {
    try {
        initCanvas(document.getElementById("canvas"));
        if (typeof audioContext === 'undefined') {
            audioContext = new AudioContext();
        }
        if (audioElement) {
            isStream = true;
            setupAudioApi(true, audioElement);
        }
    } catch(e) {
        console.log(e);
    }
}

function setupAudioApi(isStream, audioElement) {
    //var src;
    if (isStream) {
        if (typeof src === 'undefined') {
            src = audioContext.createMediaElementSource(audioElement);
            audioContext.crossOrigin = "anonymous";
            audioAnalyserNode = audioContext.createAnalyser();
            audioAnalyserNode.fftSize = bins * 4;
            src.connect(audioAnalyserNode);
            audioAnalyserNode.connect(audioContext.destination);
        }
    }
    if (!isStream) {
        src.start();
    }
    initialized = true;
    initFreqLookupTable();
}

function initCanvas(canvasElement) {
    canvasContext = canvasElement.getContext('2d');
    canvasElement.width = canvasElement.clientWidth;
    canvasElement.height = canvasElement.clientHeight;
    canvasWidth = canvasElement.width;
    canvasHeight = canvasElement.height;
    requestAnimationFrame(paint);
}

function getFreqPoint(start, stop, n, binCount) {
    return start * Math.pow(stop / start, n / (binCount - 1));
}

function initFreqLookupTable() {
    var lastPoint = 0;
    var bins = audioAnalyserNode.frequencyBinCount;
    for (var i = 0; i < bins / 2; i++) {
        // Scale to perceived frequency distribution
        var newFreq = getFreqPoint(20, 20000, i * 2, bins);
        var point = Math.floor(bins * newFreq / 20000);
        while (point <= lastPoint) {
            point++;
        }
        lastPoint = point;
        freqLookup.push(point);
    }
}

// Render some fancy bars
function paint() {
    requestAnimationFrame(paint);
    if (!initialized) {
        alert('Er is iets fout gegaan'); // Dutch: "Something went wrong"
        return false;
    }
    canvasContext.clearRect(0, 0, canvasWidth, canvasHeight);
    canvasContext.fillStyle = backgroundColour;
    canvasContext.fillRect(0, 0, canvasWidth, canvasHeight);
    var bins = audioAnalyserNode.frequencyBinCount;
    var data = new Uint8Array(bins);
    audioAnalyserNode.getByteFrequencyData(data);
    canvasContext.fillStyle = barColour;
    for (var i = 0; i < bins; i++) {
        var point = freqLookup[i];
        // Pretty much any volume will push it over 128 so we set that as the bottom threshold
        // I suspect I should be doing a logarithmic space for the volume as well
        var height = Math.max(0, (data[point] - floorLevel));
        // Scale to the height of the bar
        // Since we change the base level in the previous operations, 256 should be changed to 160 (I think) if we want it to go all the way to the top
        height = (height / (256 - floorLevel)) * canvasHeight * 0.8;
        var width = Math.ceil(canvasWidth / ((bins / 2) - 1));
        canvasContext.fillRect(i * width, canvasHeight - height, width, height);
    }
}
The stream is in audio/mpeg format, and it does load when I simply create an audio element in HTML with a src.
Can someone help me find the cause of and solution to the DOMException I'm getting? I have been searching other cases of this error, but the fixes there didn't resolve the problem.
Try creating the audio tag like this:

    var audio = new Audio('audio_file.mp3');

And try setting the type:

    audio.type = "audio/mpeg";

I think that will fix your problem. This creates an element identical to the one you use in your code. I suggest you put an extension on your stream URL. I know this way works, and I don't know why the other way doesn't.
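Putting the suggestions together with the original play() code, a rough sketch (the masked host is the asker's endpoint; whether an extension can be appended depends on the server's mount configuration):

    // Sketch of the suggested fix applied to the stream case.
    var audio = new Audio('http://**.**.**.**:8003/stream');
    audio.type = "audio/mpeg";       // MIME hint, as the answer suggests
    audio.crossOrigin = 'anonymous'; // kept from the original code
    audio.volume = 1;
    audio.play();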

Why does this simple ScriptProcessorNode have so much latency?

I'm having an issue with ScriptProcessorNode and lag, even when doing an incredibly simple process that just takes each sample and computes x %= 0.35. Why does this simple ScriptProcessorNode (included below) have so much latency?
The MDN says nothing about latency: https://developer.mozilla.org/en-US/docs/Web/API/ScriptProcessorNode
Also, I know that ScriptProcessorNode is slated for deprecation, but AudioWorklets aren't implemented yet, so this is the best there is.
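One likely contributor: createScriptProcessor is called with no arguments below, so the browser picks the buffer size, and the defaults are often large (4096 frames is roughly 93 ms at 44.1 kHz, before input/output double-buffering is counted). A sketch of asking for a smaller buffer, at the cost of more frequent callbacks:

    // Hypothetical variant of the call below: request a 256-frame buffer
    // (about 5.8 ms at 44.1 kHz). Valid sizes are powers of two from 256
    // to 16384; smaller buffers lower latency but risk audio glitches.
    var modulus = audioCtx.createScriptProcessor(256, 1, 1);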
var baseFreq = 220;
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var osc = audioCtx.createOscillator();
osc.type = 'sine';
osc.frequency.value = baseFreq;
osc.start();

var intervals = [1, 1.125, 1.25, 1.5, 1.6666666, 1.875, 2, 4];
var index = 0;

function newTone() {
    index++;
    index %= intervals.length;
    osc.frequency.value = intervals[index] * baseFreq;
}

var modulus = audioCtx.createScriptProcessor();
modulus.onaudioprocess = function(audioProcessingEvent) {
    var inputBuffer = audioProcessingEvent.inputBuffer;
    var outputBuffer = audioProcessingEvent.outputBuffer;
    for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++) {
        var inputData = inputBuffer.getChannelData(channel);
        var outputData = outputBuffer.getChannelData(channel);
        for (var sample = 0; sample < inputBuffer.length; sample++) {
            outputData[sample] = inputData[sample] % moduAmount;
        }
    }
}

var moduLow = 0.35;
var moduHigh = 2;
var moduAmount = moduLow;

function turnModulusOnOff() {
    if (moduAmount == moduLow)
        moduAmount = moduHigh;
    else
        moduAmount = moduLow;
}

var gain = audioCtx.createGain();
gain.gain.value = 0.05;
gain.connect(audioCtx.destination);
modulus.connect(gain);
osc.connect(modulus);
osc.connect(gain);
document.body.addEventListener("keydown", newTone);
html,
body {
    height: 100%;
    width: 100%;
    font-family: arial;
}
<button onclick="newTone()">Next Tone</button>
<button onclick="turnModulusOnOff()">Mute/Unmute Modulus</button>
<br>
To test, click anywhere and then press any key to change the tone. Notice when Modulus is muted how responsive the tone change is. Notice the lag between the sine frequency change and the modulus effect.
<br>
<b>For Extra Fun: Go crazy on your keyboard</b>

Setting new body position on BeginContact event

I want to set a new position for a body in the BeginContact event, but it still doesn't work. It's written in JavaScript, drawing to a canvas, but that doesn't matter for Box2D. The HTML body contains only an empty canvas, nothing else. Here is my code:
At the beginning of the JS file, some variables are declared.
Vec2 = Box2D.Common.Math.b2Vec2;
BodyDef = Box2D.Dynamics.b2BodyDef;
Body = Box2D.Dynamics.b2Body;
FixtureDef = Box2D.Dynamics.b2FixtureDef;
Fixture = Box2D.Dynamics.b2Fixture;
World = Box2D.Dynamics.b2World;
PolygonShape = Box2D.Collision.Shapes.b2PolygonShape;
DebugDraw = Box2D.Dynamics.b2DebugDraw;
var player;
That's followed by the setup function, which is called at startup.
function setup()
{
    canvas = document.getElementById("collisionCanvas");
    context = canvas.getContext('2d');
    document.getElementsByTagName('body')[0].style.backgroundColor = "black";
    canvas.style.backgroundColor = "white";
    canvas.width = 320;
    canvas.height = 320;
    world = new World(new Vec2(0, 10), false);

    //Point of the problem!!!
    //setting contact listener
    var listener = new Box2D.Dynamics.b2ContactListener;
    listener.BeginContact = function(contact)
    {
        var body1 = contact.GetFixtureA().GetBody();
        var body2 = contact.GetFixtureB().GetBody();
        if (body1.GetUserData().type == "player")
        {
            body1.SetPosition({x:5, y:5});
        }
        else
        {
            body2.SetPosition({x:5, y:5});
        }
    }
    world.SetContactListener(listener);

    var fixDef = new FixtureDef;
    fixDef.density = 1.0;
    fixDef.friction = 0.5;
    fixDef.restitution = 0.2;
    var bodyDef = new BodyDef;

    //creating ground
    bodyDef.type = Body.b2_staticBody;
    bodyDef.position.x = convertPixelsToMeters(160);
    bodyDef.position.y = convertPixelsToMeters(320 - 32/2);
    bodyDef.userData = {type: "static"};
    fixDef.shape = new PolygonShape;
    fixDef.shape.SetAsBox(convertPixelsToMeters(canvas.width/2), convertPixelsToMeters(32/2));
    world.CreateBody(bodyDef).CreateFixture(fixDef);

    //creating player
    bodyDef.type = Body.b2_dynamicBody;
    bodyDef.fixedRotation = true;
    bodyDef.position.x = convertPixelsToMeters(160);
    bodyDef.position.y = convertPixelsToMeters(160);
    bodyDef.userData = {type: "player"};
    fixDef.shape = new PolygonShape;
    fixDef.shape.SetAsBox(convertPixelsToMeters(16), convertPixelsToMeters(16));
    player = world.CreateBody(bodyDef);
    player.CreateFixture(fixDef);

    //setup debug draw
    var debugDraw = new DebugDraw();
    debugDraw.SetSprite(document.getElementById("collisionCanvas").getContext("2d"));
    debugDraw.SetDrawScale(32.0);
    debugDraw.SetFillAlpha(0.3);
    debugDraw.SetLineThickness(1.0);
    debugDraw.SetFlags(DebugDraw.e_shapeBit | DebugDraw.e_jointBit);
    world.SetDebugDraw(debugDraw);

    window.setInterval(update, 1000 / 60);
}
At the end there are only the update function and one helper function, and that's it.
function update()
{
    world.Step(
        1 / 60 //frame-rate
        , 10   //velocity iterations
        , 10   //position iterations
    );
    world.DrawDebugData();
    world.ClearForces();
}

function convertPixelsToMeters(x)
{
    return x * 0.03125;
}

$(function(){
    setup();
})
Only the middle part is important: the BeginContact handler that calls SetPosition, which doesn't work.
I tried changing the position in other places, for example in a KeyDown handler, and there it worked correctly, so I don't understand why it doesn't work here.
In the b2ContactListener callbacks you cannot change any property or position; Box2D locks the world while it is stepping. You can set a boolean variable to true in BeginContact and then change the body's position according to that variable once the step has finished.
Applied to your code:
var bodyyy;
var boolennn = false;

listener.BeginContact = function(contact)
{
    var body1 = contact.GetFixtureA().GetBody();
    var body2 = contact.GetFixtureB().GetBody();
    if (body1.GetUserData().type == "player")
    {
        //body1.SetPosition({x:5, y:5});
        bodyyy = body1;
        boolennn = true;
    }
    else
    {
        //body2.SetPosition({x:5, y:5});
        bodyyy = body2;
        boolennn = true;
    }
}
Now, in your update method (after world.Step returns, when the world is no longer locked):

    if (boolennn)
    {
        bodyyy.SetPosition({x:5, y:5});
        boolennn = false; // reset so the body is only moved once per contact
    }
Sorry, I don't know JavaScript syntax.
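A consolidated sketch of this deferred-move pattern against the asker's code (pendingBody/pendingMove are illustrative names; Vec2 is the alias from the top of the file):

    // Sketch: defer SetPosition until after world.Step, when the world is unlocked.
    var pendingBody = null;
    var pendingMove = false;

    listener.BeginContact = function(contact) {
        var body1 = contact.GetFixtureA().GetBody();
        var body2 = contact.GetFixtureB().GetBody();
        pendingBody = (body1.GetUserData().type == "player") ? body1 : body2;
        pendingMove = true;
    };

    function update() {
        world.Step(1 / 60, 10, 10);
        if (pendingMove) {
            pendingBody.SetPosition(new Vec2(5, 5));
            pendingMove = false;
        }
        world.DrawDebugData();
        world.ClearForces();
    }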
