Three.js - Texture loading with async/await - JavaScript

I wrote a Drawing class:
export class Drawing {
constructor(texture) {
const material = new MeshBasicMaterial({
color: 0xffffff,
map: texture
});
this.mesh = new Mesh(new PlaneGeometry(texture.image.naturalWidth / 20, texture.image.naturalHeight / 20), material);
}
setPosition(x, y, z) {
this.mesh.position.x = x;
this.mesh.position.y = y;
this.mesh.position.z = z;
}
}
I would like to access the texture.image properties in order to size the PlaneGeometry. So, before creating the Drawing objects, I make some async/await calls to load the textures (the Drawing instances are created in the World constructor):
let world;
let raycasterDown;
let prevTime = performance.now();
const direction = new Vector3();
const globalInputs = new GlobalInputs();
const textureLoader = new TextureLoader();
const promiseTextureBack = (pathName) => {
return new Promise((resolve) => {
resolve(textureLoader.load(pathName));
});
}
const allTexturesPromises = [];
drawingPaths.map(pathName => { //drawingPaths is an array of string
allTexturesPromises.push(promiseTextureBack(pathName));
});
const loadingWorld = async () => {
const allTextures = await Promise.all(allTexturesPromises);
console.log(allTextures[0]);
world = new World(allTextures);
document.body.appendChild(world.renderer.domElement);
world.instructions.addEventListener('click', function () {
world.controls.lock();
});
world.controls.addEventListener('lock', function () {
world.instructions.style.display = 'none';
world.blocker.style.display = 'none';
});
world.controls.addEventListener('unlock', function () {
world.blocker.style.display = 'block';
world.instructions.style.display = '';
});
}
init();
function init() {
loadingWorld();
raycasterDown = new Raycaster(new Vector3(), new Vector3(0, -1, 0), 0, 10);
document.addEventListener('keydown', (event) => {
globalInputs.onKeyDown(event);
});
document.addEventListener('keyup', (event) => {
globalInputs.onKeyUp(event);
});
window.addEventListener('resize', onWindowResize);
animate();
}
Nevertheless,
console.log(allTextures[0])
in loadingWorld shows a Texture whose image is still undefined... I'm quite sure the issue comes from:
textureLoader.load(pathName)
I'm open to any suggestions!

The load method takes an onLoad callback and returns the Texture immediately, before its image has finished loading, so resolving your promise with it gives you a texture whose image is still undefined. Instead of all this promiseTextureBack code, just use the loadAsync method, which returns a promise:
const allTexturesPromises = drawingPaths.map(pathName => {
return textureLoader.loadAsync(pathName);
});
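For reference, a minimal sketch of how loadAsync fits into the existing loadingWorld (assuming drawingPaths, World and the module-level world variable from the question):

const textureLoader = new TextureLoader();

const loadingWorld = async () => {
  // loadAsync resolves only after each image has finished loading,
  // so texture.image.naturalWidth is available inside the Drawing constructor
  const allTextures = await Promise.all(
    drawingPaths.map(pathName => textureLoader.loadAsync(pathName))
  );
  world = new World(allTextures);
  document.body.appendChild(world.renderer.domElement);
};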

scene.js:68 Uncaught (in promise) TypeError: Cannot read properties of undefined (reading 'scene')

I'm trying to load an animated model (.glb) in three.js but I get the above error.
If I paste the LoadAnimatedModel function into the main method it works, but if I use it in a separate class it doesn't work anymore. The LoadStaticModel function also works, but not the animated one. Any ideas what's wrong?
Thanks in advance!
Here is the code:
class CharacterControllerInput{
mixers = [];
scene;
constructor(scene){
this.scene = scene;
this.LoadAnimatedModel();
}
LoadAnimatedModel(scene){
this.scene = scene;
const loader = new GLTFLoader();
loader.load( 'http://127.0.0.1:3000/docs/BeterWerktDit.glb', function ( gltf ) {
gltf.scene.traverse( function ( object ) {
if ( object.isMesh ) object.castShadow = true;
} );
const model1 = SkeletonUtils.clone( gltf.scene );
const mixer1 = new THREE.AnimationMixer( model1 );
mixer1.clipAction( gltf.animations[ 0 ] ).play();
this.scene.add( model1);
this.mixers.push( mixer1);
render();
} );
}
}
Here is an abbreviated version of the class where I instantiate the Class.
class Scene{
constructor(){
this.main();
}
main(){
const canvas = document.querySelector('#c');
const renderer = new THREE.WebGLRenderer({canvas, antialias: true});
const scene = new THREE.Scene();
this.character = new CharacterControllerInput(scene);
render();
function render(){
const width = window.innerWidth;
const height = window.innerHeight;
camera.aspect = window.innerWidth/window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(width, height, false);
const delta = clock.getDelta();
for (const mixer of mixers) mixer.update(delta);
renderer.render(scene, camera);
requestAnimationFrame(render)
}
requestAnimationFrame(render);
}
};
let _APP = null;
window.addEventListener('DOMContentLoaded', () => {
_APP = new Scene();
});
I resolved the error. The problem was that I used this.scene inside the regular function callback passed to loader.load, where this no longer refers to the class instance.
The code below works:
class CharacterControllerInput {
scene;
constructor(scene, mixers) {
this.scene = scene;
this.mixers = mixers;
this.LoadAnimatedModel();
}
_init() {
this.LoadAnimatedModel();
}
LoadAnimatedModel() {
const loader = new GLTFLoader();
const scene = this.scene;
const mixers = this.mixers;
loader.load('http://127.0.0.1:3000/docs/BeterWerktDit.glb', function (gltf) {
gltf.scene.traverse(function (object) {
if (object.isMesh) object.castShadow = true;
});
const model = SkeletonUtils.clone(gltf.scene);
const mixer = new THREE.AnimationMixer(model);
mixer.clipAction(gltf.animations[0]).play();
scene.add(model);
mixers.push(mixer);
});
}
}
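An alternative, not shown in the original answer, is to keep this bound by using an arrow function for the callback; a sketch assuming the same scene and mixers fields:

LoadAnimatedModel() {
  const loader = new GLTFLoader();
  loader.load('http://127.0.0.1:3000/docs/BeterWerktDit.glb', (gltf) => {
    // arrow functions do not rebind `this`, so this.scene and this.mixers
    // still refer to the CharacterControllerInput instance here
    gltf.scene.traverse((object) => {
      if (object.isMesh) object.castShadow = true;
    });
    const model = SkeletonUtils.clone(gltf.scene);
    const mixer = new THREE.AnimationMixer(model);
    mixer.clipAction(gltf.animations[0]).play();
    this.scene.add(model);
    this.mixers.push(mixer);
  });
}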

Three.js - OBJLoader not importing model due to a material error

I am attempting to use three.js to load a model (OBJ file) into the browser; however, there seems to be some issue with the materials. Is there any way I can just set a default material like black so it loads the model? I have tried to set a texture; however, this error originates from inside the OBJLoader library. How can I fix this?
The error is :
An error happened TypeError: Cannot read properties of undefined (reading 'scene')
at Scene.THREE.Mesh.THREE.MeshStandardMaterial.color ((index):168:10)
at Object.onLoad (OBJLoader.js:457:5)
at three.module.js:39650:38
My code is:
<!DOCTYPE html>
<html>
<head>
<title>
three.js WebRTC template
</title>
<meta name='viewport' content='initial-scale=1,maximum-scale=1,user-scalable=no' />
<script src="js/libs/socket.io.min.js"></script>
<script src="js/libs/three.min.js"></script>
<script src="resources/threejs/r105/js/controls/OrbitControls.js"></script>
<script src="js/libs/firstPersonControls.js"></script>
<script src="js/libs/simplepeer.min.js"></script>
<script src="js/environment.js"></script>
<script type="importmap">
{
"imports": {
"three": "https://unpkg.com/three#0.138.0/build/three.module.js",
"OrbitControls": "https://unpkg.com/three#0.138.0/examples/jsm/controls/OrbitControls.js"
}
}
</script>
<script type="module">
import { Group , ObjectLoader } from 'three';
//import MODEL from './park.obj';
import * as THREE from 'https://unpkg.com/three@0.138.0/build/three.module.js';
import {OBJLoader} from 'https://unpkg.com/three@0.138.0/examples/jsm/loaders/OBJLoader.js'
//import obj from "./park.obj"
// import { loadScene } from '../../Loaders/loader';
// instantiate a loader
class Scene {
constructor() {
//THREE scene
this.scene = new THREE.Scene();
const loader = new OBJLoader();
// load a resource
loader.load(
// resource URL
'./park.obj',
// called when resource is loaded
function ( object ) {
console.log(object)
object.traverse( function ( child ) {
if ( child instanceof THREE.Mesh ) {
const texture = new THREE.TextureLoader().load( "assets/texture.png" );
child.material.map = texture;
}
} );
this.scene.add( object );
},
// called when loading is in progresses
function ( xhr ) {
console.log( ( xhr.loaded / xhr.total * 100 ) + '% loaded' );
},
// called when loading has errors
function ( error ) {
console.log( 'An error happened', error );
}
);
// Floor
const floor = new THREE.Mesh(
new THREE.PlaneBufferGeometry(20, 20),
new THREE.MeshStandardMaterial({ color: "#a9c388" })
);
floor.position.y = 0;
floor.rotation.x = -Math.PI * 0.5;
this.scene.add(floor);
//Utility
this.width = window.innerWidth;
this.height = window.innerHeight * 0.9;
// lerp value to be used when interpolating positions and rotations
this.lerpValue = 0;
//THREE Camera
this.camera = new THREE.PerspectiveCamera(
50,
this.width / this.height,
0.1,
5000
);
this.camera.position.set(0, 3, 6);
this.scene.add(this.camera);
// create an AudioListener and add it to the camera
this.listener = new THREE.AudioListener();
this.camera.add(this.listener);
//THREE WebGL renderer
this.renderer = new THREE.WebGLRenderer({
antialiasing: true,
});
this.renderer.setClearColor(new THREE.Color("lightblue"));
this.renderer.setSize(this.width, this.height);
// add controls:
this.controls = new FirstPersonControls(this.scene, this.camera, this.renderer);
//Push the canvas to the DOM
let domElement = document.getElementById("canvas-container");
domElement.append(this.renderer.domElement);
//Setup event listeners for events and handle the states
window.addEventListener("resize", (e) => this.onWindowResize(e), false);
// Helpers
this.scene.add(new THREE.GridHelper(500, 500));
this.scene.add(new THREE.AxesHelper(10));
//createEnvironment(this.scene);
// Start the loop
this.frameCount = 0;
this.update();
}
//////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
// Lighting 💡
//////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
// Clients 👫
// add a client meshes, a video element and canvas for three.js video texture
addClient(id) {
let videoMaterial = makeVideoMaterial(id);
let otherMat = new THREE.MeshNormalMaterial();
let head = new THREE.Mesh(new THREE.BoxGeometry(1, 1, 1), [otherMat,otherMat,otherMat,otherMat,otherMat,videoMaterial]);
// set position of head before adding to parent object
head.position.set(0, 0, 0);
// https://threejs.org/docs/index.html#api/en/objects/Group
var group = new THREE.Group();
group.add(head);
// add group to scene
this.scene.add(group);
peers[id].group = group;
peers[id].previousPosition = new THREE.Vector3();
peers[id].previousRotation = new THREE.Quaternion();
peers[id].desiredPosition = new THREE.Vector3();
peers[id].desiredRotation = new THREE.Quaternion();
}
removeClient(id) {
this.scene.remove(peers[id].group);
}
// overloaded function can deal with new info or not
updateClientPositions(clientProperties) {
this.lerpValue = 0;
for (let id in clientProperties) {
if (id != mySocket.id) {
peers[id].previousPosition.copy(peers[id].group.position);
peers[id].previousRotation.copy(peers[id].group.quaternion);
peers[id].desiredPosition = new THREE.Vector3().fromArray(
clientProperties[id].position
);
peers[id].desiredRotation = new THREE.Quaternion().fromArray(
clientProperties[id].rotation
);
}
}
}
interpolatePositions() {
this.lerpValue += 0.1; // updates are sent roughly every 1/5 second == 10 frames
for (let id in peers) {
if (peers[id].group) {
peers[id].group.position.lerpVectors(peers[id].previousPosition,peers[id].desiredPosition, this.lerpValue);
peers[id].group.quaternion.slerpQuaternions(peers[id].previousRotation,peers[id].desiredRotation, this.lerpValue);
}
}
}
updateClientVolumes() {
for (let id in peers) {
let audioEl = document.getElementById(id + "_audio");
if (audioEl && peers[id].group) {
let distSquared = this.camera.position.distanceToSquared(
peers[id].group.position
);
if (distSquared > 500) {
audioEl.volume = 0;
} else {
// from lucasio here: https://discourse.threejs.org/t/positionalaudio-setmediastreamsource-with-webrtc-question-not-hearing-any-sound/14301/29
let volume = Math.min(1, 10 / distSquared);
audioEl.volume = volume;
}
}
}
}
//////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
// Interaction 🤾‍♀️
getPlayerPosition() {
// TODO: use quaternion or are euler angles fine here?
return [
[
this.camera.position.x,
this.camera.position.y,
this.camera.position.z,
],
[
this.camera.quaternion._x,
this.camera.quaternion._y,
this.camera.quaternion._z,
this.camera.quaternion._w,
],
];
}
//////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
// Rendering 🎥
update() {
requestAnimationFrame(() => this.update());
this.frameCount++;
//updateEnvironment();
if (this.frameCount % 25 === 0) {
this.updateClientVolumes();
}
this.interpolatePositions();
this.controls.update();
this.render();
}
render() {
this.renderer.render(this.scene, this.camera);
}
//////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
// Event Handlers 🍽
onWindowResize(e) {
this.width = window.innerWidth;
this.height = Math.floor(window.innerHeight * 0.9);
this.camera.aspect = this.width / this.height;
this.camera.updateProjectionMatrix();
this.renderer.setSize(this.width, this.height);
}
}
//////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
// Utilities
function makeVideoMaterial(id) {
let videoElement = document.getElementById(id + "_video");
let videoTexture = new THREE.VideoTexture(videoElement);
let videoMaterial = new THREE.MeshBasicMaterial({
map: videoTexture,
overdraw: true,
side: THREE.DoubleSide,
});
return videoMaterial;
}
/*
*
* This uses code from a THREE.js Multiplayer boilerplate made by Or Fleisher:
* https://github.com/juniorxsound/THREE.Multiplayer
* And a WEBRTC chat app made by Mikołaj Wargowski:
* https://github.com/Miczeq22/simple-chat-app
*
* Aidan Nelson, April 2020
*
*/
// socket.io
let mySocket;
// array of connected clients
let peers = {};
// Variable to store our three.js scene:
let myScene;
// set video width / height / framerate here:
const videoWidth = 80;
const videoHeight = 60;
const videoFrameRate = 15;
// Our local media stream (i.e. webcam and microphone stream)
let localMediaStream = null;
// Constraints for our local audio/video stream
let mediaConstraints = {
audio: true,
video: {
width: videoWidth,
height: videoHeight,
frameRate: videoFrameRate,
},
};
////////////////////////////////////////////////////////////////////////////////
// Start-Up Sequence:
////////////////////////////////////////////////////////////////////////////////
window.onload = async () => {
console.log("Window loaded.");
// first get user media
localMediaStream = await getMedia(mediaConstraints);
createLocalVideoElement();
// then initialize socket connection
initSocketConnection();
// finally create the threejs scene
console.log("Creating three.js scene...");
myScene = new Scene();
// start sending position data to the server
setInterval(function () {
mySocket.emit("move", myScene.getPlayerPosition());
}, 200);
};
////////////////////////////////////////////////////////////////////////////////
// Local media stream setup
////////////////////////////////////////////////////////////////////////////////
// https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
async function getMedia(_mediaConstraints) {
let stream = null;
try {
stream = await navigator.mediaDevices.getUserMedia(_mediaConstraints);
} catch (err) {
console.log("Failed to get user media!");
console.warn(err);
}
return stream;
}
////////////////////////////////////////////////////////////////////////////////
// Socket.io
////////////////////////////////////////////////////////////////////////////////
// establishes socket connection
function initSocketConnection() {
console.log("Initializing socket.io...");
mySocket = io();
mySocket.on("connect", () => {
console.log("My socket ID:", mySocket.id);
});
//On connection server sends the client his ID and a list of all keys
mySocket.on("introduction", (otherClientIds) => {
// for each existing user, add them as a client and add tracks to their peer connection
for (let i = 0; i < otherClientIds.length; i++) {
if (otherClientIds[i] != mySocket.id) {
let theirId = otherClientIds[i];
console.log("Adding client with id " + theirId);
peers[theirId] = {};
let pc = createPeerConnection(theirId, true);
peers[theirId].peerConnection = pc;
createClientMediaElements(theirId);
myScene.addClient(theirId);
}
}
});
// when a new user has entered the server
mySocket.on("newUserConnected", (theirId) => {
if (theirId != mySocket.id && !(theirId in peers)) {
console.log("A new user connected with the ID: " + theirId);
console.log("Adding client with id " + theirId);
peers[theirId] = {};
createClientMediaElements(theirId);
myScene.addClient(theirId);
}
});
mySocket.on("userDisconnected", (clientCount, _id, _ids) => {
// Update the data from the server
if (_id != mySocket.id) {
console.log("A user disconnected with the id: " + _id);
myScene.removeClient(_id);
removeClientVideoElementAndCanvas(_id);
delete peers[_id];
}
});
mySocket.on("signal", (to, from, data) => {
// console.log("Got a signal from the server: ", to, from, data);
// to should be us
if (to != mySocket.id) {
console.log("Socket IDs don't match");
}
// Look for the right simplepeer in our array
let peer = peers[from];
if (peer.peerConnection) {
peer.peerConnection.signal(data);
} else {
console.log("Never found right simplepeer object");
// Let's create it then, we won't be the "initiator"
// let theirSocketId = from;
let peerConnection = createPeerConnection(from, false);
peers[from].peerConnection = peerConnection;
// Tell the new simplepeer that signal
peerConnection.signal(data);
}
});
// Update when one of the users moves in space
mySocket.on("positions", (_clientProps) => {
myScene.updateClientPositions(_clientProps);
});
}
////////////////////////////////////////////////////////////////////////////////
// Clients / WebRTC
////////////////////////////////////////////////////////////////////////////////
// this function sets up a peer connection and corresponding DOM elements for a specific client
function createPeerConnection(theirSocketId, isInitiator = false) {
console.log('Connecting to peer with ID', theirSocketId);
console.log('initiating?', isInitiator);
let peerConnection = new SimplePeer({ initiator: isInitiator })
// simplepeer generates signals which need to be sent across socket
peerConnection.on("signal", (data) => {
// console.log('signal');
mySocket.emit("signal", theirSocketId, mySocket.id, data);
});
// When we have a connection, send our stream
peerConnection.on("connect", () => {
// Let's give them our stream
peerConnection.addStream(localMediaStream);
console.log("Send our stream");
});
// Stream coming in to us
peerConnection.on("stream", (stream) => {
console.log("Incoming Stream");
updateClientMediaElements(theirSocketId, stream);
});
peerConnection.on("close", () => {
console.log("Got close event");
// Should probably remove from the array of simplepeers
});
peerConnection.on("error", (err) => {
console.log(err);
});
return peerConnection;
}
// temporarily pause the outgoing stream
function disableOutgoingStream() {
localMediaStream.getTracks().forEach((track) => {
track.enabled = false;
});
}
// enable the outgoing stream
function enableOutgoingStream() {
localMediaStream.getTracks().forEach((track) => {
track.enabled = true;
});
}
////////////////////////////////////////////////////////////////////////////////
// Three.js
////////////////////////////////////////////////////////////////////////////////
function onPlayerMove() {
// console.log('Sending movement update to server.');
}
//////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
// Utilities 🚂
// created <video> element for local mediastream
function createLocalVideoElement() {
const videoElement = document.createElement("video");
videoElement.id = "local_video";
videoElement.autoplay = true;
videoElement.width = videoWidth;
videoElement.height = videoHeight;
// videoElement.style = "visibility: hidden;";
if (localMediaStream) {
let videoStream = new MediaStream([localMediaStream.getVideoTracks()[0]]);
videoElement.srcObject = videoStream;
}
document.body.appendChild(videoElement);
}
// created <video> element using client ID
function createClientMediaElements(_id) {
console.log("Creating <html> media elements for client with ID: " + _id);
const videoElement = document.createElement("video");
videoElement.id = _id + "_video";
videoElement.autoplay = true;
// videoElement.style = "visibility: hidden;";
document.body.appendChild(videoElement);
// create audio element for client
let audioEl = document.createElement("audio");
audioEl.setAttribute("id", _id + "_audio");
audioEl.controls = "controls";
audioEl.volume = 1;
document.body.appendChild(audioEl);
audioEl.addEventListener("loadeddata", () => {
audioEl.play();
});
}
function updateClientMediaElements(_id, stream) {
let videoStream = new MediaStream([stream.getVideoTracks()[0]]);
let audioStream = new MediaStream([stream.getAudioTracks()[0]]);
const videoElement = document.getElementById(_id + "_video");
videoElement.srcObject = videoStream;
let audioEl = document.getElementById(_id + "_audio");
audioEl.srcObject = audioStream;
}
// remove <video> element and corresponding <canvas> using client ID
function removeClientVideoElementAndCanvas(_id) {
console.log("Removing <video> element for client with id: " + _id);
let videoEl = document.getElementById(_id + "_video");
if (videoEl != null) {
videoEl.remove();
}
}
</script>
<style>
body {
overflow: hidden;
}
#info {
padding: 1em;
border: 1em;
color: rgb(223, 223, 223);
}
</style>
</head>
<body style="background: yellow; margin: 0; padding: 0">
<div id="canvas-container"></div>
</body>
</html>
Any help appreciated.
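The stack trace points at the same problem as the previous question: this.scene is read inside a regular function callback, where this is no longer the Scene instance. A minimal sketch of one possible fix, using an arrow callback and assigning a plain fallback material (the black MeshBasicMaterial is an illustrative choice, not part of the original code):

// inside the Scene constructor
const loader = new OBJLoader();
loader.load(
  './park.obj',
  (object) => {
    // arrow function: `this` is still the Scene instance here
    object.traverse((child) => {
      if (child instanceof THREE.Mesh) {
        // fall back to a simple material so the model renders
        // even if the OBJ's own materials cannot be resolved
        child.material = new THREE.MeshBasicMaterial({ color: 0x000000 });
      }
    });
    this.scene.add(object);
  },
  (xhr) => console.log((xhr.loaded / xhr.total) * 100 + '% loaded'),
  (error) => console.log('An error happened', error)
);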

Three.js how to add multiple Mixamo animations without skin to an FBX model?

I'm trying to create a game where there is an animated character that has multiple animations from Mixamo. I want the animation to change based on what the user is doing on the game, like Walking, Running, or Idle. Here is how I'm loading the FBX model (without animations):
loader.load('Assets/Animations/Main.fbx', function(object){
object.traverse(function (child){
if (child.isMesh) {
child.castShadow = true;
child.receiveShadow = true;
child.frustumCulled = false;
}
});
object.rotation.x = Math.PI / 2
object.position.x = 11;
scene.add(object);
});
I also have the following files, which are animations without skin.
Idle.fbx
Walking.fbx
Running.fbx
My goal is to make something like this or like this. The only problem with these two links is that in the first one they are using a model with multiple animations attached to it (I have a plain model plus 3 animations without skin), and in the second one the code is written in TypeScript (I prefer JavaScript).
I am a newbie to 3D modelling, so I don't know how to attach all the animations without skin to the main FBX model. How can I combine the animations into one model in Blender, or is there a way to do it in three.js?
I appreciate any help with this, thanks!
EDIT:
According to @GuyNachshon, is this how I should handle this?
So first I load the model without animations (yourMesh), and also create an AnimationMixer:
var mixer;
loader.load('Assets/Animations/Main.fbx', function(object){
object.traverse(function (child){
if (child.isMesh) {
child.castShadow = true;
child.receiveShadow = true;
child.frustumCulled = false;
}
});
mixer = new THREE.AnimationMixer(object);
object.rotation.x = Math.PI / 2
object.position.x = 11;
scene.add(object);
});
Then, I have to load the 3 animation files without skin and add them to animationsArray. (Not sure if I'm loading the animations correctly...):
loader.load('Assets/Animations/Idle.fbx', function(object){
object.traverse(function (child){
if (child.isMesh) {
child.castShadow = true;
child.receiveShadow = true;
child.frustumCulled = false;
}
});
object.rotation.x = Math.PI / 2
object.position.x = 11;
animationsArray.push(object);
scene.add(object);
});
loader.load('Assets/Animations/Walking.fbx', function(object){
object.traverse(function (child){
if (child.isMesh) {
child.castShadow = true;
child.receiveShadow = true;
child.frustumCulled = false;
}
});
object.rotation.x = Math.PI / 2
object.position.x = 11;
animationsArray.push(object);
scene.add(object);
});
loader.load('Assets/Animations/Running.fbx', function(object){
object.traverse(function (child){
if (child.isMesh) {
child.castShadow = true;
child.receiveShadow = true;
child.frustumCulled = false;
}
});
object.rotation.x = Math.PI / 2
object.position.x = 11;
animationsArray.push(object);
scene.add(object);
});
After everything has loaded completely, I create the actions:
let actions = mixer.clipAction(animationsArray).play();
But, after you say to do:
actions.play();
What is that line going to play? Is it going to play the first animation in animationsArray?
You need to create an AnimationMixer.
So let's say you have created a scene, added a mesh, etc. Now you can init an animation mixer:
let mixer = new THREE.AnimationMixer(yourMesh);
then to add animations use clipAction:
let actions = mixer.clipAction(animationsArray).play();
now play
actions.play();
but to really know how to use it you should read the docs (attached above :) )
Edit - responding to your edit
In order to control which animation will play, you can do several things; here is an example from the docs:
const mixer = new THREE.AnimationMixer( mesh );
const clips = mesh.animations;
// Update the mixer on each frame
function update () {
mixer.update( deltaSeconds );
}
// Play a specific animation
const clip = THREE.AnimationClip.findByName( clips, 'dance' );
const action = mixer.clipAction( clip );
action.play();
// Play all animations
clips.forEach( function ( clip ) {
mixer.clipAction( clip ).play();
} );
Now, if you are having trouble structuring your code, here is a general example of how to attach animations to an FBX model and control them:
let mixer // THREE.AnimationMixer
let modelReady = false
const animationActions = [] // will hold THREE.AnimationAction instances
let activeAction // THREE.AnimationAction
let lastAction // THREE.AnimationAction
const fbxLoader = new FBXLoader()
After we have initialized everything we need, let's load everything:
fbxLoader.load(
'path/to/your/model.fbx',
(object) => {
object.scale.set(0.01, 0.01, 0.01)
mixer = new THREE.AnimationMixer(object)
const animationAction = mixer.clipAction(object.animations[0])
animationActions.push(animationAction)
animationsFolder.add(animations, 'default')
activeAction = animationActions[0] // sets current animation
scene.add(object) // adds animated object to your scene
//add an animation from another file
fbxLoader.load(
'path/to/animation.fbx',
(object) => {
console.log('loaded animation')
const animationAction = mixer.clipAction(object.animations[0])
animationActions.push(animationAction)
animationsFolder.add(animations, 'animationName')
//add an animation from another file
fbxLoader.load(
'path/to/other/animation.fbx',
(object) => {
console.log('loaded second animation')
const animationAction = mixer.clipAction(object.animations[0])
animationActions.push(animationAction)
animationsFolder.add(animations, 'animationName')
//add an animation from another file
fbxLoader.load(
'path/to/animation.fbx',
(object) => {
console.log('loaded third animation');
const animationAction = mixer.clipAction(object.animations[0])
animationActions.push(animationAction)
animationsFolder.add(animations, 'animationName')
modelReady = true
},
(xhr) => {
console.log(
(xhr.loaded / xhr.total) * 100 + '% loaded'
)
},
(error) => {
console.log(error)
}
)
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + '% loaded')
},
(error) => {
console.log(error)
}
)
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + '% loaded')
},
(error) => {
console.log(error)
}
)
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + '% loaded')
},
(error) => {
console.log(error)
}
)
now we should set what our animations and actions are:
const animations = {
default: function () {
setAction(animationActions[0])
},
firstAnimation: function () {
setAction(animationActions[1])
},
secondAnimation: function () {
setAction(animationActions[2])
},
thirdAnimation: function () {
setAction(animationActions[3])
}
}
const setAction = (toAction) => {
if (toAction != activeAction) {
lastAction = activeAction
activeAction = toAction
//lastAction.stop()
lastAction.fadeOut(1)
activeAction.reset()
activeAction.fadeIn(1)
activeAction.play()
}
}
Let's animate!
const clock = new THREE.Clock()
function animate() {
requestAnimationFrame(animate)
controls.update()
if (modelReady) {mixer.update(clock.getDelta())}
render()
}
function render() {
renderer.render(scene, camera)
}
animate()
putting it all together:
import * as THREE from 'three'
import { FBXLoader } from 'three/examples/jsm/loaders/FBXLoader'
let mixer // THREE.AnimationMixer
let modelReady = false
const animationActions = [] // will hold THREE.AnimationAction instances
let activeAction // THREE.AnimationAction
let lastAction // THREE.AnimationAction
const fbxLoader = new FBXLoader()
fbxLoader.load(
'path/to/your/model.fbx',
(object) => {
object.scale.set(0.01, 0.01, 0.01)
mixer = new THREE.AnimationMixer(object)
const animationAction = mixer.clipAction(object.animations[0])
animationActions.push(animationAction)
animationsFolder.add(animations, 'default')
activeAction = animationActions[0] // sets current animation
scene.add(object) // adds animated object to your scene
//add an animation from another file
fbxLoader.load(
'path/to/animation.fbx',
(object) => {
console.log('loaded animation')
const animationAction = mixer.clipAction(object.animations[0])
animationActions.push(animationAction)
animationsFolder.add(animations, 'animationName')
//add an animation from another file
fbxLoader.load(
'path/to/other/animation.fbx',
(object) => {
console.log('loaded second animation')
const animationAction = mixer.clipAction(object.animations[0])
animationActions.push(animationAction)
animationsFolder.add(animations, 'animationName')
//add an animation from another file
fbxLoader.load(
'path/to/animation.fbx',
(object) => {
console.log('loaded third animation');
const animationAction = mixer.clipAction(object.animations[0])
animationActions.push(animationAction)
animationsFolder.add(animations, 'animationName')
modelReady = true
},
(xhr) => {
console.log(
(xhr.loaded / xhr.total) * 100 + '% loaded'
)
},
(error) => {
console.log(error)
}
)
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + '% loaded')
},
(error) => {
console.log(error)
}
)
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + '% loaded')
},
(error) => {
console.log(error)
}
)
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + '% loaded')
},
(error) => {
console.log(error)
}
)
const animations = {
default: function () {
setAction(animationActions[0])
},
firstAnimation: function () {
setAction(animationActions[1])
},
secondAnimation: function () {
setAction(animationActions[2])
},
thirdAnimation: function () {
setAction(animationActions[3])
}
}
const setAction = (toAction) => {
if (toAction != activeAction) {
lastAction = activeAction
activeAction = toAction
//lastAction.stop()
lastAction.fadeOut(1)
activeAction.reset()
activeAction.fadeIn(1)
activeAction.play()
}
}
const clock = new THREE.Clock()
function animate() {
requestAnimationFrame(animate)
controls.update()
if (modelReady) {mixer.update(clock.getDelta())}
render()
}
function render() {
renderer.render(scene, camera)
}
animate()
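To tie this back to the original goal (switching between Idle, Walking and Running based on what the user is doing), a minimal sketch assuming animationActions ends up holding the actions in the load order used above; the key mapping is illustrative, not part of the original answer:

window.addEventListener('keydown', (event) => {
  if (!modelReady) return
  if (event.code === 'KeyW') {
    setAction(animationActions[1]) // e.g. Walking
  } else if (event.code === 'ShiftLeft') {
    setAction(animationActions[2]) // e.g. Running
  }
})
window.addEventListener('keyup', () => {
  if (modelReady) setAction(animationActions[0]) // back to default/Idle
})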

Need help converting code for fabric canvas from Vanilla JS to ReactJS

So I was playing around with the fabric.js canvas library and found this fiddle written in vanilla JS which lets you draw polygons on the canvas. I wanted to implement the exact same thing in my React project, so I tried to convert the entire code to React (https://codesandbox.io/s/jolly-kowalevski-tjt58). The code somewhat works, but there are some new bugs which are not in the original fiddle and I'm having trouble fixing them.
For example: try to create a polygon by clicking the draw button. The first time, the polygon is drawn without any bugs, but when you click the draw button a second time, the canvas starts acting weird and a strange polygon is created.
So basically I need help converting the vanilla code to React with zero bugs.
extra information:
fabric version used in the fiddle: 4.0.0
fabric version in sandbox: 4.0.0
Vanilla Js Code:
const getPathBtn = document.getElementById("get-path");
const drawPolygonBtn = document.getElementById("draw-polygon");
const showPolygonBtn = document.getElementById("show-polygon");
const editPolygonBtn = document.getElementById("edit-polygon");
const canvas = new fabric.Canvas("canvas", {
selection: false
});
let line, isDown;
let prevCords;
let vertices = [];
let polygon;
const resetCanvas = () => {
canvas.off();
canvas.clear();
};
const resetVariables = () => {
line = undefined;
isDown = undefined;
prevCords = undefined;
polygon = undefined;
vertices = [];
};
const addVertice = (newPoint) => {
if (vertices.length > 0) {
const lastPoint = vertices[vertices.length - 1];
if (lastPoint.x !== newPoint.x && lastPoint.y !== newPoint.y) {
vertices.push(newPoint);
}
} else {
vertices.push(newPoint);
}
};
const drawPolygon = () => {
resetVariables();
resetCanvas();
canvas.on("mouse:down", function(o) {
isDown = true;
const pointer = canvas.getPointer(o.e);
let points = [pointer.x, pointer.y, pointer.x, pointer.y];
if (prevCords && prevCords.x2 && prevCords.y2) {
const prevX = prevCords.x2;
const prevY = prevCords.y2;
points = [prevX, prevY, prevX, prevY];
}
const newPoint = {
x: points[0],
y: points[1]
};
addVertice(newPoint);
line = new fabric.Line(points, {
strokeWidth: 2,
fill: "black",
stroke: "black",
originX: "center",
originY: "center",
});
canvas.add(line);
});
canvas.on("mouse:move", function(o) {
if (!isDown) return;
const pointer = canvas.getPointer(o.e);
const coords = {
x2: pointer.x,
y2: pointer.y
};
line.set(coords);
prevCords = coords;
canvas.renderAll();
});
canvas.on("mouse:up", function(o) {
isDown = false;
const pointer = canvas.getPointer(o.e);
const newPoint = {
x: pointer.x,
y: pointer.y
};
addVertice(newPoint);
});
canvas.on("object:moving", function(option) {
const object = option.target;
canvas.forEachObject(function(obj) {
if (obj.name == "Polygon") {
if (obj.PolygonNumber == object.polygonNo) {
const points = window["polygon" + object.polygonNo].get(
"points"
);
points[object.circleNo - 1].x = object.left;
points[object.circleNo - 1].y = object.top;
window["polygon" + object.polygonNo].set({
points: points,
});
}
}
});
canvas.renderAll();
});
};
const showPolygon = () => {
resetCanvas();
if (!polygon) {
polygon = new fabric.Polygon(vertices, {
fill: "transparent",
strokeWidth: 2,
stroke: "black",
objectCaching: false,
transparentCorners: false,
cornerColor: "blue",
});
}
polygon.edit = false;
polygon.hasBorders = true;
polygon.cornerColor = "blue";
polygon.cornerStyle = "rect";
polygon.controls = fabric.Object.prototype.controls;
canvas.add(polygon);
};
// polygon stuff
// define a function that can locate the controls.
// this function will be used both for drawing and for interaction.
function polygonPositionHandler(dim, finalMatrix, fabricObject) {
let x = fabricObject.points[this.pointIndex].x - fabricObject.pathOffset.x,
y = fabricObject.points[this.pointIndex].y - fabricObject.pathOffset.y;
return fabric.util.transformPoint({
x: x,
y: y
},
fabric.util.multiplyTransformMatrices(
fabricObject.canvas.viewportTransform,
fabricObject.calcTransformMatrix()
)
);
}
// define a function that will define what the control does
// this function will be called on every mouse move after a control has been
// clicked and is being dragged.
// The function receive as argument the mouse event, the current trasnform object
// and the current position in canvas coordinate
// transform.target is a reference to the current object being transformed,
function actionHandler(eventData, transform, x, y) {
let polygon = transform.target,
currentControl = polygon.controls[polygon.__corner],
mouseLocalPosition = polygon.toLocalPoint(
new fabric.Point(x, y),
"center",
"center"
),
polygonBaseSize = polygon._getNonTransformedDimensions(),
size = polygon._getTransformedDimensions(0, 0),
finalPointPosition = {
x: (mouseLocalPosition.x * polygonBaseSize.x) / size.x +
polygon.pathOffset.x,
y: (mouseLocalPosition.y * polygonBaseSize.y) / size.y +
polygon.pathOffset.y,
};
polygon.points[currentControl.pointIndex] = finalPointPosition;
return true;
}
// define a function that can keep the polygon in the same position when we change its
// width/height/top/left.
function anchorWrapper(anchorIndex, fn) {
return function(eventData, transform, x, y) {
let fabricObject = transform.target,
absolutePoint = fabric.util.transformPoint({
x: fabricObject.points[anchorIndex].x -
fabricObject.pathOffset.x,
y: fabricObject.points[anchorIndex].y -
fabricObject.pathOffset.y,
},
fabricObject.calcTransformMatrix()
),
actionPerformed = fn(eventData, transform, x, y),
newDim = fabricObject._setPositionDimensions({}),
polygonBaseSize = fabricObject._getNonTransformedDimensions(),
newX =
(fabricObject.points[anchorIndex].x -
fabricObject.pathOffset.x) /
polygonBaseSize.x,
newY =
(fabricObject.points[anchorIndex].y -
fabricObject.pathOffset.y) /
polygonBaseSize.y;
fabricObject.setPositionByOrigin(absolutePoint, newX + 0.5, newY + 0.5);
return actionPerformed;
};
}
function editPolygon() {
canvas.setActiveObject(polygon);
polygon.edit = true;
polygon.hasBorders = false;
let lastControl = polygon.points.length - 1;
polygon.cornerStyle = "circle";
polygon.cornerColor = "rgba(0,0,255,0.5)";
polygon.controls = polygon.points.reduce(function(acc, point, index) {
acc["p" + index] = new fabric.Control({
positionHandler: polygonPositionHandler,
actionHandler: anchorWrapper(
index > 0 ? index - 1 : lastControl,
actionHandler
),
actionName: "modifyPolygon",
pointIndex: index,
});
return acc;
}, {});
canvas.requestRenderAll();
}
// Button events
drawPolygonBtn.onclick = () => {
drawPolygon();
};
showPolygonBtn.onclick = () => {
showPolygon();
};
editPolygonBtn.onclick = () => {
editPolygon();
};
getPathBtn.onclick = () => {
console.log("vertices", polygon.points);
};
On the 2nd draw (clicking the draw button a second time), the line is always connected to the same point, so there is a problem with prevCords.
Adding a console.log to the handler function of "mouse:move" confirmed this:
fabricCanvas.on("mouse:move", function (o) {
console.log("mousemove fired", prevCords); // always the same value
if (isDown.current || !line.current) return;
const pointer = fabricCanvas.getPointer(o.e);
const coords = {
x2: pointer.x,
y2: pointer.y
};
line.current.set(coords);
setPrevCords(coords); // the line should connect to this new point
fabricCanvas.renderAll();
});
It's because of a closure: the mouse:move handler always remembers the value of prevCords from when it was created (i.e. when you click the Draw button), not the value later updated by setPrevCords.
To solve this, simply use useRef to store prevCords (i.e. use a reference):
Line 6:
const [fabricCanvas, setFabricCanvas] = useState();
const prevCordsRef = useRef();
const line = useRef();
Line 35:
const resetVariables = () => {
line.current = undefined;
isDown.current = undefined;
prevCordsRef.current = undefined;
polygon.current = undefined;
vertices.current = [];
};
Line 65:
if (prevCordsRef.current && prevCordsRef.current.x2 && prevCordsRef.current.y2) {
const prevX = prevCordsRef.current.x2;
const prevY = prevCordsRef.current.y2;
points = [prevX, prevY, prevX, prevY];
}
Line 96:
prevCordsRef.current = coords;
One last suggestion is to change Line 89 (so the feature matches the demo):
if (!isDown.current) return;
In summary:
Don't use useState for a variable that must have the latest value inside another event handler; use useRef instead.
Using useState for prevCords is also wasteful, since React will re-render on every setState.
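Putting those changes together, a minimal sketch of the ref-based mouse:move handler (assuming fabricCanvas, line, isDown and the rest are set up as in the sandbox):

// refs instead of state: the handler always reads the latest values
const prevCordsRef = useRef();
const line = useRef();
const isDown = useRef(false);

fabricCanvas.on("mouse:move", (o) => {
  if (!isDown.current) return;
  const pointer = fabricCanvas.getPointer(o.e);
  const coords = { x2: pointer.x, y2: pointer.y };
  line.current.set(coords);
  prevCordsRef.current = coords; // no re-render, no stale closure
  fabricCanvas.renderAll();
});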

function does not recognize

What am I trying to do:
I'm trying to code Flappy Bird with the p5.js library.
Problem: the code does not recognize the function I defined.
function Game() {
this.pipes = generatePipes();
setInterval(this.gameLoop, 1000 / 60);
generatePipes = () => {
const firstPipe = new Pipe(null, space);
const secondPipeHeight = winHeight - firstPipe.height - space;
const secondPipe = new Pipe(secondPipeHeight, space);
return [firstPipe, secondPipe]
}
gameLoop = () => {
this.update();
this.draw();
}
update = () => {
if (frameCount % 30 == 0) {
this.pipes = this.generatePipes();
this.pipes.push(...pipes);
}
this.pipes.forEach(pipe => pipe.x = pipe.x - 1);
}
draw = () => {
this.pipes.forEach(pipe => pipe.draw());
}
}
class Pipe {
constructor(height, space) {
this.x = 100;
this.y = height ? winHeight - height : 0; // determines the pipe's position along the y axis
this.width = pipeWidth;
this.height = height || minPipeHeight + Math.floor(Math.random() * (winHeight - space - minPipeHeight * 2));
}
draw() {
fill(124);
noStroke();
rect(this.x, this.y, this.width, this.height);
}
}
error:
Uncaught TypeError: this.generatePipes is not a function
function Game() {
generatePipes = () => {
const firstPipe = new Pipe(null, space);
const secondPipeHeight = winHeight - firstPipe.height - space;
const secondPipe = new Pipe(secondPipeHeight, space);
return [firstPipe, secondPipe]
}
gameLoop = () => {
this.update();
this.draw();
}
this.pipes = generatePipes();
setInterval(this.gameLoop, 1000 / 60);
update = () => {
if (frameCount % 30 == 0) {
this.pipes = generatePipes();
this.pipes.push(...pipes);
}
this.pipes.forEach(pipe => pipe.x = pipe.x - 1);
}
draw = () => {
this.pipes.forEach(pipe => pipe.draw());
}
}
This updated code should work.
In your original code you invoked generatePipes() before the function expression was assigned, so it won't work. Function expressions are only created when the interpreter reaches the line where the function expression is defined.
The way you wrote it, you assigned a function to the variable generatePipes, which means you can only access it once that assignment has run.
You have two options: assign the generatePipes variable before using it, or declare it as a nested function declaration (which is hoisted).
function Game() {
generatePipes = () => {
...
return x;
}
this.pipes = generatePipes();
}
OR
function Game() {
this.pipes = generatePipes();
function generatePipes() {
...
return x;
}
}
Just assign your functions to this:
this.generatePipes = () => {...}
this.gameLoop = () => {...}
this.update = () => {...}
this.draw = () => {...}
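For completeness, a minimal sketch of the Game constructor with every method assigned to this (same logic as the question; the update body assumes the intent was to keep the old pipes and add a new pair):

function Game() {
  this.generatePipes = () => {
    const firstPipe = new Pipe(null, space);
    const secondPipeHeight = winHeight - firstPipe.height - space;
    const secondPipe = new Pipe(secondPipeHeight, space);
    return [firstPipe, secondPipe];
  };
  this.update = () => {
    if (frameCount % 30 == 0) {
      // assumed intent: keep existing pipes and append a fresh pair
      this.pipes = [...this.pipes, ...this.generatePipes()];
    }
    this.pipes.forEach(pipe => pipe.x = pipe.x - 1);
  };
  this.draw = () => {
    this.pipes.forEach(pipe => pipe.draw());
  };
  this.gameLoop = () => {
    this.update();
    this.draw();
  };
  this.pipes = this.generatePipes();
  setInterval(this.gameLoop, 1000 / 60);
}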
