Loading 3D model with animation using CreateFromMorphTargetSequence Three.js - javascript

I'm trying to load this GLB file and play its animation. The code below throws the following error:
TypeError: Cannot read property 'length' of undefined at Function.CreateFromMorphTargetSequence
function Animate() {
    if (window.anim_flag) {
        // Hotspot_Ring_Anim();
        requestAnimationFrame(Animate);
        renderer.clear();
        TWEEN.update();
        orbit.update();
        if (mixer.length > 0) {
            var delta = clock.getDelta();
            for (var i = 0; i < mixer.length; i++) {
                mixer[i].update(delta);
            }
        }
        renderer.render(scene, camera);
    }
}
function Add_Hotspot_Rings(id, px, py, pz, rx, ry, rz, sx, sy, sz) {
    const loader = new GLTFLoader();
    // Optional: Provide a DRACOLoader instance to decode compressed mesh data
    const dracoLoader = new DRACOLoader();
    dracoLoader.setDecoderPath( '../jsm/draco/' );
    loader.setDRACOLoader( dracoLoader );
    loader.load( '../Models/ABB_Clinic_AnimatedRings_Lowpoly_02.glb', function ( gltf ) {
        const model = gltf.scene;
        model.name = 'hotspot_rings';
        model.position.set(px, py, pz);
        model.rotation.set(0, ry, rz);
        model.scale.set(0.90, 0.3, 0.90);
        scene.add(model);
        // MORPH
        const mixerr = new THREE.AnimationMixer( model );
        const clips = model.animations;
        const morphClip = THREE.AnimationClip.CreateFromMorphTargetSequence( 'RingsRising', model.morphTargets );
        mixerr.clipAction(morphClip).setDuration(1).play();
        mixer.push(mixerr);
        window.anim_flag = true;
        Animate();
    }, undefined, function ( error ) {
        console.error( error );
    } );
}
How can I resolve this error and load the model with the animation playing?

The error comes from this line:
const morphClip = THREE.AnimationClip.CreateFromMorphTargetSequence( 'RingsRising', model.morphTargets );
Instances of Object3D do not have a morphTargets property, so the second argument is undefined and CreateFromMorphTargetSequence fails when it tries to read its length. Playing an animation from a glTF asset should look like this:
const animations = gltf.animations;
const mixer = new THREE.AnimationMixer( model );
mixer.clipAction( animations[ 0 ] ).setDuration( 1 ).play();
Live example: https://threejs.org/examples/webgl_morphtargets_horse
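Applied to the question's loader callback, a minimal sketch might look like this (assuming the GLB actually exports an animation clip, and that the global mixer array, Animate() and window.anim_flag from the question are in scope):
loader.load( '../Models/ABB_Clinic_AnimatedRings_Lowpoly_02.glb', function ( gltf ) {
    const model = gltf.scene;
    model.name = 'hotspot_rings';
    model.position.set( px, py, pz );
    scene.add( model );

    // Clips exported with the glTF live on gltf.animations, not on the scene graph,
    // so no morph-target lookup is needed.
    const mixerr = new THREE.AnimationMixer( model );
    if ( gltf.animations.length > 0 ) {
        mixerr.clipAction( gltf.animations[ 0 ] ).setDuration( 1 ).play();
    }

    mixer.push( mixerr );
    window.anim_flag = true;
    Animate();
}, undefined, function ( error ) {
    console.error( error );
} );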

Related

Raycasting not working on imported OBJ model, but working on object created with the program

I am new to three.js and I am trying to use raycasting. I am quite confused about how raycasting works on imported 3D models. Here I imported an OBJ model, and when I try to detect whether I touch the imported 3D model, the function does not work as I expect. But when I change it to detect a box object I created with three.js, it works as expected. Can someone help with this problem?
import * as THREE from 'https://unpkg.com/three@0.126.1/build/three.module.js';
import { OrbitControls } from 'https://unpkg.com/three@0.126.1/examples/jsm/controls/OrbitControls.js';
import { OBJLoader } from 'https://unpkg.com/three@0.126.1/examples/jsm/loaders/OBJLoader.js';
import { MTLLoader } from 'https://unpkg.com/three@0.126.1/examples/jsm/loaders/MTLLoader.js';
import * as GUI from 'https://unpkg.com/dat.gui@0.7.7/build/dat.gui.module.js';
//Set up environment for 3D model
const renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth,window.innerHeight);
document.body.appendChild(renderer.domElement);
const scene = new THREE.Scene();
//Set up Lighting
var keyLight = new THREE.DirectionalLight(new THREE.Color('hsl(30,100%,75%)'),1.0);
var fillLight = new THREE.DirectionalLight(new THREE.Color('hsl(240,100%,75%)'),0.75);
var backLight = new THREE.DirectionalLight(0xffffff,1.0);
keyLight.position.set(-100,0,100);
fillLight.position.set(100,0,100);
backLight.position.set(100,0,-100).normalize();
scene.add(keyLight);
scene.add(fillLight);
scene.add(backLight);
//Set up Camera
const camera = new THREE.PerspectiveCamera(
75,
window.innerWidth/window.innerHeight,
0.1,
1000
);
const orbit = new OrbitControls(camera,renderer.domElement);
camera.position.set(0,2,7.5);
orbit.update();
//Set up base
const planeGeometry = new THREE.PlaneGeometry(100,100);
const planeMaterial = new THREE.MeshBasicMaterial({
color: 0xffffff,
wireframe: false,
side: THREE.DoubleSide
});
var plane = new THREE.Mesh(planeGeometry,planeMaterial);
scene.add(plane);
plane.rotation.x = -0.5 * Math.PI;
//Model for testing
const boxGeometry = new THREE.BoxGeometry(10,10,10);
const boxMaterial = new THREE.MeshBasicMaterial({color: 0x0000ff})
var box = new THREE.Mesh(boxGeometry,boxMaterial);
scene.add(box);
box.position.x += 20;
//Set up GUI for controlling base
const gridHelper = new THREE.GridHelper(100);
scene.add(gridHelper);
const gui = new GUI.GUI();
const options = {
planeColor: '#ffea00',
wireframe: false
};
gui.addColor(options,'planeColor').onChange(function(e){
plane.material.color.set(e);
});
gui.add(options,'wireframe').onChange(function(e){
plane.material.wireframe = e;
});
//Set up Background image
const textureLoader = new THREE.TextureLoader();
textureLoader.load(
'../img/doge.jpg',
function ( texture ) {
scene.background = texture;
},
undefined,
function ( err ) {
console.error( 'An error happened.' );
}
);
//Import obj and mtl file to build 3D model
var building;
const mtlLoader = new MTLLoader();
mtlLoader.load(
'../source/building_04.mtl',
(materials)=>{
materials.preload();
console.log(materials);
const objLoader = new OBJLoader()
objLoader.setMaterials(materials)
objLoader.load(
'../source/building_04.obj',
(object)=>{
scene.add(object)
building = object;
object.position.y +=1;
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + '% loaded')
},
(error) => {
console.log("Object error")
}
)
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + '% loaded')
},
(error) => {
console.log("Material Eror")
}
)
//Set up raycasting environment
const raycaster = new THREE.Raycaster();
const mousePosition = new THREE.Vector2();
window.addEventListener('mousemove', function(e){
mousePosition.x = (e.clientX/this.window.innerWidth)*2 - 1;
mousePosition.y = -(e.clientY/this.window.innerHeight)*2 + 1;
});
//Function making sure everything to be up to date
function animate(){
//Raycasting
raycaster.setFromCamera( mousePosition, camera );
const intersects = raycaster.intersectObjects( scene.children,true);
if(intersects.length > 0){
for ( let i = 0; i < intersects.length; i ++ ) {
if(building != undefined){
if(intersects[0].object.id === building.id){
console.log("Touched!");
}
else{
console.log("did not touch!");
}
}
else{
console.log("Not reeady!");
}
console.log(intersects[i].object.id);
}
}
else{
console.log("did not touched")
}
console.log("finished")
renderer.render(scene,camera);
}
renderer.setAnimationLoop(animate);
//Make the screen to resize following the browser size
window.addEventListener('resize',function(){
camera.aspect = window.innerWidth/this.window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(window.innerWidth,window.innerHeight);
});
The problem is that the object you get from OBJLoader and assign to your building variable is an instance of THREE.Group. Groups do not have their own geometry or material; they are more or less containers that hold other 3D objects (meshes, point clouds, lines or other groups) and represent them as a logical group.
That means a group is never returned from a raycasting evaluation, so the check if(intersects[0].object.id === building.id) always fails.
I suggest you tag your objects after the loading process, like this:
building.traverse( function ( object ) {
    object.userData.tag = 'building';
} );
You can then use the tag property in your raycasting routine for detecting the building.
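A minimal sketch of that check inside the existing animate() function (assuming the traversal above has already tagged the building):
raycaster.setFromCamera( mousePosition, camera );
const intersects = raycaster.intersectObjects( scene.children, true );

// intersects[0] is the closest hit; compare the tag set during loading
// instead of comparing ids against the group.
if ( intersects.length > 0 && intersects[ 0 ].object.userData.tag === 'building' ) {
    console.log( 'Touched!' );
}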

Access parts of a GLTF import in the main rendering loop (sorry if noobish)

I’m a Unity developer, trying to learn Three.js.
One of the many problems I encounter may sound simple, but it's a pain in the a** for me.
All I wanna do is to import and animate a 3D logo in my Three.js app.
This logo is made out of 4 different meshes (elem1 to elem4) which don’t overlap.
It was exported as a FBX, then converted to GLTF using an online converter.
No problem when importing it, resizing it and even changing its material.
My problem is : how to refer to the whole object, and also to its 4 elements, in order to animate them in my « animate » function (I mean in my main rendering loop) ?
The only thing I could do was to create a second « animate » function within the loader callback, which seems a bit weird to me.
I can’t find a way to refer to them in the main scope of my app.
Dumping my GLTF import gives this hierarchy (these are called « nodes », am I right ?) :
AuxScene [Scene]
*no-name* [Object3D]
elem1 [Mesh]
elem2 [Mesh]
elem3 [Mesh]
elem4 [Mesh]
Here’s my code, cleared from unnecessary stuff :
'use strict';
// CANVAS AND RENDERER
const canvas = document.querySelector('#myCanvas');
const renderer = new THREE.WebGLRenderer({ canvas, antialias: true });
renderer.setSize( window.innerWidth, window.innerHeight );
// SCENE AND BACKGROUND
var scene = new THREE.Scene();
const loader = new THREE.TextureLoader();
scene.background = loader.load( 'images/background.jpg');
// CAMERA
var camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 1000 );
camera.position.z = 100;
// MATERIALS
var material = new THREE.MeshNormalMaterial ();
// ---------------------------------- LOGO IMPORTATION
var gltfLoader = new THREE.GLTFLoader();
var root;
var elem1, elem2, elem3, elem4;
gltfLoader.load('gltf/logo.gltf', function(gltf) {
root = gltf.scene;
root.rotation.x = Math.PI / 2;
scene.add(root);
root.traverse( function( child ) {
if ( child instanceof THREE.Mesh ) { child.material = material; }
} );
elem1 = root.getObjectByName('element1');
elem2 = root.getObjectByName('element2');
elem3 = root.getObjectByName('element3');
elem4 = root.getObjectByName('element4');
console.log(dumpObject(root).join('\n'));
// logo animations
var speed = 0.0005;
var turnsBeforeStop = 4;
requestAnimationFrame( animate2 );
function animate2( time ) {
root.rotation.z = Math.sin (time * 0.0005) * 0.5;
root.rotation.x = Math.PI/3 + Math.sin(time * 0.0003) * 0.5;
if(elem1.rotation.y < Math.PI * turnsBeforeStop){
elem1.rotation.y = time * speed*2;
elem2.rotation.z = time * speed*2;
elem3.rotation.y = time * -speed;
elem4.rotation.z = time * -speed*2;
}
requestAnimationFrame( animate2 );
}
});
// ------------------------------------------------------------ END LOGO
renderer.render( scene, camera );
requestAnimationFrame( animate );
// ANIMATION MAIN LOOP
function animate( time ) {
/*
This is where I would like to access my logo (as a whole, and also its separate parts).
But root.rotation or elem1.rotation won't work here and give me this error :
TypeError: undefined is not an object (evaluating 'elem1.rotation')
*/
renderer.render( scene, camera );
requestAnimationFrame( animate );
}
// OBJECT DUMPING
function dumpObject(obj, lines = [], isLast = true, prefix = ' ') {
const localPrefix = isLast ? '└─' : '├─';
lines.push(`${prefix}${prefix ? localPrefix : ''}${obj.name || '*no-name*'} [${obj.type}]`);
const newPrefix = prefix + (isLast ? ' ' : '│ ');
const lastNdx = obj.children.length - 1;
obj.children.forEach((child, ndx) => {
const isLast = ndx === lastNdx;
dumpObject(child, lines, isLast, newPrefix);
});
return lines;
}
Thanx for any help.
The problem is that you try to access root before a value (the glTF model) is assigned to it. Notice that GLTFLoader.load() is asynchronous, so the onLoad() callback that sets root is not called immediately but only after the asset has finished loading.
There are several approaches to solving this. You can check in your animation loop whether root is defined. It would look like this:
function animate( time ) {
    requestAnimationFrame( animate );
    if ( root ) {
        // do something with root
    }
    renderer.render( scene, camera );
}
Or you start animating after onLoad() has finished. In this case, the code would look like so:
'use strict';
// CANVAS AND RENDERER
const canvas = document.querySelector('#myCanvas');
const renderer = new THREE.WebGLRenderer({ canvas, antialias: true });
renderer.setSize( window.innerWidth, window.innerHeight );
// SCENE AND BACKGROUND
var scene = new THREE.Scene();
const loader = new THREE.TextureLoader();
scene.background = loader.load( 'images/background.jpg');
// CAMERA
var camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 1000 );
camera.position.z = 100;
// MATERIALS
var material = new THREE.MeshNormalMaterial ();
// ---------------------------------- LOGO IMPORTATION
var gltfLoader = new THREE.GLTFLoader();
var root;
var elem1, elem2, elem3, elem4;
gltfLoader.load('gltf/logo.gltf', function(gltf) {
root = gltf.scene;
root.rotation.x = Math.PI / 2;
scene.add(root);
root.traverse( function( child ) {
if ( child instanceof THREE.Mesh ) { child.material = material; }
} );
animate(); // start animating
});
// ------------------------------------------------------------ END LOGO
// ANIMATION MAIN LOOP
function animate() {
requestAnimationFrame( animate );
// do something with root
renderer.render( scene, camera );
}
// OBJECT DUMPING
function dumpObject(obj, lines = [], isLast = true, prefix = ' ') {
const localPrefix = isLast ? '└─' : '├─';
lines.push(`${prefix}${prefix ? localPrefix : ''}${obj.name || '*no-name*'} [${obj.type}]`);
const newPrefix = prefix + (isLast ? ' ' : '│ ');
const lastNdx = obj.children.length - 1;
obj.children.forEach((child, ndx) => {
const isLast = ndx === lastNdx;
dumpObject(child, lines, isLast, newPrefix);
});
return lines;
}
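Putting both ideas together for the original goal, a sketch of the guarded main loop from the first approach (assuming elem1 to elem4 are assigned via getObjectByName inside onLoad as in the question; the rotation values are only illustrative):
function animate( time ) {
    requestAnimationFrame( animate );

    // Only touch the logo once the glTF has finished loading.
    if ( root ) {
        root.rotation.z = Math.sin( time * 0.0005 ) * 0.5;
        elem1.rotation.y = time * 0.001; // elem2..elem4 can be driven the same way
    }

    renderer.render( scene, camera );
}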

Three.js animation mixer not playing?

OK, I have an FBX successfully loaded into Three.js, exported from Blender with all animation boxes checked. I've tried all export settings from Blender, so that's not it.
This is how I bring the model in, and I am able to accurately determine whether the fbx has animations or not:
var geoFromScene = new THREE.Geometry();
var FBXLoader = require('three-fbx-loader');
var loader = new FBXLoader();
loader.load(string, function ( object ) {
object.traverse( function ( child ) {
if ( child.isMesh ) {
child.castShadow = true;
child.receiveShadow = true;
geoFromScene = (new THREE.Geometry()).fromBufferGeometry(child.geometry);
}
} );
var theModel = new THREE.Mesh();
theModel.geometry = geoFromScene;
theModel.material = material;
theModel.position.set(5,5,-8);
//theModel.rotation.set(new THREE.Vector3( 0, MATH.pi/2, 0));
theModel.scale.set(0.1, 0.1, 0.1);
localThis.scene.add(theModel);
localThis.mixer = new THREE.AnimationMixer(theModel);
if(theModel.animations[0])
{
var action = localThis.mixer.clipAction(theModel.animations[0]);
action.play();
} else {
console.log("No animations");
}
} );
In update (which does work, because I can print the animation.time):
this.mixer.update(this.clock.getDelta());
And yet the model is just static. What's wrong here?
UPDATE:
Code from the example, copy-pasted:
var geoFromScene = new THREE.Geometry();
var FBXLoader = require('three-fbx-loader');
var loader = new FBXLoader();
loader.load( string, function ( object ) {
localThis.mixer = new THREE.AnimationMixer( object );
var action = localThis.mixer.clipAction( object.animations[ 0 ] );
action.play();
object.traverse( function ( child ) {
if ( child.isMesh ) {
child.castShadow = true;
child.receiveShadow = true;
}
} );
object.position.set(5,5,-8)
object.scale.set(0.1, 0.1, 0.1);
localThis.scene.add( object );
} );
In animate:
this.mixer.update(this.clock.getDelta());
All I get is the model armature, it seems.
New approach:
var FBXLoader = require('wge-three-fbx-loader'); //https://www.npmjs.com/package/wge-three-fbx-loader
var loader = new FBXLoader();
loader.load( string, function ( object ) {

OBJLoader and MTLLoader aren't rendering png/texture in ThreeJS

I imported a 3D model that contains .obj and .mtl files plus a bunch of JPEGs and PNGs,
and I'm trying to load the model with /images like this.
But all I'm getting is a black model, like this.
I wonder what I have missed, as I followed the guidelines for using the two loaders.
Here is my code.
//loader
var MTTLoader = new THREE.MTLLoader();
MTTLoader.setPath( '/assets/HotAirBalloonIridesium/' );
MTTLoader.load('Air_Balloon.mtl',(materials) => {
console.log(materials);
materials.preload()
var objLoader = new THREE.OBJLoader();
objLoader.load('/assets/HotAirBalloonIridesium/Air_Balloon.obj', (object) => {
console.log(materials)
objLoader.setMaterials(materials)
scene.add(object);
})
})
I wonder what I'm missing, as my asset folder contains all the model files.
Try loading the OBJ like this:
var loader = new THREE.OBJLoader( manager );
loader.load( 's_v1.obj', function ( object ) {
    object.traverse( function ( child ) {
        if ( child instanceof THREE.Mesh ) {
            // child.material.map = texture2;
            // child.material.specularMap = texture;
            // child.material.map = texture;
        }
    } );
    // object.position.x = - 60;
    // object.rotation.x = 0; // 20 * Math.PI / 180;
    // object.rotation.z = 0; // 20 * Math.PI / 180;
    object.scale.x = 80;
    object.scale.y = 80;
    object.scale.z = 80;
    obj = object;
    scene.add( obj );
    animate( obj );
} );
Okay, quick update: there was nothing wrong with the loader, but I was using the wrong lighting, as the Phong material needed
var hemiLight = new THREE.HemisphereLight(0xffffff, 0xffffff, 0.50);
var dirLight = new THREE.DirectionalLight(0xffffff, 0.50);
to be apparent.
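For completeness, a sketch of wiring those lights into the scene (the directional light position is an illustrative assumption):
dirLight.position.set( -8, 12, 8 );
scene.add( hemiLight );
scene.add( dirLight );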
You must call setMaterials() before loading the OBJ.
//loader
var MTTLoader = new THREE.MTLLoader();
MTTLoader.setPath( '/assets/HotAirBalloonIridesium/' );
MTTLoader.load('Air_Balloon.mtl', (materials) => {
    console.log(materials);
    materials.preload();
    var objLoader = new THREE.OBJLoader();
    objLoader.setMaterials(materials); // "setMaterials" must come before "load"
    objLoader.load('/assets/HotAirBalloonIridesium/Air_Balloon.obj', (object) => {
        console.log(materials);
        scene.add(object);
    });
});

Three.js Blender Animation Mic Frequency Trigger Issue

I am attempting to import a multiple-layer animation from Blender (exported using the three.js plugin) into JavaScript and have the different animations triggered by microphone frequency input. I have been able to combine playing the animations via buttons with a .obj being rotated depending on the frequency from the mic input, but I am struggling to hook the mic input up to the animations.
<div class="info">
<button onclick="fadeAction( 'idle' );">idle</button>
<button onclick="fadeAction( 'run' );">run</button>
<button onclick="fadeAction( 'wrap' );">wrap</button>
</div>
loader.load( 'tentacleanimation6.json', function( geometry, materials ) {
materials.forEach( function ( material ) {
material.skinning = true;
} );
mesh = new THREE.SkinnedMesh(
geometry,
new THREE.MeshFaceMaterial( materials )
);
mixer = new THREE.AnimationMixer( mesh );
action.idle = mixer.clipAction( geometry.animations[ 1 ] );
action.run = mixer.clipAction( geometry.animations[ 0 ] );
action.wrap = mixer.clipAction( geometry.animations[ 2 ] );
action.idle.setEffectiveWeight( 1 );
action.run.setEffectiveWeight( 1 );
action.wrap.setEffectiveWeight( 1 );
action.idle.play();
mesh.position.y = 0;
mesh.position.x = -3;
scene.add( mesh );
} );
fadeAction = function () {
var activeActionName = 'idle';
return function ( name ) {
var from = action[ activeActionName ].play();
var to = action[ name ].play();
from.enabled = true;
to.enabled = true;
from.crossFadeTo( to, .3 );
activeActionName = name;
}
}();
Then the mic input from getUserMedia is processed as follows, triggering a .obj file to rotate:
script_processor_analysis_node.onaudioprocess = function() {
analyser_node.getByteFrequencyData(array_freq_domain);
if (microphone_stream.playbackState == microphone_stream.PLAYING_STATE) {
var render = function () {
for ( var i = 0; i < (array_freq_domain.length); i++ ){
var valueone = array_freq_domain[array_freq_domain.length / 2];
mouthone.rotation.x = deg2rad( valueone ) / 7;
var valuetwo = array_freq_domain[6];
mouthtwo.rotation.x = deg2rad( valuetwo ) / 7;
}
renderer.render(scene, camera);
}
requestAnimationFrame( render );
}
}
The issue arises when the two are combined: I am not sure what needs to be called in the onaudioprocess section to trigger an animation to play. There also seems to be a rendering problem, since onaudioprocess renders as shown above while the animation is rendered with the loop below, and when the two are combined they cancel each other out:
;( function update () {
requestAnimationFrame( update );
var delta = clock.getDelta();
var theta = clock.getElapsedTime();
if ( mixer ) { mixer.update( delta ); }
renderer.render( scene, camera );
} )();
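One common way to reconcile the two, sketched below under the assumption that analyser_node, array_freq_domain, fadeAction, mixer and clock from the snippets above are all in scope, is to keep a single render loop and sample the analyser inside it rather than rendering from onaudioprocess (the frequency bin and threshold are illustrative):
var micState = 'idle';

( function update() {
    requestAnimationFrame( update );

    // Read the current microphone spectrum every frame.
    analyser_node.getByteFrequencyData( array_freq_domain );
    var level = array_freq_domain[ 6 ];

    // Cross-fade to 'run' while the mic is loud, back to 'idle' otherwise,
    // only triggering the fade when the state actually changes.
    var target = level > 128 ? 'run' : 'idle';
    if ( target !== micState ) {
        fadeAction( target );
        micState = target;
    }

    if ( mixer ) { mixer.update( clock.getDelta() ); }
    renderer.render( scene, camera );
} )();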
