I'm using three.js to create a minecraft texture editor, similar to this. I'm just trying to get the basic click-and-paint functionality down, but I can't seem to figure it out. I currently have textures for each face of each cube and apply them by making shader materials with the following functions.
this.createBodyShaderTexture = function(part, update)
{
    var sides = ['left', 'right', 'top', 'bottom', 'front', 'back'];
    var images = [];
    for (var i = 0; i < sides.length; i++)
    {
        images[i] = 'img/' + part + '/' + sides[i] + '.png';
    }
    // loadTextureCube is a plain function, not a constructor
    var texCube = THREE.ImageUtils.loadTextureCube(images);
    texCube.magFilter = THREE.NearestFilter;
    texCube.minFilter = THREE.LinearMipMapLinearFilter;
    if (update)
    {
        texCube.needsUpdate = true;
        console.log(texCube);
    }
    return texCube;
};
this.createBodyShaderMaterial = function(part, update)
{
    var shader = THREE.ShaderLib['cube'];
    shader.uniforms['tCube'].value = this.createBodyShaderTexture(part, update);
    shader.fragmentShader = document.getElementById("fshader").innerHTML;
    shader.vertexShader = document.getElementById("vshader").innerHTML;
    var material = new THREE.ShaderMaterial({
        fragmentShader: shader.fragmentShader,
        vertexShader: shader.vertexShader,
        uniforms: shader.uniforms
    });
    return material;
};
SkinApp.prototype.onClick = function(event)
{
    event.preventDefault();
    this.change(); // makes the texture file a simple red square for testing
    this.avatar.remove(this.HEAD);
    this.HEAD = new THREE.Mesh(new THREE.CubeGeometry(8, 8, 8), this.createBodyShaderMaterial('head', false));
    this.HEAD.position.y = 10;
    this.avatar.add(this.HEAD);
    this.HEAD.material.needsUpdate = true;
    this.HEAD.dynamic = true;
};
Then, when the user clicks anywhere on the mesh, the texture file itself is updated using canvas. The update happens, but the change doesn't show up in the browser unless the page is refreshed. I've found plenty of examples of how to swap the texture for a new image file, but not of how to show changes to the same texture file at runtime, or even whether that's possible. Is this possible, and if not, what alternatives are there?
When you update a texture, whether it's based on a canvas, a video, or an externally loaded image, you need to set its needsUpdate property to true.
If an object is created like this:
var canvas = document.createElement("canvas");
var canvasMap = new THREE.Texture(canvas);
var mat = new THREE.MeshPhongMaterial();
mat.map = canvasMap;
var mesh = new THREE.Mesh(geom, mat);
After the texture has been changed, set the following to true:
mesh.material.map.needsUpdate = true;
And next time you render the scene it'll show the new texture.
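For example, a minimal sketch of the full paint-then-flag cycle might look like this (assuming a renderer, scene and camera already exist; the red-square drawing is just the test case from the question):
var ctx = canvas.getContext("2d");

function paintRedSquare() {
    // Draw into the same canvas that backs the texture...
    ctx.fillStyle = "#ff0000";
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    // ...then flag the texture so three.js re-uploads it to the GPU.
    canvasMap.needsUpdate = true;
}

renderer.domElement.addEventListener("click", paintRedSquare); // assumes a WebGLRenderer named "renderer"

function render() {
    requestAnimationFrame(render);
    renderer.render(scene, camera); // the repainted texture appears on the next rendered frame
}
render();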
Here is all the basics of what you have to know about "updating" stuff in three.js: https://threejs.org/docs/#manual/introduction/How-to-update-things
Related
I'm using Three.js in a website project for a construction company. They want some 360° photos (photospheres) that I made using my phone (Google Pixel 5). They also want to show a 3D representation of one of their projects, so using Three.js seems to be the best solution.
Here is what it looks like in Google Photos:
Google Photos screenshot
And what it looks like in Three.js:
Three.js screenshot
You can see the colors are really bad (contrast, white balance, idk) compared to the original version.
This is the first time I've used Three.js, so here is my code:
Scene:
async connectedCallback() {
    // Scene
    this.scene = new THREE.Scene();

    // Background equirectangular texture
    const background_img = this.getAttribute('background');
    if (background_img) this.loadBackground(background_img);

    // Camera
    this.camera = new THREE.PerspectiveCamera(60, this.clientWidth / this.clientHeight, 1, 5000);

    // Lights
    // this.scene.add(new THREE.HemisphereLight(0xffeeb1, 0x080820, 0));
    const spotLight = new THREE.SpotLight(0xffa95c, 5);
    spotLight.position.set(20, 20, 20);
    spotLight.castShadow = true;
    spotLight.shadow.bias = -0.0001;
    spotLight.shadow.mapSize.width = 1024 * 4;
    spotLight.shadow.mapSize.height = 1024 * 4;
    this.scene.add(spotLight);

    // Renderer
    this.renderer = new THREE.WebGLRenderer({ antialias: true });
    this.renderer.toneMapping = THREE.ReinhardToneMapping;
    this.renderer.toneMappingExposure = 2.3;
    this.renderer.shadowMap.enabled = true;
    this.renderer.setPixelRatio(devicePixelRatio);
    this.renderer.setSize(this.clientWidth, this.clientHeight);
    this.appendChild(this.renderer.domElement);

    // Orbit controls
    this.controls = new OrbitControls(this.camera, this.renderer.domElement);
    this.controls.autoRotate = true;

    // Resize event
    addEventListener('resize', e => this.resize());

    // Load model
    const url = this.getAttribute('model');
    if (url) this.loadModel(url);

    // Animate
    this.resize();
    this.animate();

    // Resize again in .1s
    setTimeout(() => this.resize(), 100);

    // Init observer
    new IntersectionObserver(entries => this.classList.toggle('visible', entries[0].isIntersecting), { threshold: 0.1 }).observe(this);
}
Background (photosphere):
loadBackground(src) {
    const equirectangular = new THREE.TextureLoader().load(src);
    equirectangular.mapping = THREE.EquirectangularReflectionMapping;
    // Things GitHub Copilot suggested; removing them does not change the colors, so I think they're not the problem
    equirectangular.magFilter = THREE.LinearFilter;
    equirectangular.minFilter = THREE.LinearMipMapLinearFilter;
    equirectangular.format = THREE.RGBFormat;
    equirectangular.encoding = THREE.sRGBEncoding;
    equirectangular.anisotropy = 16;
    this.scene.background = equirectangular;
}
As Marquizzo said in a comment:
You’re changing the renderer’s tone mapping and the exposure. It’s normal for the results to be different when you make modifications to the color output.
I kept saying the problem wasn't in the lines GitHub Copilot suggested, because I got the same result when removing all of them at once. But when I tried removing one line at a time, it turned out that this line was the problem:
equirectangular.format = THREE.RGBFormat;
Now my result is a bit dark compared to the Google Photos one, but the colors are finally accurate! I still have a lot of things to learn, haha.
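For reference, the corrected loader is simply the original function with that line removed (a sketch, assuming nothing else in the setup changes):
loadBackground(src) {
    const equirectangular = new THREE.TextureLoader().load(src);
    equirectangular.mapping = THREE.EquirectangularReflectionMapping;
    equirectangular.magFilter = THREE.LinearFilter;
    equirectangular.minFilter = THREE.LinearMipMapLinearFilter;
    // equirectangular.format = THREE.RGBFormat;  <-- removed: this line was breaking the colors
    equirectangular.encoding = THREE.sRGBEncoding;
    equirectangular.anisotropy = 16;
    this.scene.background = equirectangular;
}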
Three.js screenshot after removing the problematic line
Thank you Marquizzo!
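If the remaining darkness matters, Marquizzo's comment points at the renderer's tone mapping and exposure as the other source of difference, so those are the knobs to try next (an untested guess, not something verified against these photos):
this.renderer.toneMapping = THREE.NoToneMapping;  // try turning tone mapping off entirely...
this.renderer.toneMappingExposure = 1.0;          // ...or keep ReinhardToneMapping and tune the exposure instead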
I'm using THREE.OBJLoader to load my 3D objects in Three.js r100 and, at a certain moment, I need to load some dynamic textures with THREE.TextureLoader().
Then I simply create a new material with THREE.MeshBasicMaterial() and assign it to my OBJ mesh.
This is the code:
// this contains the loaded texture
let texture = await new Promise((resolve, rejects) => loadGeneralTexture(resolve, rejects, url));
texture.minFilter = THREE.LinearFilter;
texture.needsUpdate = true;
texture.wrapS = texture.wrapT = THREE.RepeatWrapping;

var material = new THREE.MeshBasicMaterial({
    map: texture
});

// this loop sets the new material with the texture
el.traverse(child => {
    if (child instanceof THREE.Mesh) {
        child.material = material;
    }
});
The result is:
But the image loaded is:
I can only fix this with the gizmo (a 3ds Max tool) by rotating the texture, but I can't reproduce that fix in Three.js.
The same problem occurs with two other 3D objects, and this time it's even worse.
Edit: the OBJ files come from our client (so I didn't create them myself). I already checked the various "faces" and they are identical. Can I change the UV mapping myself with Three.js?
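As a hedged sketch of what adjusting the mapping in Three.js could look like (texture.center/rotation/flipY are real Texture properties in r100, but whether flipping or rotating is the right correction for these particular OBJ files is an assumption):
// Option 1: transform the texture instead of editing the UVs
texture.flipY = false;            // OBJ exports often expect an un-flipped V axis
texture.center.set(0.5, 0.5);     // rotate around the middle of the texture
texture.rotation = Math.PI;       // e.g. turn the map 180 degrees
texture.needsUpdate = true;

// Option 2: edit the UVs on each mesh's BufferGeometry, e.g. flipping V
el.traverse(child => {
    if (child instanceof THREE.Mesh) {
        const uv = child.geometry.attributes.uv;
        for (let i = 0; i < uv.count; i++) {
            uv.setY(i, 1 - uv.getY(i));
        }
        uv.needsUpdate = true;
    }
});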
I am completely new to Three.js and can't seem to get my head around getting a skybox to appear in my scene. I'm not getting any errors from my code, which has left me stumped. Any help would be greatly appreciated.
function createSky() {
    var imageList = "CubeMap";
    THREE.ImageUtils.crossOrigin = '';
    var faces = ["HDR0001",
                 "HDR0002",
                 "HDR0003",
                 "HDR0004",
                 "HDR0005"];
    var imgType = ".jpg";
    var skyGeo = new THREE.CubeGeometry(500, 500, 500);
    var matFacesArray = [];
    for (var i = 0; i < 6; i++) {
        matFacesArray.push(new THREE.MeshBasicMaterial({
            map: THREE.ImageUtils.loadTexture(imageList + faces[i] + imgType),
            side: THREE.BackSide
        }));
    }
    var sky = new THREE.MeshFaceMaterial(matFacesArray);
    var skyBox = new THREE.Mesh(skyGeo, sky);
    scene.add(skyBox);
}
Where are you watching your log for warnings/errors?
It seems strange that you are not getting any feedback, since THREE.ImageUtils.loadTexture is deprecated in favor of TextureLoader.load; that deprecation warning should show up if you are using a reasonably recent version of three.js.
Also, what browser are you using? I find that Firefox is usually a bit more generous about showing textures than Chrome, for example. This has to do with cross-origin image loading and can be a problem when you try to run your code locally.
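If it helps, here is a rough sketch of the same skybox using the non-deprecated TextureLoader (the file names come from your snippet; note that your faces array only lists five names while a cube needs six, so the sixth is an assumption):
function createSky() {
    var loader = new THREE.TextureLoader();
    loader.crossOrigin = '';

    var faces = ["HDR0001", "HDR0002", "HDR0003", "HDR0004", "HDR0005", "HDR0006"];

    var materials = faces.map(function(face) {
        return new THREE.MeshBasicMaterial({
            map: loader.load("CubeMap" + face + ".jpg"),
            side: THREE.BackSide
        });
    });

    var skyGeo = new THREE.BoxGeometry(500, 500, 500);
    var skyBox = new THREE.Mesh(skyGeo, materials); // recent versions accept a material array directly
    scene.add(skyBox);
}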
I'm currently trying to update individual textures based on the cursor position on an imported JSON model. Below is the import code for the JSON model:
var loader = new THREE.JSONLoader();
loader.load('slicedNew.json', function(geometry, materials) {
    console.log(materials);
    mesh = new THREE.Mesh(geometry, new THREE.MeshFaceMaterial(materials));
    mesh.scale.x = mesh.scale.y = mesh.scale.z = 8;
    mesh.translation = THREE.GeometryUtils.center(geometry);
    scene.add(mesh);
    console.log('IMPORTED OBJECT: ', mesh);
});
Below is the raycaster code for when the cursor is over a particular material:
switch (intersects[0].face.materialIndex)
{
    case 0:
        console.log('0 material index');
        intersects[0].object.material.needsUpdate = true;
        intersects[0].object.material.materials[0] = new THREE.MeshLambertMaterial({
            map: crate
        });
        break;
    // ...remaining material indices handled the same way
}
Any time I hover over a certain side of the shape, the texture is loaded but it is always black. Even when I initialise the model to use the texture, it still appears black, yet I can load a simple cube and map the same image as a texture onto that shape with no issues.
Any help would be appreciated.
I'm using the pixi.js rendering engine for my current project in JavaScript. I'm loading a spritesheet defined in JSON using the AssetLoader. The problem is that I need to create sprites or movieclips well after the AssetLoader's onComplete event finishes, but the texture cache doesn't seem to be accessible after that point. Here is some code demonstrating the problem I'm running into.
var spriteSheet = [ "test.json" ];
loader = new PIXI.AssetLoader(spriteSheet); // only using the flappy bird sprite sheet as a test
loader.onComplete = OnAssetsLoaded;
loader.load();
function OnAssetsLoaded() {
var sprite = PIXI.Sprite.fromFrame("someFrame.png"); //this works
}
var sprite2 = PIXI.Sprite.fromFrame("someFrame2.png"); //This does not work, tells me "someFrame2" is not in the texture cache
The sprite sheet must be fully loaded before the images get stored in the cache. Once the sprite sheet has loaded, those assets will stay in the cache until you delete them.
The reason your code above fails is that the line var sprite2... executes before the sprite sheet has finished loading.
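In other words, the most direct fix for the snippet above is to move that line inside the callback (a sketch reusing your frame names):
var loader = new PIXI.AssetLoader(["test.json"]);
loader.onComplete = OnAssetsLoaded;
loader.load();

function OnAssetsLoaded() {
    // Both frames are safe to create here, because the sheet is now in the texture cache.
    var sprite = PIXI.Sprite.fromFrame("someFrame.png");
    var sprite2 = PIXI.Sprite.fromFrame("someFrame2.png");
}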
This example will continuously add a new Sprite to the stage every second.
//build stage
var stage = new PIXI.Stage(0x66FF99);
var renderer = PIXI.autoDetectRenderer(400, 300);
document.body.appendChild(renderer.view);

//update renderer
requestAnimFrame(animate);
function animate() {
    requestAnimFrame(animate);
    renderer.render(stage);
}

//Flag will prevent Sprites from being created until the Sprite sheet is loaded.
var assetsReadyFlag = false;

//load sprite sheet
var loader = new PIXI.AssetLoader(["test.json"]);
loader.onComplete = function() {
    assetsReadyFlag = true;
};
loader.load();

//Add a new bird every second
setInterval(addBird, 1000);

function addBird()
{
    //assets must be loaded before creating sprites
    if (!assetsReadyFlag) return;

    //create Sprite and place it somewhere inside the 400x300 view
    var bird = PIXI.Sprite.fromFrame("bird.png");
    bird.anchor.x = bird.anchor.y = 0.5;
    bird.x = Math.random() * renderer.width;
    bird.y = Math.random() * renderer.height;
    stage.addChild(bird);
}