Three.js / WebGL How to mirror one side of a texture - javascript

I'm basically trying to achieve a kaleidoscopic effect with just one side, but I'm working with lots of Points, so I'd like that to happen in the shader. However if there's a Threejs trick that mirrors half of the texture or the Points object, that would be great. I tried to apply transformation matrices but I can't get it to work.
I found an old KaleidoShader that requires the usage of EffectComposer, but I'd like to implement it manually myself (without EffectComposer) and I'm struggling to do so. I'm using an FBO and I tried adding the code from that shader in both my simulation and render shaders but it's having no effect at all. Do I have to add yet another FBO texture or is it possible to do those calculations in one of the existing shaders?
For visual reference https://ma-hub.imgix.net/wp-images/2019/01/23205110/premiere-pro-mirror-effect.jpg
I've spent so much time without getting to the bottom of this, hopefully someone can point me in the right direction.
Thanks

I just followed this article
Pasting in the code from that repo seems to work
/* Remove the default page margin so the canvas can fill the window. */
body {
margin: 0;
}
/* Make the render canvas cover the whole viewport. */
#c {
width: 100vw;
height: 100vh;
display: block;
}
<canvas id="c"></canvas>
<script type="module">
import * as THREE from 'https://threejsfundamentals.org/threejs/resources/threejs/r115/build/three.module.js';
import {EffectComposer} from 'https://threejsfundamentals.org/threejs/resources/threejs/r115/examples/jsm/postprocessing/EffectComposer.js';
import {RenderPass} from 'https://threejsfundamentals.org/threejs/resources/threejs/r115/examples/jsm/postprocessing/RenderPass.js';
import {ShaderPass} from 'https://threejsfundamentals.org/threejs/resources/threejs/r115/examples/jsm/postprocessing/ShaderPass.js';
import {GUI} from 'https://threejsfundamentals.org/threejs/../3rdparty/dat.gui.module.js';
// Renders three spinning Phong cubes, then feeds each frame through an
// EffectComposer whose final ShaderPass applies a polar-mirror
// (kaleidoscope) effect; dat.GUI exposes the `sides`/`angle` uniforms.
function main() {
const canvas = document.querySelector('#c');
const renderer = new THREE.WebGLRenderer({canvas});
const fov = 75;
const aspect = 2; // the canvas default
const near = 0.1;
const far = 5;
const camera = new THREE.PerspectiveCamera(fov, aspect, near, far);
camera.position.z = 2;
const scene = new THREE.Scene();
// Single directional light so the Phong-shaded cubes show depth.
{
const color = 0xFFFFFF;
const intensity = 2;
const light = new THREE.DirectionalLight(color, intensity);
light.position.set(-1, 2, 4);
scene.add(light);
}
const boxWidth = 1;
const boxHeight = 1;
const boxDepth = 1;
const geometry = new THREE.BoxGeometry(boxWidth, boxHeight, boxDepth);
// Creates one cube sharing `geometry`, tinted `color`, offset along x.
function makeInstance(geometry, color, x) {
const material = new THREE.MeshPhongMaterial({color});
const cube = new THREE.Mesh(geometry, material);
scene.add(cube);
cube.position.x = x;
return cube;
}
const cubes = [
makeInstance(geometry, 0x44aa88, 0),
makeInstance(geometry, 0x8844aa, -2),
makeInstance(geometry, 0xaa8844, 2),
];
// Post-processing chain: render the scene to an internal target, then
// run the kaleidoscope pass over that texture.
const composer = new EffectComposer(renderer);
composer.addPass(new RenderPass(scene, camera));
// from:
// https://github.com/mistic100/three.js-examples/blob/master/LICENSE
// The fragment shader folds each pixel's polar angle into one wedge of
// width tau/sides, mirrors it about the wedge centre, and samples the
// rendered frame (`tDiffuse`) at the remapped coordinate.
const kaleidoscopeShader = {
uniforms: {
"tDiffuse": { value: null },
"sides": { value: 6.0 },
"angle": { value: 0.0 }
},
vertexShader: `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`,
fragmentShader: `
uniform sampler2D tDiffuse;
uniform float sides;
uniform float angle;
varying vec2 vUv;
void main() {
vec2 p = vUv - 0.5;
float r = length(p);
float a = atan(p.y, p.x) + angle;
float tau = 2. * 3.1416 ;
a = mod(a, tau/sides);
a = abs(a - tau/sides/2.) ;
p = r * vec2(cos(a), sin(a));
vec4 color = texture2D(tDiffuse, p + 0.5);
gl_FragColor = color;
}
`
};
const kaleidoscopePass = new ShaderPass(kaleidoscopeShader);
kaleidoscopePass.renderToScreen = true;
composer.addPass(kaleidoscopePass);
// Resizes the drawing buffer to the canvas's CSS size when they differ.
function resizeRendererToDisplaySize(renderer) {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const needResize = canvas.width !== width || canvas.height !== height;
if (needResize) {
renderer.setSize(width, height, false);
}
return needResize;
}
// NOTE(review): the slider permits sides = 0, which makes the shader
// divide by zero in `mod(a, tau/sides)` — a minimum of 1 seems safer; confirm.
const gui = new GUI();
gui.add(kaleidoscopePass.uniforms.sides, 'value', 0, 20).name('sides');
gui.add(kaleidoscopePass.uniforms.angle, 'value', 0, 6.28, 0.01).name('angle');
let then = 0;
// Per-frame loop: resize if needed, spin each cube at a slightly
// different rate, then render through the composer.
function render(now) {
now *= 0.001; // convert to seconds
const deltaTime = now - then;
then = now;
if (resizeRendererToDisplaySize(renderer)) {
const canvas = renderer.domElement;
camera.aspect = canvas.clientWidth / canvas.clientHeight;
camera.updateProjectionMatrix();
composer.setSize(canvas.width, canvas.height);
}
cubes.forEach((cube, ndx) => {
const speed = 1 + ndx * .1;
const rot = now * speed;
cube.rotation.x = rot;
cube.rotation.y = rot;
});
composer.render(deltaTime);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
</script>

There is a texture wrap mode that does mirroring.
texture.wrapS = texture.wrapT = THREE.MirroredRepeatWrapping
Does that help?
edit: Here's an example showing MirroredRepeatWrapping on both axes:
https://glitch.com/~three-mirroredrepeatwrapping

Related

Progressively reveal circle in loop

I currently have this: Codepen (embedded snippet below).
I would like to be able to change the color progressively after the scanline, something like this:
After searching online and trying to find something in three.js / WebGL I failed to procure what I wanted, probably because I don't quite know what I should search for.
Could you help me with a solution or pointing me in the right direction?
I have considered the following possibilities:
Having a second green circle and a dynamic mask that reveals it after the scanline.
How to create a mask in three.js that can show a slice of an increasing angle θ?
CircleGeometry has parameters to create a slice with angle θ. But constantly changing the geometry of my mesh doesn't sound very smart.
Adding tiny circle slices after the scanline passes so it creates the impression of revealing a circle but it's actually just adding tiny slices.
P.S. - I am using three.js because later there will be 3d elements to this project.
// Scene scaffolding: full-window renderer plus a perspective camera.
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
const renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);

// Radii of the two concentric discs; the bars are as long as the inner radius.
const innerRadius = 1;
const outerRadius = innerRadius * 2;
const barLength = innerRadius;

// Outer (cyan) disc added first, inner (blue) disc on top of it.
const outerCircle = new THREE.Mesh(
  new THREE.CircleGeometry(outerRadius, 60),
  new THREE.MeshBasicMaterial({ color: 0x34ebd2 })
);
scene.add(outerCircle);
const innerCircle = new THREE.Mesh(
  new THREE.CircleGeometry(innerRadius, 60),
  new THREE.MeshBasicMaterial({ color: 0x0000ff })
);
scene.add(innerCircle);

// Helper: a thin red bar that spans the ring between the two circles.
const makeBar = () =>
  new THREE.Mesh(
    new THREE.PlaneGeometry(0.05, barLength),
    new THREE.MeshBasicMaterial({ color: 0xff0000, side: THREE.DoubleSide })
  );

const staticLine = makeBar();
scene.add(staticLine);
const scanLine = makeBar();
scene.add(scanLine);

// Both bars start at 12 o'clock, resting on top of the inner circle.
staticLine.position.y = innerRadius + barLength / 2;
scanLine.position.y = innerRadius + barLength / 2;

// Parent the scan line to a pivot group at the origin so rotating the
// pivot sweeps the bar around the circle.
const pivot = new THREE.Group();
pivot.position.set(0.0, 0.0, 0);
pivot.add(scanLine);
scene.add(pivot);

camera.position.z = 5;

// Render loop: sweep the pivot clockwise a little each frame.
function animate() {
  requestAnimationFrame(animate);
  renderer.render(scene, camera);
  pivot.rotation.z -= 0.005;
}
animate();
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>
This is a 2-in-1 example (fragment and vertex shader implementations) of progressive arcs, just from the scratch. Use it as a starting point.
/* Full-window demo page: no scrollbars, no default margin. */
body{
overflow: hidden;
margin: 0;
}
<script type="module">
import * as THREE from "https://cdn.skypack.dev/three#0.132.2";
import {OrbitControls} from "https://cdn.skypack.dev/three#0.132.2/examples/jsm/controls/OrbitControls.js";
// Two ways to draw a progressively revealed arc:
//  1) fragment-shader option — a full disc whose colour switches at an
//     angular cutoff driven by a 0..1 `time` uniform;
//  2) vertex-shader option — a subdivided plane whose vertices are bent
//     into a ring covering `arcRatio` of a full turn.
let scene = new THREE.Scene();
let camera = new THREE.PerspectiveCamera(60, innerWidth / innerHeight, 1, 1000);
camera.position.set(-5, 3, 8);
let renderer = new THREE.WebGLRenderer();
renderer.setSize(innerWidth, innerHeight);
document.body.appendChild(renderer.domElement);
let controls = new OrbitControls(camera, renderer.domElement);
// fragment shader option
let g = new THREE.CircleGeometry(5, 64);
// Patch MeshBasicMaterial's fragment shader right after <color_fragment>:
// pixels past the angular sweep get currColor, the rest prevColor, and a
// small central disc keeps the material's base colour.
let m = new THREE.MeshBasicMaterial({
color: 0x7f7f7f,
side: THREE.DoubleSide,
onBeforeCompile: shader => {
// Wire the material's userData uniform objects into the compiled program.
shader.uniforms.time = m.userData.uniforms.time;
shader.uniforms.currColor = m.userData.uniforms.currColor;
shader.uniforms.prevColor = m.userData.uniforms.prevColor;
shader.fragmentShader = `
uniform float time;
uniform vec3 currColor;
uniform vec3 prevColor;
${shader.fragmentShader}
`.replace(
`#include <color_fragment>`,
`#include <color_fragment>
vec2 cUv = vUv - 0.5;
float dist = length(cUv);
vec3 col = prevColor;
float ang = mod(atan(cUv.y, cUv.x) + PI * 3.5, PI2);
float aRatio = 1. - ang / PI2;
float slice = 1. - step(time, aRatio);
col = mix(prevColor, currColor, slice);
float innerCirc = 1. - step(0.25, dist);
col = mix(col, diffuseColor.rgb, innerCirc);
diffuseColor.rgb = col;
`
);
console.log(shader.fragmentShader);
}
})
// USE_UV makes the stock shader declare vUv even though no texture is set.
m.defines = {
"USE_UV": " "
};
// Uniform value holders shared with the patched shader above.
m.userData = {
uniforms: {
time: {
value: 0.5
},
currColor: {
value: new THREE.Color(0xff00ff)
},
prevColor: {
value: new THREE.Color(0x00ffff)
}
}
}
let o = new THREE.Mesh(g, m);
scene.add(o);
// vertex shader option
// The plane's uv.x selects the angle around the ring, uv.y the radius.
let g2 = new THREE.PlaneGeometry(1, 1, 180, 1);
let m2 = new THREE.MeshBasicMaterial({
color: 0xffff00,
wireframe: true,
onBeforeCompile: shader => {
shader.uniforms.rMin = m2.userData.uniforms.rMin;
shader.uniforms.rMax = m2.userData.uniforms.rMax;
shader.uniforms.arcRatio = m2.userData.uniforms.arcRatio;
shader.vertexShader = `
uniform float rMin;
uniform float rMax;
uniform float arcRatio;
mat2 rot(float a){return mat2(cos(a), -sin(a), sin(a), cos(a));}
${shader.vertexShader}
`.replace(
`#include <begin_vertex>`,
`#include <begin_vertex>
float rDiff = rMax - rMin;
float r = rMin + (rDiff * uv.y);
float ang = PI2 * uv.x * arcRatio;
transformed.xy = rot(ang) * vec2(0., r);
`
);
console.log(shader.vertexShader);
}
});
m2.userData = {
uniforms: {
rMin: {value: 2.5},
rMax: {value: 5},
arcRatio: {value: 0.25} // 0..1
}
}
let o2 = new THREE.Mesh(g2, m2);
o2.position.z = 2;
scene.add(o2);
let clock = new THREE.Clock();
window.addEventListener("resize", onResize);
// Animation loop: drive both effects with the same looping 0..1 phase.
renderer.setAnimationLoop(_ => {
let t = (clock.getElapsedTime() * 0.1) % 1;
m.userData.uniforms.time.value = t;
m2.userData.uniforms.arcRatio.value = t;
renderer.render(scene, camera);
})
// Keep the camera and drawing buffer in sync with the window size.
function onResize(){
camera.aspect = innerWidth / innerHeight;
camera.updateProjectionMatrix();
renderer.setSize(innerWidth, innerHeight);
}
</script>

Three.js fundamentals, shadertoy example, how to transform the pattern along with the object?

Here's an example based on the second live example from https://threejsfundamentals.org/threejs/lessons/threejs-shadertoy.html:
/* Full-height page with no margins. */
html, body {
height: 100%;
margin: 0;
}
/* Render canvas fills the page. */
#c {
width: 100%;
height: 100%;
display: block;
}
<canvas id="c"></canvas>
<script type="module">
// Three.js - Shadertoy Basic
// from https://threejsfundamentals.org/threejs/threejs-shadertoy-basic.html
import * as THREE from 'https://unpkg.com/three#0.122.0/build/three.module.js';
// Shadertoy-style demo: a TwinDragon fractal fragment shader drawn on a
// rotating plane. The shader samples by gl_FragCoord (screen space), so
// the pattern stays fixed to the screen and does NOT rotate with the
// mesh — this is the behaviour the question asks about.
function main() {
const canvas = document.querySelector('#c');
const renderer = new THREE.WebGLRenderer({canvas});
renderer.autoClearColor = false;
const camera = new THREE.PerspectiveCamera(
45, 16/9, 0.01, 1000
);
camera.position.z = 5
const scene = new THREE.Scene();
const plane = new THREE.PlaneBufferGeometry(2, 2);
// Fragment shader ported from Shadertoy; note main() feeds it
// gl_FragCoord.xy, i.e. window pixel coordinates.
const fragmentShader = `
#include <common>
uniform vec3 iResolution;
uniform float iTime;
// https://www.shadertoy.com/view/MtXSWj
float alternate(float p, float d){;
return sign(fract(p*d*.5)*2.-1.);
}
vec3 rainbow(float t){
return sin(t+vec3(0,.33,.66)*6.28)*.5+.5;
}
vec3 TwinDragon(vec2 p){
float time = fract(iTime*0.05)*20.;
//scaling
p = (p*2.-iResolution.xy)/iResolution.y*1.5;
//----------the fractal stuff---- ---THIS IS ANIMATIONS----(so remove them if you want)
p.y += alternate(p.x, 256. )/512. * clamp(time-16.,0.,2.)/2.;
p.x -= alternate(p.y, 128. )/256. * clamp(time-14.,0.,2.)/2.;
p.y += alternate(p.x, 64. )/128. * clamp(time-12.,0.,2.)/2.;
p.x -= alternate(p.y, 32. )/ 64. * clamp(time-10.,0.,2.)/2.;
p.y += alternate(p.x, 16. )/ 32. * clamp(time- 8.,0.,2.)/2.;
p.x -= alternate(p.y, 8. )/ 16. * clamp(time- 6.,0.,2.)/2.;
p.y += alternate(p.x, 4. )/ 8. * clamp(time- 4.,0.,2.)/2.;
p.x -= alternate(p.y, 2. )/ 4. * clamp(time- 2.,0.,2.)/2.;
// prettifying
vec2 block = ceil(p+.5); //index for blocks from which the fractal is shifted
vec3 color = rainbow(block.x*4.+block.y); //rainbow palette using block index as t
float dis = length(fract(p+.5)*2.-1.);//distance to middle of block
color *= .5+dis*.7; //using distance within block for some more pretty.
return color;
}
void mainImage( out vec4 fragColor, in vec2 fragCoord ){
vec2 d = vec2(.5,0);
//some antialiasing
vec3 col = (
TwinDragon(fragCoord+d.xy)+
TwinDragon(fragCoord-d.xy)+
TwinDragon(fragCoord+d.yx)+
TwinDragon(fragCoord-d.yx)
)*.25;
fragColor = vec4(col,1.);
}
void main() {
mainImage(gl_FragColor, gl_FragCoord.xy);
}
`;
const uniforms = {
iTime: { value: 0 },
iResolution: { value: new THREE.Vector3() },
};
const material = new THREE.ShaderMaterial({
fragmentShader,
uniforms,
side: THREE.DoubleSide
});
const mesh = new THREE.Mesh(plane, material)
scene.add(mesh);
// Resizes the drawing buffer to the canvas's CSS size when they differ.
function resizeRendererToDisplaySize(renderer) {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const needResize = canvas.width !== width || canvas.height !== height;
if (needResize) {
renderer.setSize(width, height, false);
}
return needResize;
}
// Per-frame loop: update shader uniforms and slowly rotate the plane.
function render(time) {
time *= 0.001; // convert to seconds
resizeRendererToDisplaySize(renderer);
const canvas = renderer.domElement;
uniforms.iResolution.value.set(canvas.width, canvas.height, 1);
uniforms.iTime.value = time;
mesh.rotation.y += 0.01
renderer.render(scene, camera);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
</script>
But as you see, when I change to PerspectiveCamera, and rotate the plane, the texture does not transform with the object.
What's the simplest way to modify the example to make the shader transform with the plane, so that the graphic appears to be on the surface of the plane instead of the plane appearing to be like a mask?
The answer is to pass the uv coordinates from the vertex shader to the fragment shader using a varying variable.
Basically we can replace the lines
const fragment = `
... clipped ...
void main() {
mainImage(gl_FragColor, gl_FragCoord.xy);
}
`;
const uniforms = {
iTime: { value: 0 },
iResolution: { value: new THREE.Vector3() },
};
const material = new THREE.ShaderMaterial({
fragmentShader,
with
const fragment = `
... clipped ...
varying vec2 vUv;
void main() {
mainImage(gl_FragColor, vUv * iResolution.xy);
}
`;
const vertexShader = `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`;
const uniforms = {
iTime: { value: 0 },
iResolution: { value: new THREE.Vector3() },
};
const material = new THREE.ShaderMaterial({
vertexShader,
fragmentShader,
and we get the following result:
/* Full-height page with no margins. */
html, body {
height: 100%;
margin: 0;
}
/* Render canvas fills the page. */
#c {
width: 100%;
height: 100%;
display: block;
}
<canvas id="c"></canvas>
<script type="module">
// Three.js - Shadertoy Basic
// from https://threejsfundamentals.org/threejs/threejs-shadertoy-basic.html
import * as THREE from 'https://unpkg.com/three#0.122.0/build/three.module.js';
// Fixed version of the shadertoy demo: the fragment shader now samples
// by vUv * iResolution (interpolated mesh UVs) instead of gl_FragCoord,
// so the pattern is attached to the plane's surface and rotates with it.
function main() {
const canvas = document.querySelector('#c');
const renderer = new THREE.WebGLRenderer({canvas});
renderer.autoClearColor = false;
const camera = new THREE.PerspectiveCamera(
45, 16/9, 0.01, 1000
);
camera.position.z = 5
const scene = new THREE.Scene();
const plane = new THREE.PlaneBufferGeometry(2, 2);
const fragmentShader = `
#include <common>
uniform vec3 iResolution;
uniform float iTime;
// https://www.shadertoy.com/view/MtXSWj
float alternate(float p, float d){;
return sign(fract(p*d*.5)*2.-1.);
}
vec3 rainbow(float t){
return sin(t+vec3(0,.33,.66)*6.28)*.5+.5;
}
vec3 TwinDragon(vec2 p){
float time = fract(iTime*0.05)*20.;
//scaling
p = (p*2.-iResolution.xy)/iResolution.y*1.5;
//----------the fractal stuff---- ---THIS IS ANIMATIONS----(so remove them if you want)
p.y += alternate(p.x, 256. )/512. * clamp(time-16.,0.,2.)/2.;
p.x -= alternate(p.y, 128. )/256. * clamp(time-14.,0.,2.)/2.;
p.y += alternate(p.x, 64. )/128. * clamp(time-12.,0.,2.)/2.;
p.x -= alternate(p.y, 32. )/ 64. * clamp(time-10.,0.,2.)/2.;
p.y += alternate(p.x, 16. )/ 32. * clamp(time- 8.,0.,2.)/2.;
p.x -= alternate(p.y, 8. )/ 16. * clamp(time- 6.,0.,2.)/2.;
p.y += alternate(p.x, 4. )/ 8. * clamp(time- 4.,0.,2.)/2.;
p.x -= alternate(p.y, 2. )/ 4. * clamp(time- 2.,0.,2.)/2.;
// prettifying
vec2 block = ceil(p+.5); //index for blocks from which the fractal is shifted
vec3 color = rainbow(block.x*4.+block.y); //rainbow palette using block index as t
float dis = length(fract(p+.5)*2.-1.);//distance to middle of block
color *= .5+dis*.7; //using distance within block for some more pretty.
return color;
}
void mainImage( out vec4 fragColor, in vec2 fragCoord ){
vec2 d = vec2(.5,0);
//some antialiasing
vec3 col = (
TwinDragon(fragCoord+d.xy)+
TwinDragon(fragCoord-d.xy)+
TwinDragon(fragCoord+d.yx)+
TwinDragon(fragCoord-d.yx)
)*.25;
fragColor = vec4(col,1.);
}
varying vec2 vUv;
void main() {
mainImage(gl_FragColor, vUv * iResolution.xy);
}
`;
// Companion vertex shader: passes the plane's uv through to the
// fragment stage as the varying vUv.
const vertexShader = `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
`;
const uniforms = {
iTime: { value: 0 },
iResolution: { value: new THREE.Vector3() },
};
const material = new THREE.ShaderMaterial({
vertexShader,
fragmentShader,
uniforms,
side: THREE.DoubleSide
});
const mesh = new THREE.Mesh(plane, material)
scene.add(mesh);
// Resizes the drawing buffer to the canvas's CSS size when they differ.
function resizeRendererToDisplaySize(renderer) {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const needResize = canvas.width !== width || canvas.height !== height;
if (needResize) {
renderer.setSize(width, height, false);
}
return needResize;
}
// Per-frame loop: update shader uniforms and slowly rotate the plane.
function render(time) {
time *= 0.001; // convert to seconds
resizeRendererToDisplaySize(renderer);
const canvas = renderer.domElement;
uniforms.iResolution.value.set(canvas.width, canvas.height, 1);
uniforms.iTime.value = time;
mesh.rotation.y += 0.01
renderer.render(scene, camera);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
</script>

Apply color gradient to material on mesh - three.js

I have an STL file loaded into my scene with a single colour applied to a phong material
I'd like a way of applying two colours to this mesh's material with a gradient effect applied on the Z axis, like the example below: [Gradient Vase]
I have a feeling I may have to introduce shaders but I've not gotten this far with three.js.
Simple gradient shader, based on uvs:
// Gradient via interpolated UVs: the cylinder's v coordinate runs 0..1
// from bottom to top, so mix(color1, color2, vUv.y) in the fragment
// shader yields a smooth vertical two-colour gradient.
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, 1, 1, 1000);
camera.position.set(13, 25, 38);
camera.lookAt(scene.position);
var renderer = new THREE.WebGLRenderer({
antialias: true
});
var canvas = renderer.domElement
document.body.appendChild(canvas);
var controls = new THREE.OrbitControls(camera, renderer.domElement);
var geometry = new THREE.CylinderBufferGeometry(2, 5, 20, 32, 1, true);
var material = new THREE.ShaderMaterial({
uniforms: {
color1: {
value: new THREE.Color("red")
},
color2: {
value: new THREE.Color("purple")
}
},
vertexShader: `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
`,
fragmentShader: `
uniform vec3 color1;
uniform vec3 color2;
varying vec2 vUv;
void main() {
gl_FragColor = vec4(mix(color1, color2, vUv.y), 1.0);
}
`,
wireframe: true
});
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
render();
// Resizes the drawing buffer to the canvas's CSS size when they differ.
function resize(renderer) {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const needResize = canvas.width !== width || canvas.height !== height;
if (needResize) {
renderer.setSize(width, height, false);
}
return needResize;
}
// Render loop: keep the camera aspect in sync, then draw.
function render() {
if (resize(renderer)) {
camera.aspect = canvas.clientWidth / canvas.clientHeight;
camera.updateProjectionMatrix();
}
renderer.render(scene, camera);
requestAnimationFrame(render);
}
/* Full-window page with no scrollbars. */
html,
body {
height: 100%;
margin: 0;
overflow: hidden;
}
/* Canvas fills the window; display: block removes the inline gap below it. */
canvas {
width: 100%;
height: 100%;
display: block; /* fixed: was the invalid `display;` / `block;` */
}
<script src="https://cdn.jsdelivr.net/npm/three#0.115.0/build/three.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/three#0.115.0/examples/js/controls/OrbitControls.js"></script>
Simple gradient shader, based on coordinates:
// Gradient via geometry coordinates: the vertex shader normalises each
// vertex's y position against the geometry's bounding box, so the
// gradient spans the mesh's actual height regardless of its UV layout.
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, 1, 1, 1000);
camera.position.set(13, 25, 38);
camera.lookAt(scene.position);
var renderer = new THREE.WebGLRenderer({
antialias: true
});
var canvas = renderer.domElement
document.body.appendChild(canvas);
var controls = new THREE.OrbitControls(camera, renderer.domElement);
var geometry = new THREE.CylinderBufferGeometry(2, 5, 20, 16, 4, true);
// Bounding box must exist before its min/max are handed to the uniforms.
geometry.computeBoundingBox();
var material = new THREE.ShaderMaterial({
uniforms: {
color1: {
value: new THREE.Color("red")
},
color2: {
value: new THREE.Color("purple")
},
bboxMin: {
value: geometry.boundingBox.min
},
bboxMax: {
value: geometry.boundingBox.max
}
},
vertexShader: `
uniform vec3 bboxMin;
uniform vec3 bboxMax;
varying vec2 vUv;
void main() {
vUv.y = (position.y - bboxMin.y) / (bboxMax.y - bboxMin.y);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position,1.0);
}
`,
fragmentShader: `
uniform vec3 color1;
uniform vec3 color2;
varying vec2 vUv;
void main() {
gl_FragColor = vec4(mix(color1, color2, vUv.y), 1.0);
}
`,
wireframe: true
});
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
render();
// Resizes the drawing buffer to the canvas's CSS size when they differ.
function resize(renderer) {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const needResize = canvas.width !== width || canvas.height !== height;
if (needResize) {
renderer.setSize(width, height, false);
}
return needResize;
}
// Render loop: keep the camera aspect in sync, then draw.
function render() {
if (resize(renderer)) {
camera.aspect = canvas.clientWidth / canvas.clientHeight;
camera.updateProjectionMatrix();
}
renderer.render(scene, camera);
requestAnimationFrame(render);
}
/* Full-window page with no scrollbars. */
html,
body {
height: 100%;
margin: 0;
overflow: hidden;
}
/* Canvas fills the window; block display removes the inline gap below it. */
canvas {
width: 100%;
height: 100%;
display: block;
}
<script src="https://cdn.jsdelivr.net/npm/three#0.115.0/build/three.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/three#0.115.0/examples/js/controls/OrbitControls.js"></script>
Gradient with vertex colours:
// Gradient via per-vertex colours: setGradient assigns each face-vertex
// a colour interpolated between gradient stops based on its normalised
// position along one axis of the geometry's bounding box.
// NOTE(review): this relies on the legacy Geometry API
// (geometry.faces / geometry.vertices / face.vertexColors), which was
// removed in three r125+ — it works with the r115 build loaded below.
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(60, 1, 1, 1000);
camera.position.set(0, 0, 10);
var renderer = new THREE.WebGLRenderer({
antialias: true
});
var canvas = renderer.domElement
document.body.appendChild(canvas);
var geom = new THREE.TorusKnotGeometry(2.5, .5, 100, 16);
var rev = true;
// Gradient stops: normalised position (0..1) -> colour.
var cols = [{
stop: 0,
color: new THREE.Color(0xf7b000)
}, {
stop: .25,
color: new THREE.Color(0xdd0080)
}, {
stop: .5,
color: new THREE.Color(0x622b85)
}, {
stop: .75,
color: new THREE.Color(0x007dae)
}, {
stop: 1,
color: new THREE.Color(0x77c8db)
}];
setGradient(geom, cols, 'z', rev);
// Writes interpolated vertexColors onto every face of `geometry`.
// `axis` picks which coordinate ('x'/'y'/'z') drives the gradient and
// `reverse` flips its direction.
function setGradient(geometry, colors, axis, reverse) {
geometry.computeBoundingBox();
var bbox = geometry.boundingBox;
var size = new THREE.Vector3().subVectors(bbox.max, bbox.min);
var vertexIndices = ['a', 'b', 'c'];
var face, vertex, normalized = new THREE.Vector3(),
normalizedAxis = 0;
// For each pair of adjacent stops, colour the vertices falling in that band.
for (var c = 0; c < colors.length - 1; c++) {
var colorDiff = colors[c + 1].stop - colors[c].stop;
for (var i = 0; i < geometry.faces.length; i++) {
face = geometry.faces[i];
for (var v = 0; v < 3; v++) {
vertex = geometry.vertices[face[vertexIndices[v]]];
// Position of this vertex along `axis`, scaled to 0..1 within the bbox.
normalizedAxis = normalized.subVectors(vertex, bbox.min).divide(size)[axis];
if (reverse) {
normalizedAxis = 1 - normalizedAxis;
}
if (normalizedAxis >= colors[c].stop && normalizedAxis <= colors[c + 1].stop) {
var localNormalizedAxis = (normalizedAxis - colors[c].stop) / colorDiff;
face.vertexColors[v] = colors[c].color.clone().lerp(colors[c + 1].color, localNormalizedAxis);
}
}
}
}
}
// Material reads the per-vertex colours written above.
var mat = new THREE.MeshBasicMaterial({
vertexColors: THREE.VertexColors,
wireframe: true
});
var obj = new THREE.Mesh(geom, mat);
scene.add(obj);
render();
// Resizes the drawing buffer to the canvas's CSS size when they differ.
function resize(renderer) {
const canvas = renderer.domElement;
const width = canvas.clientWidth;
const height = canvas.clientHeight;
const needResize = canvas.width !== width || canvas.height !== height;
if (needResize) {
renderer.setSize(width, height, false);
}
return needResize;
}
// Render loop: sync aspect, spin the knot, draw.
function render() {
if (resize(renderer)) {
camera.aspect = canvas.clientWidth / canvas.clientHeight;
camera.updateProjectionMatrix();
}
renderer.render(scene, camera);
obj.rotation.y += .01;
requestAnimationFrame(render);
}
/* Full-window page with no scrollbars. */
html,
body {
height: 100%;
margin: 0;
overflow: hidden;
}
/* Canvas fills the window; display: block removes the inline gap below it. */
canvas {
width: 100%;
height: 100%;
display: block; /* fixed: was the invalid `display;` / `block;` */
}
<script src="https://cdn.jsdelivr.net/npm/three#0.115.0/build/three.min.js"></script>
Actually, it's up to you which approach to use: shaders, vertex colours, textures etc.
If you want to keep the functionality of the MeshPhongMaterial you can try extending the material.
This is a somewhat broad topic with several approaches, and you can read more about it in depth here.
There is a line in the phong materials shader that looks like this
vec4 diffuseColor = vec4( diffuse, opacity );
So after studying the book of shaders or some other tutorials, you will learn that you can mix two colors by using a normalized factor ( a number between 0,1).
That means that you could change this line to something like this
vec4 diffuseColor = vec4( mix(diffuse, myColor, vec3(myFactor)), opacity);
You can extend the shader as such
// Patch the Phong fragment shader so the base diffuse colour is blended
// toward `myColor` by `myFactor` (0 = original colour, 1 = myColor).
// Fixed: the original snippet passed raw GLSL to String.replace()
// instead of quoted string arguments, and never closed the arrow function.
const myFactor = { value: 0 };
const myColor = { value: new THREE.Color() };
myMaterial.onBeforeCompile = (shader) => {
  // Expose our uniform holders to the compiled program.
  shader.uniforms.myFactor = myFactor;
  shader.uniforms.myColor = myColor;
  shader.fragmentShader = `
uniform vec3 myColor;
uniform float myFactor;
${shader.fragmentShader.replace(
    'vec4 diffuseColor = vec4( diffuse, opacity );',
    'vec4 diffuseColor = vec4( mix(diffuse, myColor, vec3(myFactor)), opacity );'
  )}
`;
};
Now when you change myFactor.value the color of your object should change from myMaterial.color to myColor.value.
Now to actually make it into a gradient you would replace myFactor with something dynamic. I like prisoners solution to use the uvs. It's entirely done in javascript, and very simple to hook up in this shader. Other approaches would probably require more shader work.
vec4 diffuseColor = vec4( mix(diffuse, myColor, vec3(vUv.y)), opacity);
Now the problem you may encounter - if you call new PhongMaterial({color}) ie. without any textures provided to it, the shader will compile without vUv.
There are many conditions that would cause it to compile and be useful to you, but i'm not sure if they break other stuff:
#if defined( USE_MAP ) || defined( USE_BUMPMAP ) || defined( USE_NORMALMAP ) || defined( USE_SPECULARMAP ) || defined( USE_ALPHAMAP ) || defined( USE_EMISSIVEMAP ) || defined( USE_ROUGHNESSMAP ) || defined( USE_METALNESSMAP )
So, adding something like
myMaterial.defines = {USE_MAP:''}
Might make vUv variable available for your shader. This way you get all the lights of the phong material to affect the material, you just change the base color.
If you want your gradient to be static, you could just add a texture to your material using the .map property. Or you could assign it to the .emissiveMap property if you want it to "glow" without the need of lights.
However, if you want your gradient to change, and always fade in the z-axis, even after rotating the model or camera, you'd have to write a custom shader, which would require you to take some tutorials. You could look at this example for how to implement custom shaders in Three.js, and visit https://thebookofshaders.com/ to get a good understanding on how to write a simple gradient shader.

Three.js: Setting `texture.needsUpdate = true` is very slow

I'm working on a Three.js scene in which I'd like to update some textures after some time. I'm finding that updating the textures is very slow, however, and drags FPS to only 1-2 FPS for several seconds (when updating just a single texture).
Is there anything one can do to expedite texture updates? Any insights others can offer on this question would be very appreciated.
To see this behavior, click the window of the example below. This will load the first texture update (another click will trigger the second texture update). If you try to zoom after one of these clicks, you'll find the screen freezes and the FPS will drop terribly. Does anyone know how to fix this problem?
<html>
<head>
<style>
/* Full-window black page. */
html, body { width: 100%; height: 100%; background: #000; }
/* No default margin or scrollbars. */
body { margin: 0; overflow: hidden; }
/* Render canvas fills the window. */
canvas { width: 100%; height: 100%; }
</style>
</head>
<body>
<script src='https://cdnjs.cloudflare.com/ajax/libs/three.js/88/three.min.js'></script>
<script src='https://rawgit.com/YaleDHLab/pix-plot/master/assets/js/trackball-controls.js'></script>
<script src='https://rawgit.com/mrdoob/stats.js/master/build/stats.min.js'></script>
<script type='x-shader/x-vertex' id='vertex-shader'>
// Instanced point vertex shader: offsets the blueprint vertex by the
// per-instance translation and forwards the texture index.
precision highp float;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform vec3 cameraPosition;
attribute vec3 position; // sets the blueprint's vertex positions
attribute vec3 translation; // x y translation offsets for an instance
attribute float texIdx; // the texture index to access
varying float vTexIdx;
void main() {
// set point position
vec3 pos = position + translation;
vec4 projected = projectionMatrix * modelViewMatrix * vec4(pos, 1.0);
gl_Position = projected;
// assign the varyings
vTexIdx = texIdx;
// use the delta between the point position and camera position to size point
// NOTE(review): `projected` is in clip space while cameraPosition is in
// world space, so this mixes coordinate spaces — verify the intent.
float xDelta = pow(projected[0] - cameraPosition[0], 2.0);
float yDelta = pow(projected[1] - cameraPosition[1], 2.0);
float zDelta = pow(projected[2] - cameraPosition[2], 2.0);
float delta = pow(xDelta + yDelta + zDelta, 0.5);
gl_PointSize = 40000.0 / delta;
}
</script>
<script type='x-shader/x-fragment' id='fragment-shader'>
// Point fragment shader: picks sampler `a` or `b` by the instance's
// texture index and samples it with the point sprite coordinate.
precision highp float;
uniform sampler2D a;
uniform sampler2D b;
varying float vTexIdx;
void main() {
int textureIndex = int(vTexIdx);
vec2 uv = vec2(gl_PointCoord.x, gl_PointCoord.y);
if (textureIndex == 0) {
gl_FragColor = texture2D(a, uv);
} else if (textureIndex == 1) {
gl_FragColor = texture2D(b, uv);
}
}
</script>
<script>
/**
* Generate a scene object with a background color
**/
/**
 * Build the root scene, cleared to a light-grey background.
 **/
function getScene() {
  const result = new THREE.Scene();
  result.background = new THREE.Color(0xaaaaaa);
  return result;
}
/**
* Generate the camera to be used in the scene
**/
/**
 * Perspective camera matching the window aspect ratio, pulled far
 * back along -z so the whole point cloud is in view.
 **/
function getCamera() {
  const cam = new THREE.PerspectiveCamera(
    75,
    window.innerWidth / window.innerHeight,
    0.1,
    100000
  );
  cam.position.set(0, 1, -6000);
  return cam;
}
/**
* Generate the renderer to be used in the scene
**/
/**
 * Create an antialiased WebGL renderer sized to the window (respecting
 * devicePixelRatio for retina displays) and attach its canvas to the
 * document body.
 **/
function getRenderer() {
  const r = new THREE.WebGLRenderer({ antialias: true });
  r.setPixelRatio(window.devicePixelRatio); // retina support
  r.setSize(window.innerWidth, window.innerHeight);
  document.body.appendChild(r.domElement);
  return r;
}
/**
* Generate the controls to be used in the scene
**/
/**
 * Trackball controls bound to the renderer's canvas, with slowed
 * zoom and pan speeds.
 **/
function getControls(camera, renderer) {
  const ctl = new THREE.TrackballControls(camera, renderer.domElement);
  ctl.zoomSpeed = 0.4;
  ctl.panSpeed = 0.4;
  return ctl;
}
/**
 * Generate the points for the scene: an instanced buffer geometry with a
 * per-instance translation and texture index, rendered as THREE.Points.
 * Also wires a click handler that paints the two backing canvases
 * (red on the first click, blue on the second).
 * @param {THREE.Scene} scene - scene that receives the points mesh
 **/
function addPoints(scene) {
  var BA = THREE.BufferAttribute;
  var IBA = THREE.InstancedBufferAttribute;
  var geometry = new THREE.InstancedBufferGeometry();
  // add data for each observation
  var n = 10000; // number of observations
  var translation = new Float32Array(n * 3);
  var texIdx = new Float32Array(n);
  var translationIterator = 0;
  var texIterator = 0;
  // one iteration per observation: each writes 3 translation floats and
  // 1 texture index. (The original looped i < n*3, doing 3x the work and
  // relying on out-of-range typed-array writes being silently dropped.)
  for (var i = 0; i < n; i++) {
    var x = Math.random() * n - (n / 2);
    var y = Math.random() * n - (n / 2);
    translation[translationIterator++] = x;
    translation[translationIterator++] = y;
    translation[translationIterator++] = Math.random() * n - (n / 2);
    // points in the upper diagonal band sample texture b (index 1)
    texIdx[texIterator++] = (x + y) > (n / 8) ? 1 : 0;
  }
  var positionAttr = new BA(new Float32Array([0, 0, 0]), 3);
  var translationAttr = new IBA(translation, 3, 1);
  var texIdxAttr = new IBA(texIdx, 1, 1);
  positionAttr.dynamic = true;
  translationAttr.dynamic = true;
  texIdxAttr.dynamic = true;
  geometry.addAttribute('position', positionAttr);
  geometry.addAttribute('translation', translationAttr);
  geometry.addAttribute('texIdx', texIdxAttr);
  // NOTE(review): each 16384x16384 RGBA canvas is ~1GB of texture data per
  // upload; consider much smaller canvases or gl.texSubImage2D updates
  var canvases = [
    getElem('canvas', { width: 16384, height: 16384 }),
    getElem('canvas', { width: 16384, height: 16384 }),
  ];
  var textures = [
    getTexture(canvases[0]),
    getTexture(canvases[1]),
  ];
  var material = new THREE.RawShaderMaterial({
    uniforms: {
      a: {
        type: 't',
        value: textures[0],
      },
      b: {
        type: 't',
        value: textures[1],
      }
    },
    vertexShader: document.getElementById('vertex-shader').textContent,
    fragmentShader: document.getElementById('fragment-shader').textContent,
  });
  var mesh = new THREE.Points(geometry, material);
  mesh.frustumCulled = false; // prevent the mesh from being clipped on drag
  scene.add(mesh);
  // on the first window click, paint red points
  // on the second window click, paint blue points
  var clicks = 0;
  window.addEventListener('click', function() {
    if (clicks == 0 || clicks == 1) {
      var canvas = canvases[clicks];
      var ctx = canvas.getContext('2d');
      ctx.fillStyle = clicks == 0 ? 'red' : 'blue';
      ctx.rect(0, 0, 16384, 16384);
      ctx.fill();
      textures[clicks].needsUpdate = true; // re-upload the painted canvas
      clicks++;
    }
  })
}
/**
 * Wrap a canvas in a THREE.Texture, flag it for GPU upload,
 * and disable the default vertical flip on upload.
 * @param {HTMLCanvasElement} canvas
 * @returns {THREE.Texture}
 **/
function getTexture(canvas) {
  const texture = new THREE.Texture(canvas);
  texture.needsUpdate = true;
  texture.flipY = false;
  return texture;
}
/**
 * Create a DOM element of the given tag and copy properties onto it.
 * @param {string} tag - element tag name, e.g. 'canvas'
 * @param {Object} [obj] - properties assigned directly onto the element
 * @returns {HTMLElement}
 **/
function getElem(tag, obj) {
  // use a separate const instead of re-declaring the parameter with var
  const props = obj || {};
  const elem = document.createElement(tag);
  Object.keys(props).forEach(function(attr) {
    elem[attr] = props[attr];
  });
  return elem;
}
/**
 * Add an FPS meter pinned near the top-right corner of the page.
 * @returns {Stats}
 **/
function getStats() {
  const stats = new Stats();
  const el = stats.domElement;
  el.style.position = 'absolute';
  el.style.top = '65px';
  el.style.right = '5px';
  el.style.left = 'initial';
  document.body.appendChild(el);
  return stats;
}
/**
 * Render loop: schedule the next frame, draw the scene, then
 * refresh the trackball controls and the FPS meter.
 **/
function render() {
requestAnimationFrame(render);
renderer.render(scene, camera);
controls.update();
stats.update();
};
/**
 * Main: build the global scene objects in dependency order
 * (controls need both camera and renderer), then start the loop.
 **/
var stats = getStats();
var scene = getScene();
var camera = getCamera();
var renderer = getRenderer();
var controls = getControls(camera, renderer);
addPoints(scene);
render();
</script>
</body>
</html>
Your canvases are 16384 by 16384. That's basically insanely large.
For RGBA format, that is 1073741824 bytes.. a gigabyte of texture data that is getting sent to your GPU from the CPU when you set that texture.needsUpdate = true
You will definitely notice this getting uploaded to the card.
If your use case absolutely requires textures that large, then you may need to consider doing incremental updates via gl.texSubImage2D, or using a bunch of smaller textures and only updating one of them per frame, or only updating those textures at the start of your app and not thereafter.
For reference, there are very few cases i've seen where textures > 4k per side are needed.
And that is about 1/16th the size of your textures.
This has nothing to do with three.js btw. It's a fundamental characteristic of GPU/CPU interaction. Uploads and state changes are slow and have to be carefully orchestrated and monitored.

Texture from WebGLRenderTarget not rendered with StereoEffect

I have a panoramic view and want to blur it in order to present a user-interface in front of the blurred view. The blurred image has to be computed on the client so I decided to use a fragment-shader-based implementation to do that.
This works very nice as long as I only use only the regular renderer.
But when the scene is rendered using the THREE.StereoEffect instead, the blurred image doesn't appear on screen.
You can see this in the attached snippet (jsfiddle here: https://jsfiddle.net/n988sg96/3/): If you press "toggle blur" everything looks like it should. But if you press "toggle stereo" and then activate the blur, the screen just turns black (so basically, the blurred image will not render).
The generation of the blurred image is implemented in createBlurredTexture() using the same renderer that is also used for the scene and two render-targets for the vertical and horizontal pass of the blur.
I already verified (by exporting the framebuffers as image via renderer.readRenderTargetPixels()) that both render-targets contain correct images in both cases (so independent of wether the stereo-mode is on or not).
So the questions I have are:
why is it that the texture from the RenderTarget is not rendered with the StereoEffect?
are there other comparable options to achieve the same effect?
// Panorama source: the texture starts empty and is populated once the
// (CORS-enabled) image finishes loading.
const panoUrl = 'https://farm9.staticflickr.com/8652/29593302665_9e747048f7_k_d.jpg';
const panoTexture = new THREE.Texture();
const image = new Image();
image.crossOrigin = 'Anonymous';
image.onload = () => {
panoTexture.image = image;
panoTexture.format = THREE.RGBFormat;
panoTexture.needsUpdate = true; // schedule the GPU upload
};
image.src = panoUrl;
// UI buttons that toggle the blur overlay and stereo rendering
const blurButton = document.querySelector('.blur-btn');
const stereoButton = document.querySelector('.stereo-btn');
/**
 * Populate the scene with the panorama sphere plus an (initially hidden)
 * inner sphere that receives the blurred texture when the blur button
 * is toggled on.
 * @param {THREE.Scene} scene
 * @param {THREE.WebGLRenderer} renderer - reused by createBlurredTexture
 **/
function initScene(scene, renderer) {
  const pano = new THREE.Mesh(
    new THREE.SphereGeometry(100, 36, 18),
    new THREE.MeshBasicMaterial({
      depthWrite: false,
      map: panoTexture
    })
  );
  const blur = new THREE.Mesh(
    new THREE.SphereGeometry(80, 36, 18),
    new THREE.MeshBasicMaterial({
      color: 0x666666
    })
  );
  // flip normals: a negative x-scale turns the spheres inside-out so the
  // camera at the center sees their inner surface
  pano.scale.x = -1;
  blur.scale.x = -1;
  blur.visible = false;
  scene.add(pano, blur);
  blurButton.addEventListener('click', (ev) => {
    if (!blur.visible) {
      // (re)compute the blurred texture lazily, on each toggle-on
      blur.material.map = createBlurredTexture(renderer, pano.material.map.image);
      blur.material.needsUpdate = true;
      blur.visible = true;
    } else {
      blur.visible = false;
    }
  });
}
/**
 * Create a blurred image-texture from the given image using a two-pass
 * (vertical then horizontal) shader blur rendered into render targets.
 * @param {THREE.WebGLRenderer} renderer - the app's main renderer
 * @param {HTMLImageElement} img - source image to blur
 * @param {number} [prescale=0.25] - downscale factor applied before blurring
 * @returns {THREE.Texture} texture backed by the second render target
 **/
function createBlurredTexture(renderer, img, prescale = 0.25) {
  const width = img.width * prescale;
  const height = img.height * prescale;
  const material = blurPassMaterial;
  const camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
  // full-screen quad that runs the blur material
  const scene = new THREE.Scene()
    .add(new THREE.Mesh(new THREE.PlaneBufferGeometry(2, 2), material));
  const renderTargetOpts = {
    depthBuffer: false,
    stencilBuffer: false
  };
  const rt1 = new THREE.WebGLRenderTarget(width, height, renderTargetOpts);
  const rt2 = new THREE.WebGLRenderTarget(width, height, renderTargetOpts);
  material.uniforms.resolution.value.set(width, height);
  // prepare: downscale source-image via a 2d canvas
  const canvas = document.createElement('canvas');
  canvas.width = width;
  canvas.height = height;
  canvas.getContext('2d').drawImage(img, 0, 0, width, height);
  const texture = new THREE.CanvasTexture(canvas);
  texture.wrapS = texture.wrapT = THREE.RepeatWrapping;
  // pass 1: vertical blur, texture -> rt1
  material.uniforms.image.value = texture;
  material.uniforms.direction.value.set(0, 1);
  renderer.render(scene, camera, rt1);
  // pass 2: horizontal blur, rt1 -> rt2
  material.uniforms.image.value = rt1.texture;
  material.uniforms.direction.value.set(1, 0);
  renderer.render(scene, camera, rt2);
  // cleanup: unbind the render target so later renderer.clear() calls
  // (e.g. from THREE.StereoEffect) clear the screen framebuffer instead
  // of rt2 — without this the blurred sphere renders black in stereo mode
  renderer.setRenderTarget(null);
  texture.dispose();
  rt1.texture.dispose();
  rt1.dispose();
  return rt2.texture;
}
// simple material for a fast 5px blur pass
// Shared by both passes: `direction` selects horizontal vs vertical,
// `resolution` converts the pixel offset into UV space, and `image` is
// the texture to sample. Running it twice (once per axis) yields the
// full separable blur.
const blurPassMaterial = new THREE.ShaderMaterial({
uniforms: {
image: {type: 't', value: null},
resolution: {type: 'v2', value: new THREE.Vector2()},
direction: {type: 'v2', value: new THREE.Vector2(1, 0)}
},
// pass-through vertex shader: forward UVs, standard projection
vertexShader: `
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`,
// 3-tap blur exploiting linear filtering (5px effective kernel)
fragmentShader: `
varying vec2 vUv;
uniform vec2 direction;
uniform vec2 resolution;
uniform sampler2D image;
// based on https://github.com/Jam3/glsl-fast-gaussian-blur
vec4 blur5(sampler2D image, vec2 uv, vec2 resolution, vec2 direction) {
vec2 offset = (vec2(1.3333333333333333) * direction) / resolution;
return texture2D(image, uv) * 0.29411764705882354
+ texture2D(image, uv + offset) * 0.35294117647058826
+ texture2D(image, uv - offset) * 0.35294117647058826;
}
void main() {
gl_FragColor = blur5(image, vUv, resolution, direction);
}
`
});
// ---- boilerplate-code
// .... setup renderer and stereo-effect
let isStereoMode = false;
const renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
const effect = new THREE.StereoEffect(renderer);
// .... setup scene
const scene = window.scene = new THREE.Scene();
// .... setup camera and controls
const camera = new THREE.PerspectiveCamera(
70, window.innerWidth / window.innerHeight, 1, 1000);
const controls = new THREE.OrbitControls(camera);
controls.enableZoom = false;
controls.enableDamping = true;
controls.dampingFactor = .15;
// camera sits slightly off-center so the controls have a view direction
camera.position.set(0, 0, .1);
camera.lookAt(new THREE.Vector3(0, 0, 0));
// .... setup and run
initScene(scene, renderer);
// render loop: dispatch to the stereo effect or the plain renderer
requestAnimationFrame(function loop(time) {
controls.update();
if (isStereoMode) {
effect.render(scene, camera);
} else {
renderer.render(scene, camera);
}
requestAnimationFrame(loop);
});
// .... bind events
stereoButton.addEventListener('click', ev => {
isStereoMode = !isStereoMode;
if (!isStereoMode) {
// restore the full-size viewport that stereo rendering overrode
renderer.setViewport(0, 0, window.innerWidth, window.innerHeight);
}
});
window.addEventListener('resize', ev => {
renderer.setSize(window.innerWidth, window.innerHeight);
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
});
document.body.appendChild(renderer.domElement);
/* full-viewport canvas with no page margins or scrollbars */
body {
margin: 0;
overflow: hidden;
}
canvas {
width: 100vw;
height: 100vh;
}
/* button bar centered and overlaid on top of the canvas */
.buttons {
position: absolute;
top: 10px;
left: 0;
right: 0;
text-align: center;
}
button {
display: inline-block;
}
<script src="https://cdn.rawgit.com/mrdoob/three.js/master/build/three.js"></script>
<script src="https://cdn.rawgit.com/mrdoob/three.js/master/examples/js/controls/OrbitControls.js"></script>
<script src="https://cdn.rawgit.com/mrdoob/three.js/master/examples/js/effects/StereoEffect.js"></script>
<div class="buttons">
<button class="blur-btn">toggle blur</button>
<button class="stereo-btn">toggle stereo</button>
</div>
Found a solution reading this Question: three.js - THREE.StereoEffect / webVR-boilerplate + THREE.Mirror
And I just needed to add a single line to the createBlurredTexture()-function.
When cleaning up, it is necessary to manually unset the renderTarget by calling
renderer.setRenderTarget(null);
The reason for this is that the rendering of the stereo-effect will call renderer.clear(), which will - without unsetting the rendertarget - clear the renderTarget instead of the screen framebuffer.
So thanks a lot stackoverflow <3

Categories