How to properly implement Cook-Torrance shading in three.js? - javascript

I'm trying to implement the Cook-Torrance shading algorithm in three.js. I have a mostly working solution, but it doesn't show the effect of the ambient light: the sides of the cube not illuminated by the point light are completely black. If I remove the "Beckmann term" I can indeed see the ambient light effect (screenshot omitted).
The same happens if I replace Beckmann with a function that always returns 0.0 (screenshot omitted).
It seems like the cause of the wrong behaviour is the division in:
vec3 Specular = (Beckmann(NdotH) * G(NdotH, NdotV, VdotH, NdotL) * R_F(VdotH)) / ( NdotL* NdotV);
If I modify NdotL * NdotV to NdotV and change the computation for gl_FragColor to:
gl_FragColor = vec4(beta * NdotL * (1.0-s)*Kd + beta * s*Specular + ambient*Kd, 1.0);
Everything seems to work correctly.
What I don't understand is: why? This problem with the division isn't mentioned anywhere, and I'm not 100% sure that the remaining division won't cause problems in other situations.
Here's the full MWE:
<html>
<head>
<title>Cook-Torrance BRDF computed by shader</title>
<style>
body {
font-family: Monospace;
background-color: #f0f0f0;
margin: 0px;
overflow: hidden;
}
canvas {
width: 100%;
height: 100%;
}
</style>
<script src="lib/three.min.js"></script>
<script src="lib/OrbitControls.js"></script>
</head>
<body>
<script type="text/x-glsl" id="vertex">
varying vec3 transformedNormal;
varying vec3 pointPosition;
varying vec3 lightVector;
uniform vec3 pointLightPosition;
void main()
{
transformedNormal = normalMatrix * normal;
pointPosition = (modelViewMatrix * vec4( position, 1.0 )).xyz;
vec4 lPosition = viewMatrix * vec4( pointLightPosition, 1.0 );
lightVector = lPosition.xyz - pointPosition;
gl_Position = projectionMatrix * vec4(pointPosition,1.0);
}
</script>
<script type="text/x-glsl" id="ct-fragment">
uniform vec3 lightPower;
uniform vec3 ambient;
uniform vec3 Kd; // surface diffuse color
uniform vec3 Ks; // surface specular color: equal to R_F(0)
uniform float m; // material roughness (average slope of microfacets)
uniform float s; // percentage of incoming light which is specularly reflected
varying vec3 transformedNormal;
varying vec3 pointPosition;
varying vec3 lightVector;
#define PI 3.14159265
float G(float NdotH, float NdotV, float VdotH, float NdotL)
{
float G1 = 2.0 * NdotH * NdotV / VdotH;
float G2 = 2.0 * NdotH * NdotL / VdotH;
return min( 1.0, min( G1, G2 ));
}
vec3 R_F(float VdotH) {
return Ks + (1.0 - Ks)*pow(1.0-VdotH, 5.0);
}
float Beckmann(float NdotH){
float A = 1.0 / (pow(m,2.0)+pow(NdotH,4.0)*PI);
float B = exp( - pow( tan(acos(NdotH)) , 2.0) / pow(m,2.0));
return A*B;
}
void main()
{
vec3 n = normalize( transformedNormal );
vec3 v = normalize( -pointPosition );
vec3 l = normalize( lightVector );
vec3 h = normalize( v+l );
float NdotH = max(0.0, dot( n, h ));
float VdotH = max(0.0, dot( v, h ));
float NdotV = max(0.0, dot( n, v ));
float NdotL = max(0.0, dot( n, l ));
// specular BRDF
vec3 Specular = (Beckmann(NdotH) * G(NdotH, NdotV, VdotH, NdotL) * R_F(VdotH)) / ( NdotL* NdotV);
vec3 beta = lightPower / ( 4.0 * PI * pow( length(lightVector),2.0) );
gl_FragColor = vec4(beta * NdotL * ((1.0-s)*Kd + s*Specular) + ambient*Kd, 1.0);
}
</script>
<script>
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 1000 );
camera.position = new THREE.Vector3(0,0,5);
var renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.setClearColor( 0xf0f0f0 );
document.body.appendChild( renderer.domElement );
controls = new THREE.OrbitControls(camera, renderer.domElement);
controls.target.set(0, 0, 0);
var uniforms = {
Ks: { type: "v3", value: new THREE.Vector3() },
Kd: { type: "v3", value: new THREE.Vector3() },
ambient: { type: "v3", value: new THREE.Vector3() },
pointLightPosition: { type: "v3", value: new THREE.Vector3() },
lightPower: { type: "v3", value: new THREE.Vector3() },
s: {type: "f", value: 0},
m: {type: "f", value: 0}
};
var vs = document.getElementById("vertex").textContent;
var fs = document.getElementById("ct-fragment").textContent;
var material = new THREE.ShaderMaterial({ uniforms: uniforms, vertexShader: vs, fragmentShader: fs });
var geometry = new THREE.CubeGeometry(1, 1, 1);
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
light = new THREE.Mesh( new THREE.SphereGeometry( 1, 16, 16), new THREE.MeshBasicMaterial ({color: 0xffff00, wireframe:true}));
light.position = new THREE.Vector3( 10.0, 10.0, 10.0 );
scene.add( light );
uniforms.Ks.value = new THREE.Vector3( 0.95, 0.93, 0.88 );
uniforms.Kd.value = (new THREE.Vector3( 0.50754, 0.50754, 0.50754 ));
uniforms.ambient.value = (new THREE.Vector3( 0.5, 0.5, 0.5 ));
uniforms.pointLightPosition.value = new THREE.Vector3(light.position.x, light.position.y, light.position.z);
uniforms.lightPower.value = new THREE.Vector3( 7000.0, 7000.0, 7000.0 );
uniforms.s.value = 0.5;
uniforms.m.value = 0.1;
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
controls.update();
renderer.render(scene, camera);
}
animate();
</script>
</body>
</html>

The shading equation is a mathematical description of the Cook-Torrance shading model.
Writing an actual shader is a different matter: it has to take into account that floating-point operations do not share all the properties of the real-number operations in the equation.
In this case dividing by 0 causes problems. The definition of Specular divides by NdotL * NdotV, which can be 0; when assigning to gl_FragColor I then multiply by NdotL again, obtaining 0 * inf = NaN, and the GPU seems to interpret NaN as a zero/negative value (thus displaying black).
As a reference, the correct main() is:
void main()
{
vec3 n = normalize( transformedNormal );
vec3 v = normalize( -pointPosition );
vec3 l = normalize( lightVector );
vec3 h = normalize( v+l );
vec3 specular = vec3(0.0, 0.0, 0.0);
float NdotH = max(0.0, dot( n, h ));
float VdotH = max(0.0, dot( v, h ));
float NdotV = max(0.0, dot( n, v ));
float NdotL = max(0.0, dot( n, l ));
if (NdotL > 0.0 && NdotV > 0.0)
{
specular = (Beckmann(NdotH) * G(NdotH, NdotV, VdotH, NdotL) * R_F(VdotH)) / ( NdotL* NdotV);
}
vec3 beta = lightPower / ( 4.0 * PI * pow( length(lightVector),2.0) );
gl_FragColor = vec4(beta * NdotL * ((1.0-s)*Kd + s*specular) + ambient*Kd, 1.0);
}
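An alternative to the explicit branch (a sketch of my own, not part of the original fix) is to clamp the denominator away from zero, which likewise prevents the 0 * inf = NaN case:
// Hypothetical variant: keep the denominator strictly positive so the
// division can never produce inf or NaN. The 1e-4 epsilon is arbitrary.
// This replaces the zero initialization and the if-branch above.
vec3 specular = (Beckmann(NdotH) * G(NdotH, NdotV, VdotH, NdotL) * R_F(VdotH))
                / max( NdotL * NdotV, 1e-4 );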

Related

THREE.js ShaderMaterial UV wrapping issues on SphereBufferGeometry

I'm trying to wrap a SphereBufferGeometry with a ShaderMaterial where I'm using noise to resemble the surface of Jupiter, but it's wrapping very oddly to the sphere geometry. All of the animated texture appears in a thin belt around one of the lines of latitude rather than wrapped around the 'planet' like a normal texture. I've attached images below.
It works well on a plane, but I was probably naive to think it would simply wrap like a texture would wrap, and I'm quite new to Shader programming so I'm a bit stuck.
(Image: the plane, which wraps fine.)
(Image: the sphere, which does not wrap correctly.)
I have a feeling that maybe I could move the noise equations to the fragment shader, but my knowledge isn't there yet; it broke when I tried. I even tried morphing the targets of the plane into a sphere, but ShaderMaterial doesn't natively support morphTargets, and after a LOT of trying to inject the #include <morphtarget_pars_vertex> using onBeforeCompile I still couldn't get it to work. I've also tried THREE's wrapping modes on the uniform texture, but they yield similar results.
Here's all of my code, the shaderMaterial implementation is in addPlanet():
import * as THREE from '../../build/three.module';
import { OrbitControls } from '../../examples/jsm/controls/OrbitControls';
const displacementVert = `
precision mediump float;
varying vec2 vUv;
varying float vWave;
uniform float uTime;
//
// Description : Array and textureless GLSL 2D/3D/4D simplex
// noise functions.
// Author : Ian McEwan, Ashima Arts.
// Maintainer : ijm
// Lastmod : 20110822 (ijm)
// License : Copyright (C) 2011 Ashima Arts. All rights reserved.
// Distributed under the MIT License. See LICENSE file.
// https://github.com/ashima/webgl-noise
//
vec3 mod289(vec3 x) {
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
vec4 mod289(vec4 x) {
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
vec4 permute(vec4 x) {
return mod289(((x*34.0)+1.0)*x);
}
vec4 taylorInvSqrt(vec4 r)
{
return 1.79284291400159 - 0.85373472095314 * r;
}
float snoise(vec3 v) {
const vec2 C = vec2(1.0/6.0, 1.0/3.0) ;
const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);
// First corner
vec3 i = floor(v + dot(v, C.yyy) );
vec3 x0 = v - i + dot(i, C.xxx) ;
// Other corners
vec3 g = step(x0.yzx, x0.xyz);
vec3 l = 1.0 - g;
vec3 i1 = min( g.xyz, l.zxy );
vec3 i2 = max( g.xyz, l.zxy );
// x0 = x0 - 0.0 + 0.0 * C.xxx;
// x1 = x0 - i1 + 1.0 * C.xxx;
// x2 = x0 - i2 + 2.0 * C.xxx;
// x3 = x0 - 1.0 + 3.0 * C.xxx;
vec3 x1 = x0 - i1 + C.xxx;
vec3 x2 = x0 - i2 + C.yyy; // 2.0*C.x = 1/3 = C.y
vec3 x3 = x0 - D.yyy; // -1.0+3.0*C.x = -0.5 = -D.y
// Permutations
i = mod289(i);
vec4 p = permute( permute( permute(
i.z + vec4(0.0, i1.z, i2.z, 1.0 ))
+ i.y + vec4(0.0, i1.y, i2.y, 1.0 ))
+ i.x + vec4(0.0, i1.x, i2.x, 1.0 ));
// Gradients: 7x7 points over a square, mapped onto an octahedron.
// The ring size 17*17 = 289 is close to a multiple of 49 (49*6 = 294)
float n_ = 0.142857142857; // 1.0/7.0
vec3 ns = n_ * D.wyz - D.xzx;
vec4 j = p - 49.0 * floor(p * ns.z * ns.z); // mod(p,7*7)
vec4 x_ = floor(j * ns.z);
vec4 y_ = floor(j - 7.0 * x_ ); // mod(j,N)
vec4 x = x_ *ns.x + ns.yyyy;
vec4 y = y_ *ns.x + ns.yyyy;
vec4 h = 1.0 - abs(x) - abs(y);
vec4 b0 = vec4( x.xy, y.xy );
vec4 b1 = vec4( x.zw, y.zw );
//vec4 s0 = vec4(lessThan(b0,0.0))*2.0 - 1.0;
//vec4 s1 = vec4(lessThan(b1,0.0))*2.0 - 1.0;
vec4 s0 = floor(b0)*2.0 + 1.0;
vec4 s1 = floor(b1)*2.0 + 1.0;
vec4 sh = -step(h, vec4(0.0));
vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ;
vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ;
vec3 p0 = vec3(a0.xy,h.x);
vec3 p1 = vec3(a0.zw,h.y);
vec3 p2 = vec3(a1.xy,h.z);
vec3 p3 = vec3(a1.zw,h.w);
// Normalise gradients
vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));
p0 *= norm.x;
p1 *= norm.y;
p2 *= norm.z;
p3 *= norm.w;
// Mix final noise value
vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
m = m * m;
return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1),
dot(p2,x2), dot(p3,x3) ) );
}
void main() {
vUv = uv;
vec3 pos = position;
float noiseFreq = 3.5;
float noiseAmp = 0.15;
vec3 noisePos = vec3(pos.x * noiseFreq + uTime, pos.y, pos.z);
pos.z += snoise(noisePos) * noiseAmp;
vWave = pos.z;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.);
}
`;
const displacementFrag = `
precision mediump float;
varying vec2 vUv;
varying float vWave;
uniform sampler2D uTexture;
void main() {
float wave = vWave * 0.25;
vec3 texture = texture2D(uTexture, vUv + wave).rgb;
gl_FragColor = vec4(texture, 1.);
}`;
let width, height;
let scene, camera, renderer;
let controls;
let wireframe;
let clock;
let planetShaderMaterial;
let jupiterSphere;
const init = ( params ) => {
colors = params.colors;
model = params.model;
width = params.width;
height = params.height;
scene = new THREE.Scene();
clock = new THREE.Clock();
camera = new THREE.PerspectiveCamera( params.fov, width / height, params.near, params.far );
camera.position.set( params.cameraPos.x, params.cameraPos.y, params.cameraPos.z );
renderer = new THREE.WebGLRenderer({ antialias: true, logarithmicDepthBuffer: true });
renderer.setSize( width, height );
renderer.outputEncoding = THREE.sRGBEncoding;
wireframe = params.wireframe;
renderer.render( scene, camera );
controls = new OrbitControls( camera, renderer.domElement );
addLights();
addPlanet();
}
const addLights = () => {
const ambientLight = new THREE.AmbientLight( 0xffffff, 10 );
scene.add( ambientLight );
const dir = 1024;
const light = new THREE.DirectionalLight( 0xffffff, 1 );
light.position.set( 100, 100, 50 );
light.castShadow = true;
light.shadow.camera.left = -dir;
light.shadow.camera.right = dir;
light.shadow.camera.top = dir;
light.shadow.camera.bottom = -dir;
light.shadow.camera.near = 0.1;
light.shadow.camera.far = 1000;
light.shadow.mapSize.x = 1024;
light.shadow.mapSize.y = 1024;
scene.add( light );
}
// ******** HERE'S THE ShaderMaterial implementation
const addPlanet = () => {
const texture = new THREE.TextureLoader().load( './assets/textures/disp/jupiter.jpg' );
planetShaderMaterial = new THREE.ShaderMaterial( {
uniforms: {
uTime: { value: 0.0 },
uTexture: { value: texture }
},
wireframe: false,
side: THREE.FrontSide,
vertexShader: displacementVert,
fragmentShader: displacementFrag,
});
// these have no effect. Repeat Wrapping just repeats the current effect
planetShaderMaterial.uniforms.uTexture.value.wrapS = THREE.ClampToEdgeWrapping;
planetShaderMaterial.uniforms.uTexture.value.wrapT = THREE.ClampToEdgeWrapping;
jupiterSphere = new THREE.Mesh( new THREE.SphereBufferGeometry( 25, 32, 32), planetShaderMaterial );
scene.add( jupiterSphere );
}
const render = () => {
planetShaderMaterial.uniforms.uTime.value = clock.getElapsedTime();
renderer.render( scene, camera );
}
const resize = ( width, height ) => {
windowWidth = width;
windowHeight = height;
camera.aspect = width / height;
camera.updateProjectionMatrix();
renderer.setSize( width, height );
}
const getRenderer = () => {
return renderer;
}
const TestWorld = {
init,
render,
resize,
getRenderer
};
export default TestWorld;
The problem probably lies in the magnitude of your uv displacement. This is essentially what your shader is doing:
vWave = pos.z;
float wave = vWave * 0.25;
vec3 texture = texture2D(uTexture, vUv + wave);
Your SphereGeometry has a radius of 25, so you're displacing your UVs by 25 * 0.25, based on their depth along the z-axis. This means you're getting UVs with a range of about [-6.25, 6.25].
You could re-calculate this value to be smaller (keep in mind that UVs are typically in the [0, 1] range, so a displacement of 6 will be far outside that range). Or, you could keep your UV displacement really large and allow the texture to repeat with:
texture.wrapS = THREE.RepeatWrapping;
texture.wrapT = THREE.RepeatWrapping;
You can read about wrapping in the Texture docs page
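For the first option, a minimal sketch of the idea (my own assumption, not from the original answer): since vWave is roughly in the ±25 range on a radius-25 sphere, scaling it by a much smaller factor keeps the UV offset well inside [0, 1].
// Hypothetical tweak to the question's fragment shader: use a smaller
// scale factor so the UV offset stays small relative to the [0, 1] range.
void main() {
  float wave = vWave * 0.01;  // instead of 0.25; the offset is now roughly +/-0.25
  vec3 texture = texture2D(uTexture, vUv + wave).rgb;
  gl_FragColor = vec4(texture, 1.0);
}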

Three.js uniform dashed line relative to camera

I'm working to display geometric figures in 3D, using three.js.
When you draw hidden lines as dashed lines by hand, the dashes are regular for all of them. This means that a line parallel to the camera plane and a line (nearly) perpendicular to the camera plane should have the same dash length and gap.
But this seems to not work with LineDashedMaterial.
For the attached example, I'm using this (very) basic code:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera( 75, window.innerWidth/window.innerHeight, 0.1, 1000 );
var renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
var geometry = new THREE.BoxGeometry( 2, 2, 2 );
var LINES_DASHED = new THREE.LineSegments(
new THREE.EdgesGeometry(geometry),
new THREE.LineDashedMaterial({
linewidth: 2,
color: 0x000000,
dashSize: 0.2,
gapSize: 0.1,
depthTest: false,
polygonOffset: true, polygonOffsetFactor: 1, polygonOffsetUnits: 1
})
);
LINES_DASHED.computeLineDistances();
scene.add( LINES_DASHED );
scene.background = new THREE.Color( 0xffffff);
camera.position.z = 5;
var animate = function () {
requestAnimationFrame( animate );
LINES_DASHED.rotation.x += 0.01;
LINES_DASHED.rotation.y += 0.01;
renderer.render( scene, camera );
};
animate();
body { margin: 0; }
canvas { width: 100%; height: 100% }
<script src="https://threejs.org/build/three.min.js"></script>
Working example:
https://bs4.scolcours.ch/_dev/3js_ex.php
I thought that using:
line.computeLineDistances();
would solve the problem, but it seems to calculate the line length in 3D space (which seems logical).
Is there something I missed?
Thanks for your help!
That's a broad task. It seems that THREE.LineDashedMaterial does not support this.
But it is possible to write a shader and to use a THREE.ShaderMaterial.
The trick is to know the start of the line segment in the fragment shader. In general this is easy to do with the flat interpolation qualifier.
Sadly WebGL 1.0 / GLSL ES 1.00 doesn't support this. So we have to use WebGL 2.0 / GLSL ES 3.00.
In OpenGL ES there exists the extension GL_NV_shader_noperspective_interpolation. Unfortunately there doesn't seem to be a corresponding WebGL extension. (See WebGL Extension Registry)
So let's create a THREE.WebGLRenderer with a WebGL2 context. See How to use WebGL2:
var canvas = document.createElement( 'canvas' );
var context = canvas.getContext( 'webgl2' );
var renderer = new THREE.WebGLRenderer( { canvas: canvas, context: context } );
The vertex shader has to pass the normalized device coordinate to the fragment shader twice: once with default interpolation and once with no (flat) interpolation. As a result, in the fragment shader the first input contains the NDC coordinate of the current position on the line and the latter the NDC coordinate of the start of the line.
flat out vec3 startPos;
out vec3 vertPos;
void main() {
vec4 pos = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
gl_Position = pos;
vertPos = pos.xyz / pos.w;
startPos = vertPos;
}
In addition to the varying inputs, the fragment shader has uniform variables: u_resolution contains the width and height of the viewport, u_dashSize the length of a dash and u_gapSize the length of a gap, both in pixels.
With these, the length of the line from its start to the current fragment can be calculated:
vec2 dir = (vertPos.xy-startPos.xy) * u_resolution/2.0;
float dist = length(dir);
Fragments that fall on a gap can then be discarded with the discard command:
if (fract(dist / (u_dashSize + u_gapSize)) > u_dashSize/(u_dashSize + u_gapSize))
discard;
Fragment shader:
precision highp float;
flat in vec3 startPos;
in vec3 vertPos;
uniform vec3 u_color;
uniform vec2 u_resolution;
uniform float u_dashSize;
uniform float u_gapSize;
void main(){
vec2 dir = (vertPos.xy-startPos.xy) * u_resolution/2.0;
float dist = length(dir);
if ( fract(dist / (u_dashSize + u_gapSize)) > u_dashSize/(u_dashSize + u_gapSize) )
discard;
gl_FragColor = vec4(u_color.rgb, 1.0);
}
Set up the THREE.ShaderMaterial and the uniforms:
var uniforms = {
u_resolution: {type: 'v2', value: {x: vpSize[0], y: vpSize[1]}},
u_dashSize : {type:'f', value: 10.0},
u_gapSize : {type:'f', value: 5.0},
u_color : {type: 'v3', value: {x:0.0, y:0.0, z:0.0} }
};
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: document.getElementById('vertex-shader').textContent,
fragmentShader: document.getElementById('fragment-shader').textContent
});
var LINES_DASHED = new THREE.LineSegments(
new THREE.EdgesGeometry(geometry),
material);
Note that if the resolution of the canvas changes, the value of u_resolution has to be updated, e.g.:
LINES_DASHED.material.uniforms.u_resolution.value.x = window.innerWidth;
LINES_DASHED.material.uniforms.u_resolution.value.y = window.innerHeight;
I applied the suggestions to your original code. See the preview and the example:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera( 60, window.innerWidth/window.innerHeight, 0.1, 1000 );
var canvas = document.createElement( 'canvas' );
var context = canvas.getContext( 'webgl2' );
var renderer = new THREE.WebGLRenderer( { canvas: canvas, context: context } );
var vpSize = [window.innerWidth, window.innerHeight];
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
var geometry = new THREE.BoxGeometry( 2, 2, 2 );
var uniforms = {
u_resolution: {type: 'v2', value: {x: vpSize[0], y: vpSize[1]}},
u_dashSize : {type:'f', value: 10.0},
u_gapSize : {type:'f', value: 5.0},
u_color : {type: 'v3', value: {x:0.0, y:0.0, z:0.0} }
};
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: document.getElementById('vertex-shader').textContent,
fragmentShader: document.getElementById('fragment-shader').textContent
});
var LINES_DASHED = new THREE.LineSegments(
new THREE.EdgesGeometry(geometry),
material);
LINES_DASHED.computeLineDistances();
scene.add( LINES_DASHED );
scene.background = new THREE.Color( 0xffffff);
camera.position.z = 5;
var animate = function () {
requestAnimationFrame( animate );
LINES_DASHED.rotation.x += 0.01;
LINES_DASHED.rotation.y += 0.01;
renderer.render( scene, camera );
};
window.onresize = function() {
vpSize = [window.innerWidth, window.innerHeight];
LINES_DASHED.material.uniforms.u_resolution.value.x = window.innerWidth;
LINES_DASHED.material.uniforms.u_resolution.value.y = window.innerHeight;
renderer.setSize(window.innerWidth, window.innerHeight);
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
}
animate();
<script type='x-shader/x-vertex' id='vertex-shader'>
flat out vec3 startPos;
out vec3 vertPos;
void main() {
vec4 pos = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
gl_Position = pos;
vertPos = pos.xyz / pos.w;
startPos = vertPos;
}
</script>
<script type='x-shader/x-fragment' id='fragment-shader'>
precision highp float;
flat in vec3 startPos;
in vec3 vertPos;
uniform vec3 u_color;
uniform vec2 u_resolution;
uniform float u_dashSize;
uniform float u_gapSize;
void main(){
vec2 dir = (vertPos.xy-startPos.xy) * u_resolution.xy/2.0;
float dist = length(dir);
if (fract(dist / (u_dashSize + u_gapSize)) > u_dashSize/(u_dashSize + u_gapSize))
discard;
gl_FragColor = vec4(u_color.rgb, 1.0);
}
</script>
<script src="https://rawcdn.githack.com/mrdoob/three.js/r128/build/three.js"></script>

Fragment shader with reflection issue three.js

I have a glTF cup model with a texture and a PBR effect. I want to apply a reflection of the environment (cube reflection). I'm facing an issue while applying the reflection: the color and texture change and only the environment reflection comes through. I am unable to solve this; is my shader wrong, or is it some other issue? I don't have much knowledge of shader programming. How should I get the proper color (as in the first image) together with the reflection? I have attached two images, without reflection and with reflection. The reflection works fine, but I have no clue why the proper color is not coming through. Kindly help.
My shader program:
var meshlambert_vert =
varying vec3 vReflect;
varying vec3 vRefract[3];
varying float vReflectionFactor;
attribute vec3 a_normal;
varying vec3 v_normal;
varying vec3 v_position;
uniform mat3 u_normalMatrix;
void main() {
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
v_position = mvPosition.xyz;
vec4 worldPosition = modelMatrix * vec4( position, 1.0 );
vec3 worldNormal = normalize( mat3( modelMatrix[0].xyz, modelMatrix[1].xyz, modelMatrix[2].xyz ) * normal );
vec3 I = worldPosition.xyz - cameraPosition;
vReflect = reflect( I, worldNormal );
v_normal = u_normalMatrix * a_normal;
vRefract[0] = refract( normalize( I ), worldNormal, 0.02 );
vRefract[1] = refract( normalize( I ), worldNormal, 0.02 * 0.2);
vRefract[2] = refract( normalize( I ), worldNormal, 0.02 * 0.2 );
vReflectionFactor = 0.1 + 1.0 * pow( 1.0 + dot( normalize( I ), worldNormal ), 0.2 );
gl_Position = projectionMatrix * mvPosition;
};
var meshlambert_frag =
uniform samplerCube tCube;
varying vec3 vReflect;
varying vec3 vRefract[3];
varying float vReflectionFactor;
uniform vec4 u_ambient;
uniform vec4 u_emission;
uniform vec4 u_specular;
uniform vec4 u_diffuse;
varying vec3 v_normal;
varying vec3 v_position;
void main() {
vec4 color = vec4(0., 0.29411,0.47843, 1.0);
vec3 diffuseLight = vec3(0., 0., 0.);
vec3 u_light2Color = vec3(1.0,1.0,1.0);
vec4 diffuse = vec4(0.0, 0.0, 0.0, 1.0);
vec3 specularLight = vec3(0.5, 0.5,0.5);
float specularIntensity = 0.5;
float attenuation = 0.5;
vec3 l = vec3(0.0,0.5,0.5);
vec3 u_light0Color = vec3(1.0,1.0,1.0);
vec4 emission;
vec4 ambient;
vec4 specular;
ambient = u_ambient;
diffuse = u_diffuse;
emission = u_emission;
specular = u_specular;
vec3 ambientLight = vec3(0., 0., 0.);
ambientLight += u_light2Color;
ambient.xyz *= ambientLight;
color.xyz += ambient.xyz;
specularLight += u_light0Color * specularIntensity;
specular.xyz *= specularLight;
color.xyz += specular.xyz;
vec3 normal = normalize(v_normal);
if ( dot( normal, v_position ) > 0.0 ) {
normal *= -1.0;
}
diffuseLight += u_light0Color * max(dot(normal,l), 0.) * attenuation;
diffuse.xyz *= diffuseLight;
color.xyz += diffuse.xyz;
color.xyz += emission.xyz;
vec4 reflectedColor = textureCube( tCube, vec3( -vReflect.x, vReflect.yz ) );
vec4 refractedColor = vec4( 0.0 );
refractedColor.r = textureCube( tCube, vec3( -vRefract[0].x, vRefract[0].yz ) ).r;
refractedColor.g = textureCube( tCube, vec3( -vRefract[1].x, vRefract[1].yz ) ).g;
refractedColor.b = textureCube( tCube, vec3( -vRefract[2].x, vRefract[2].yz ) ).b;
gl_FragColor = mix( color, reflectedColor, clamp( 0.98, 0.0, 1.0 ) );
}";
Cube mapping code with reflection:
var loader = new THREE.CubeTextureLoader();
loader.setPath( 'textures/env1/' );
var textureCube = loader.load( [
'posx.jpg', 'negx.jpg',
'posy.jpg', 'negy.jpg',
'posz.jpg', 'negz.jpg'
] );
textureCube.mapping = THREE.CubeReflectionMapping;
var cubeShader = THREE.ShaderLib[ "cube" ];
var cubeMaterial = new THREE.ShaderMaterial( {
fragmentShader: cubeShader.fragmentShader,
vertexShader: cubeShader.vertexShader,
uniforms: cubeShader.uniforms,
depthWrite: false,
side: THREE.BackSide
} );
cubeMaterial.uniforms[ "tCube" ].value = textureCube;
cubeMesh = new THREE.Mesh( new THREE.BoxBufferGeometry( 100, 100, 100 ), cubeMaterial );
scene.add( cubeMesh );
var sphereMaterial=new THREE.MeshLambertMaterial( {envMap: textureCube } );
object.traverse( function ( child ) {
if ( child instanceof THREE.Mesh ) {
child.material = sphereMaterial;
}
} );
You mix the reflection color and the color of the material with a constant ratio of 98:2:
gl_FragColor = mix( color, reflectedColor, clamp( 0.98, 0.0, 1.0 ) );
The color component of the material is too weak to "see" it.
Try a ratio of 50:50 for debugging:
gl_FragColor = mix( color, reflectedColor, 0.5 );
But probably you want to use vReflectionFactor for the ratio:
gl_FragColor = mix( color, reflectedColor, clamp( vReflectionFactor, 0.0, 1.0 ) );
Note further that if you use vReflectionFactor, you will see the reflection only, because the result of
vReflectionFactor = 0.1 + 1.0 * pow( 1.0 + dot( normalize( I ), worldNormal ), 0.2 );
is always greater than 1.0. This is because 1.0 + dot( normalize( I ), worldNormal ) is greater than 1.0.
I don't know what you want to achieve, but you can use
vReflectionFactor = 0.1 + pow( dot(normalize(I), worldNormal), 0.2 );
or
vReflectionFactor = 0.1 + pow( 1.0 - dot(normalize(I), worldNormal), 0.2 );
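For reference, a minimal sketch (my own assumption, not code from the original answer) of how the end of the fragment shader could then look, blending by the variable factor instead of the constant 0.98:
// Hypothetical end of the fragment shader: blend the shaded material color
// with the cube-map reflection using the clamped Fresnel-style factor.
float reflectAmount = clamp( vReflectionFactor, 0.0, 1.0 );
gl_FragColor = vec4( mix( color.rgb, reflectedColor.rgb, reflectAmount ), 1.0 );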

Threejs shader implementation

Over the past weeks I have been experimenting with the three.js library and shaders, and I'm stuck implementing a shader on my model. I found an interesting example on pixelshaders.com that I want to apply to a three.js model.
http://pixelshaders.com/examples/noise.html
It's the last example at the bottom of the page that I'm trying to implement.
I'm trying to add it to the 3D model that can be found in the link below:
http://martinr.nl/lab/Speeltuin/webgl_shader2.html
The hard thing is that when I add the code from the example, the 3D model disappears. This makes it really hard to debug and find the error.
This is shader code that works, but it's not the shader I want:
<script id="fragmentShader" type="x-shader/x-fragment">
varying vec2 vUv;
uniform float time;
uniform vec2 resolution;
precision mediump float;
void main( void ) {
vec2 position = 2.0 + 2.0 * vUv;
float red = abs( sin( position.x / position.y + time / 5.0 ) );
float green = abs( sin( position.x / position.y + time / 4.0 ) );
float blue = abs( sin( position.x / position.y + time / 3.0 ) );
gl_FragColor = vec4( red, green, blue, 1.0 );
}
</script>
<script id="vertexShader" type="x-shader/x-vertex">
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0);
gl_Position = projectionMatrix * mvPosition;
}
</script>
When I try to implement the shader of the above example the 3D model disappears.
Does anyone know how to implement the shader from the pixelshaders.com example on the model in my example?
Or does anyone have any tips what I could try to make it work?
I replaced your fragment shader with the code from pixelshaders.com. Console reported the following error:
> THREE.WebGLProgram: shader error: 0 gl.VALIDATE_STATUS false
> gl.getPRogramInfoLog Varyings with the same name but different type,
> or statically used varyings in fragment shader are not declared in
> vertex shader: position
A varying variable is essentially an interface between the vertex shader and the fragment shader. This error is telling us that position is declared in the fragment shader, but not in the vertex shader.
You actually had the required variable in your vertex shader...except it was named vUv. All I had to do was make the variable names consistent.
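In other words, the essential change is that the varying has to be declared with the same name and type on both sides (a minimal sketch of the idea, using the names from the vertex shader above):
// Vertex shader: declare and write the varying.
varying vec2 vUv;
void main() {
  vUv = uv;
  gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}

// Fragment shader: declare the same varying and read from it
// wherever the pixelshaders.com code used a varying named "position".
precision mediump float;
varying vec2 vUv;
void main() {
  gl_FragColor = vec4( vUv, 0.0, 1.0 ); // placeholder output using the varying
}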
Full source (I took the scaling off the time in the render() function too):
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - materials - shaders [custom]</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
color: #ffffff;
font-family:Monospace;
font-size:13px;
text-align:center;
font-weight: bold;
background-color: #050505;
margin: 0px;
overflow: hidden;
}
a {
color: #ffffff;
}
#oldie a { color:#da0 }
</style>
</head>
<body>
<div id="container"></div>
<div id="info">three.js - shader material demo. featuring Monjori by Mic</div>
<link rel="stylesheet" href="css/skeleton.css">
<link rel="stylesheet" href="css/normalize.css">
<link rel="stylesheet" href="css/style.css">
<link rel="stylesheet" href="css/codemirror.css">
<script src="js/lib/three.min.js"></script>
<script src="js/lib/Detector.js"></script>
<script src="js/geo.js"></script>
<script id="fragmentShader" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vUv;
uniform float time;
float random(float p) {
return fract(sin(p)*10000.);
}
float noise(vec2 p) {
return random(p.x + p.y*10000.);
}
vec2 sw(vec2 p) {return vec2( floor(p.x) , floor(p.y) );}
vec2 se(vec2 p) {return vec2( ceil(p.x) , floor(p.y) );}
vec2 nw(vec2 p) {return vec2( floor(p.x) , ceil(p.y) );}
vec2 ne(vec2 p) {return vec2( ceil(p.x) , ceil(p.y) );}
float smoothNoise(vec2 p) {
vec2 inter = smoothstep(0., 1., fract(p));
float s = mix(noise(sw(p)), noise(se(p)), inter.x);
float n = mix(noise(nw(p)), noise(ne(p)), inter.x);
return mix(s, n, inter.y);
return noise(nw(p));
}
float movingNoise(vec2 p) {
float total = 0.0;
total += smoothNoise(p - time);
total += smoothNoise(p*2. + time) / 2.;
total += smoothNoise(p*4. - time) / 4.;
total += smoothNoise(p*8. + time) / 8.;
total += smoothNoise(p*16. - time) / 16.;
total /= 1. + 1./2. + 1./4. + 1./8. + 1./16.;
return total;
}
float nestedNoise(vec2 p) {
float x = movingNoise(p);
float y = movingNoise(p + 100.);
return movingNoise(p + vec2(x, y));
}
void main() {
vec2 p = vUv * 6.;
float brightness = nestedNoise(p);
gl_FragColor.rgb = vec3(brightness);
gl_FragColor.a = 1.;
}
</script>
<script id="vertexShader" type="x-shader/x-vertex">
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0);
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script>
if ( ! Detector.webgl ) Detector.addGetWebGLMessage();
var container;
var camera, controls, scene, renderer;
var uniforms;
var clock = new THREE.Clock();
init();
animate();
function init() {
container = document.getElementById( 'container' );
camera = new THREE.PerspectiveCamera( 40, window.innerWidth / window.innerHeight, 1, 3000 );
camera.position.z = 2;
scene = new THREE.Scene();
var geometry = new THREE.BoxGeometry( 0.5, 0.5, 0.5 );
uniforms = {
time: { type: "f", value: 1.0 },
resolution: { type: "v2", value: new THREE.Vector3() }
};
var material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader').textContent
});
var mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
container.appendChild( renderer.domElement );
onWindowResize();
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize( event ) {
uniforms.resolution.value.x = window.innerWidth;
uniforms.resolution.value.y = window.innerHeight;
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
//
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
var delta = clock.getDelta();
uniforms.time.value += delta;
// for ( var i = 0; i < scene.children.length; i ++ ) {
//
// var object = scene.children[ i ];
//
// object.rotation.y += delta * 0.5 * ( i % 2 ? 1 : -1 );
// object.rotation.x += delta * 0.5 * ( i % 2 ? -1 : 1 );
//
// }
renderer.render( scene, camera );
}
</script>
</body>
</html>

three.js, make dashed line through shader

I'm trying to make a shader for my dashed line, but all I can get is a white line from the origin toward the +x direction (maybe).
If I render this curve (Ellipse.curve) without the shader, it displays just fine; with the shader it doesn't.
Of course, I don't know why. Please help me.
<script type="x-shader/x-vertex" id="vs-orbit">
uniform float time;
attribute float sovereign;
varying vec3 vColor;
vec3 setColorBySovereign() {
vec3 color;
color.r = 0.5 - ( 0.5 * sovereign );
color.g = 0.25 + ( 0.25 * sovereign );
color.b = 0.5 + ( 0.25 * sovereign );
return _color;
};
void main() {
vColor = setColorBySovereign();
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
};
</script>
<script type="x-shader/x-fragment" id="fs-orbit">
attribute vec3 vColor;
void main() {
gl_FragColor = vec4(vColor, 1.0);
};
</script>
// ... Below is where use shader above;
Ellipse.prototype.createOrbit = function( soveregin ) {
var shape, mater;
var sov = soveregin || 0.0;
shape = new THREE.Geometry();
var v2;
for(var i=0; i<721; i++) {
v2 = this.curve.getPoint(i);
shape.vertices.push(new THREE.Vector3(v2.x, v2.y, 0));
};
shape.computeLineDistances();
mater = new THREE.ShaderMaterial({
attribute: { soveregin: sov },
vertexShader: document.getElementById('vs-orbit'),
fragmentShader: document.getElementById('fs-orbit')
});
this.orbit = new THREE.Line( shape, mater, THREE.LineStrip );
return;
}
I solved this problem: after main() {} I shouldn't use ';', and there were other syntax errors too.
In the fragment shader, I have to declare vColor with the 'varying' qualifier, just like in the vertex shader (I had declared it as 'attribute'). According to the GLSL reference, in a fragment shader 'varying' is equivalent to the 'in' qualifier.
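A corrected version of the two shader blocks, applying those fixes (my reconstruction, not code posted in the original answer), could look like this:
<script type="x-shader/x-vertex" id="vs-orbit">
uniform float time;
attribute float sovereign;
varying vec3 vColor;
// No stray ';' after function bodies, and return the declared variable.
vec3 setColorBySovereign() {
    vec3 color;
    color.r = 0.5 - ( 0.5 * sovereign );
    color.g = 0.25 + ( 0.25 * sovereign );
    color.b = 0.5 + ( 0.25 * sovereign );
    return color;
}
void main() {
    vColor = setColorBySovereign();
    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>
<script type="x-shader/x-fragment" id="fs-orbit">
// Declared as 'varying' here too, so it matches the vertex shader.
varying vec3 vColor;
void main() {
    gl_FragColor = vec4( vColor, 1.0 );
}
</script>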
