Three.js animated sprite / texture offset for shadow shader

I'm using http://stemkoski.github.io/Three.js/ for the animated sprite.
For the shadow, I'm using this shader:
<script type="x-shader/x-fragment" id="fragmentShaderDepth">
uniform sampler2D texture;
varying vec2 vUV;
vec4 pack_depth( const in float depth ) {
const vec4 bit_shift = vec4( 256.0 * 256.0 * 256.0, 256.0 * 256.0, 256.0, 1.0 );
const vec4 bit_mask = vec4( 0.0, 1.0 / 256.0, 1.0 / 256.0, 1.0 / 256.0 );
vec4 res = fract( depth * bit_shift );
res -= res.xxyz * bit_mask;
return res;
}
void main() {
vec4 pixel = texture2D( texture, vUV );
if ( pixel.a < 0.5 ) discard;
gl_FragData[ 0 ] = pack_depth( gl_FragCoord.z );
}
</script>
<script type="x-shader/x-vertex" id="vertexShaderDepth">
varying vec2 vUV;
void main() {
vUV = 0.75 * uv;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
}
</script>
To get just one sprite I scale the UVs:
vec2 tailleA = vec2( taillex, tailley ); // taillex = 1/22 (sprites across), tailley = 1/3 (sprites down)
vUV = vec2( tailleA[0] * uv[0], tailleA[1] * uv[1] );
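For reference, the same tile math can be driven by uniforms that are updated from JavaScript every frame; a minimal sketch (the uniform names spriteOffset/spriteRepeat, and reading the current frame from texture.offset/texture.repeat as Stemkoski's TextureAnimator sets them, are assumptions rather than anything three.js generates):
var depthUniforms = {
    texture:      { type: "t",  value: spriteTexture },              // the sprite sheet
    spriteOffset: { type: "v2", value: new THREE.Vector2( 0, 0 ) },
    spriteRepeat: { type: "v2", value: new THREE.Vector2( 1 / 22, 1 / 3 ) }
};
// in the depth vertex shader: vUV = spriteOffset + spriteRepeat * uv;
// each frame, mirror whatever the animator wrote into the visible texture:
function updateDepthUniforms() {
    depthUniforms.spriteOffset.value.copy( spriteTexture.offset );
    depthUniforms.spriteRepeat.value.copy( spriteTexture.repeat );
}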
How can I get the texture offset (the one three.js generates automatically) in the shader?
How can I get an animated shadow?
Thanks in advance for your help.

Related

Normal Mapping on Texture Splatted Terrain

I have developed a terrain generation system with texture splatting in Three.js and I am having trouble applying normal mapping to it. How should I go about it? This code is fully functional as of now.
I have looked at some tutorials on WebGL shading and normal mapping, but I could not find one that accurately fits my code.
FRAGMENT_SHADER: `
uniform sampler2D albedoA;
uniform sampler2D albedoB;
uniform sampler2D albedoC;
uniform sampler2D albedoD;
uniform sampler2D albedoE;
uniform sampler2D normalA;
uniform sampler2D normalB;
uniform sampler2D normalC;
uniform sampler2D normalD;
uniform sampler2D normalE;
uniform float repeatScale;
uniform vec3 sunPosition;
varying vec2 vUV;
varying float vAmount;
varying vec3 vNormal;
varying vec3 vWorldPosition;
void main()
{
vec3 diffA = (smoothstep(0.01, 0.25, vAmount) - smoothstep(0.25, 0.35, vAmount)) * texture2D(albedoA, vUV * repeatScale).rgb;
vec3 diffB = (smoothstep(0.24, 0.27, vAmount) - smoothstep(0.27, 0.37, vAmount)) * texture2D(albedoB, vUV * repeatScale).rgb;
vec3 diffC = (smoothstep(0.28, 0.32, vAmount) - smoothstep(0.32, 0.42, vAmount)) * texture2D(albedoC, vUV * repeatScale).rgb;
vec3 diffD = (smoothstep(0.30, 0.60, vAmount) - smoothstep(0.40, 0.70, vAmount)) * texture2D(albedoD, vUV * repeatScale).rgb;
vec3 diffE = (smoothstep(0.50, 0.85, vAmount)) * texture2D(albedoE, vUV * repeatScale).rgb;
vec3 albedoVector = diffA + diffB + diffC + diffD + diffE;
vec3 normA = (smoothstep(0.01, 0.25, vAmount) - smoothstep(0.25, 0.35, vAmount)) * texture2D(normalA, vUV * repeatScale).rgb;
vec3 normB = (smoothstep(0.24, 0.27, vAmount) - smoothstep(0.27, 0.37, vAmount)) * texture2D(normalB, vUV * repeatScale).rgb;
vec3 normC = (smoothstep(0.28, 0.32, vAmount) - smoothstep(0.32, 0.42, vAmount)) * texture2D(normalC, vUV * repeatScale).rgb;
vec3 normD = (smoothstep(0.30, 0.60, vAmount) - smoothstep(0.40, 0.70, vAmount)) * texture2D(normalD, vUV * repeatScale).rgb;
vec3 normE = (smoothstep(0.50, 0.85, vAmount)) * texture2D(normalE, vUV * repeatScale).rgb;
vec3 normalVector = normA + normB + normC + normD + normE;
float diffuseFloat = max(dot(normalize(sunPosition - vWorldPosition), vNormal), 0.0);
if (diffuseFloat < 0.25) { diffuseFloat = 0.25; }
if (diffuseFloat > 1.0) { diffuseFloat = 1.0; }
gl_FragColor = vec4(diffuseFloat * albedoVector, 1.0);
}
`,
VERTEX_SHADER: `
uniform sampler2D heightTexture;
varying vec2 vUV;
varying float vAmount;
varying vec3 vNormal;
varying vec3 vWorldPosition;
void main()
{
vUV = uv;
vAmount = texture2D(heightTexture, uv).r;
vec4 worldPosition = modelViewMatrix * vec4(position, 1.0);
vWorldPosition = worldPosition.xyz;
vNormal = vec3(normal);
gl_Position = projectionMatrix * worldPosition;
}
`
I am using THREE.ShaderMaterial, and as of now the code splats the textures and accounts for lighting and shadows, but not the normal maps.
You can calculate the normal of a surface (terrain) by a simple numerical approximation; see the finite difference method.
You have to know the size of the height map texture. I recommend passing the size of the texture in a uniform variable:
uniform sampler2D heightTexture;
uniform vec2 heightTextureSize; // (width, height) of heightTexture;
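On the JavaScript side this could look roughly like the following (a sketch; heightTexture is assumed to be fully loaded, so its image dimensions are available):
var terrainUniforms = {
    heightTexture:     { value: heightTexture },
    heightTextureSize: { value: new THREE.Vector2( heightTexture.image.width, heightTexture.image.height ) }
    // ...plus the albedo/normal/sun uniforms used by the splatting shader
};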
Calculate the offset between adjacent texels of the texture:
vec2 offset = 1.0 / heightTextureSize;
Read the heights of the adjacent texels of the texture:
float vA  = texture2D(heightTexture, uv).r;
float vAL = texture2D(heightTexture, uv + vec2(-offset.x, 0.0)).r;
float vAR = texture2D(heightTexture, uv + vec2( offset.x, 0.0)).r;
float vAB = texture2D(heightTexture, uv + vec2( 0.0, -offset.y)).r;
float vAT = texture2D(heightTexture, uv + vec2( 0.0,  offset.y)).r;
Finally, you can calculate the approximate normal vector in texture space:
vec3 normalMap = normalize( vec3(vAL - vAR, vAT - vAB, 2.0) );
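To use it for lighting a Y-up terrain, one option is to swizzle the texture-space normal so its z axis (the "out of the height map" direction) becomes world Y and feed that into the diffuse term instead of the interpolated vNormal. This is only a sketch; the signs may need flipping depending on the plane's orientation and UV layout:
vec3 terrainNormal = normalize( vec3( normalMap.x, normalMap.z, normalMap.y ) );
float diffuseFloat = clamp( dot( normalize( sunPosition - vWorldPosition ), terrainNormal ), 0.25, 1.0 );
gl_FragColor = vec4( diffuseFloat * albedoVector, 1.0 );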

Fragment shader with reflection issue three.js

I have a glTF cup model with a texture and a PBR effect. I want to apply a reflection of the environment (cube reflection), but I'm facing an issue when applying it: the color and texture change, and only the environment reflection comes through. I am unable to solve this. Is my shader wrong, or is it some other issue? I don't have much knowledge of shader programming. How can I get the proper color (as in the first image) together with the reflection? I have attached two images, without reflection and with reflection. The reflection works fine, but I have no clue why the proper color is not coming through. Kindly help?
My shader program:
var meshlambert_vert = `
varying vec3 vReflect;
varying vec3 vRefract[3];
varying float vReflectionFactor;
attribute vec3 a_normal;
varying vec3 v_normal;
varying vec3 v_position;
uniform mat3 u_normalMatrix;
void main() {
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
v_position = mvPosition.xyz;
vec4 worldPosition = modelMatrix * vec4( position, 1.0 );
vec3 worldNormal = normalize( mat3( modelMatrix[0].xyz, modelMatrix[1].xyz, modelMatrix[2].xyz ) * normal );
vec3 I = worldPosition.xyz - cameraPosition;
vReflect = reflect( I, worldNormal );
v_normal = u_normalMatrix * a_normal;
vRefract[0] = refract( normalize( I ), worldNormal, 0.02 );
vRefract[1] = refract( normalize( I ), worldNormal, 0.02 * 0.2);
vRefract[2] = refract( normalize( I ), worldNormal, 0.02 * 0.2 );
vReflectionFactor = 0.1 + 1.0 * pow( 1.0 + dot( normalize( I ), worldNormal ), 0.2 );
gl_Position = projectionMatrix * mvPosition;
}`;
var meshlambert_frag = `
uniform samplerCube tCube;
varying vec3 vReflect;
varying vec3 vRefract[3];
varying float vReflectionFactor;
uniform vec4 u_ambient;
uniform vec4 u_emission;
uniform vec4 u_specular;
uniform vec4 u_diffuse;
varying vec3 v_normal;
varying vec3 v_position;
void main() {
vec4 color = vec4(0., 0.29411,0.47843, 1.0);
vec3 diffuseLight = vec3(0., 0., 0.);
vec3 u_light2Color = vec3(1.0,1.0,1.0);
vec4 diffuse = vec4(0.0, 0.0, 0.0, 1.0);
vec3 specularLight = vec3(0.5, 0.5,0.5);
float specularIntensity = 0.5;
float attenuation = 0.5;
vec3 l = vec3(0.0,0.5,0.5);
vec3 u_light0Color = vec3(1.0,1.0,1.0);
vec4 emission;
vec4 ambient;
vec4 specular;
ambient = u_ambient;
diffuse = u_diffuse;
emission = u_emission;
specular = u_specular;
vec3 ambientLight = vec3(0., 0., 0.);
ambientLight += u_light2Color;
ambient.xyz *= ambientLight;
color.xyz += ambient.xyz;
specularLight += u_light0Color * specularIntensity;
specular.xyz *= specularLight;
color.xyz += specular.xyz;
vec3 normal = normalize(v_normal);
if ( dot( normal, v_position ) > 0.0 ) {
normal *= -1.0;
}
diffuseLight += u_light0Color * max(dot(normal,l), 0.) * attenuation;
diffuse.xyz *= diffuseLight;
color.xyz += diffuse.xyz;
color.xyz += emission.xyz;
vec4 reflectedColor = textureCube( tCube, vec3( -vReflect.x, vReflect.yz ) );
vec4 refractedColor = vec4( 0.0 );
refractedColor.r = textureCube( tCube, vec3( -vRefract[0].x, vRefract[0].yz ) ).r;
refractedColor.g = textureCube( tCube, vec3( -vRefract[1].x, vRefract[1].yz ) ).g;
refractedColor.b = textureCube( tCube, vec3( -vRefract[2].x, vRefract[2].yz ) ).b;
gl_FragColor = mix( color, reflectedColor, clamp( 0.98, 0.0, 1.0 ) );
}`;
Cube mapping code with reflection:
var loader = new THREE.CubeTextureLoader();
loader.setPath( 'textures/env1/' );
var textureCube = loader.load( [
'posx.jpg', 'negx.jpg',
'posy.jpg', 'negy.jpg',
'posz.jpg', 'negz.jpg'
] );
textureCube.mapping = THREE.CubeReflectionMapping;
var cubeShader = THREE.ShaderLib[ "cube" ];
var cubeMaterial = new THREE.ShaderMaterial( {
fragmentShader: cubeShader.fragmentShader,
vertexShader: cubeShader.vertexShader,
uniforms: cubeShader.uniforms,
depthWrite: false,
side: THREE.BackSide
} );
cubeMaterial.uniforms[ "tCube" ].value = textureCube;
cubeMesh = new THREE.Mesh( new THREE.BoxBufferGeometry( 100, 100, 100 ), cubeMaterial );
scene.add( cubeMesh );
var sphereMaterial=new THREE.MeshLambertMaterial( {envMap: textureCube } );
object.traverse( function ( child ) {
if ( child instanceof THREE.Mesh ) {
child.material = sphereMaterial;
}
} );
You mix the reflection color and the color of the material by a constant ratio of 98:2
gl_FragColor = mix( color, reflectedColor, clamp( 0.98, 0.0, 1.0 ) );
The color contribution of the material is too weak to "see" it.
Try a ratio of 50:50 for debugging:
gl_FragColor = mix( color, reflectedColor, 0.5 );
But you probably want to use vReflectionFactor for the ratio:
gl_FragColor = mix( color, reflectedColor, clamp( vReflectionFactor, 0.0, 1.0 ) );
Note further that if you use vReflectionFactor, then you will see only the reflection, because the result of
vReflectionFactor = 0.1 + 1.0 * pow( 1.0 + dot( normalize( I ), worldNormal ), 0.2 );
is always greater than 1.0. This is because 1.0 + dot( normalize( I ), worldNormal ) is greater than 1.0.
I don't know what you want to achieve, but you can use
vReflectionFactor = 0.1 + pow( dot(normalize(I), worldNormal), 0.2 );
or
vReflectionFactor = 0.1 + pow( 1.0 - dot(normalize(I), worldNormal), 0.2 );
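If the goal is a view-dependent (fresnel-like) blend, Schlick's approximation is a common alternative; a sketch (F0 is an assumed base reflectivity, not a value from the original code):
float F0 = 0.04; // assumption: typical dielectric base reflectivity
float cosTheta = clamp( dot( -normalize( I ), worldNormal ), 0.0, 1.0 );
vReflectionFactor = F0 + ( 1.0 - F0 ) * pow( 1.0 - cosTheta, 5.0 );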

Shader linking fails without error message only on mobile devices

I'm working on a graphics engine written in C++. We can currently build either for desktop or for the web (using Emscripten). Both platforms use OpenGL ES 2.0.
I have a vertex and a fragment shader:
Vertex Shader
#version 100
attribute vec4 a_position;
attribute vec2 a_texcoord;
attribute vec3 a_normal;
attribute vec3 a_tangent;
attribute vec3 a_bitangent;
uniform mat4 u_model;
uniform mat4 u_view;
uniform mat4 u_projection;
uniform mat3 u_normalMatrix;
varying vec2 v_texcoord;
varying vec3 v_normal;
varying vec3 v_fragPos;
varying mat3 v_TBN;
void main()
{
vec3 T = normalize(vec3(u_model * vec4(a_tangent, 0.0)));
vec3 B = normalize(vec3(u_model * vec4(a_bitangent, 0.0)));
vec3 N = normalize(vec3(u_model * vec4(a_normal, 0.0)));
v_TBN = mat3(T, B, N);
gl_Position = u_projection * u_view * u_model * a_position;
v_texcoord = a_texcoord;
v_normal = u_normalMatrix * a_normal;
v_fragPos = vec3(u_model * a_position);
}
Fragment Shader
#version 100
precision mediump float;
varying vec2 v_texcoord;
varying vec3 v_normal;
varying vec3 v_fragPos;
varying mat3 v_TBN;
struct Material {
sampler2D diffuse;
sampler2D specular;
sampler2D normal;
float shininess;
};
struct Light {
int type;
vec3 position;
vec3 direction;
vec3 ambient;
vec3 diffuse;
vec3 specular;
float constant;
float linear;
float quadratic;
float cutOff;
float outerCutOff;
};
const int MAX_LIGHTS = 16;
uniform vec3 u_viewPos;
uniform Material u_material;
uniform Light u_lights[MAX_LIGHTS];
uniform int u_lightCount;
uniform bool normalMapping;
vec3 calcDirLight(Light light, vec3 normal, vec3 viewDir);
vec3 calcPointLight(Light light, vec3 normal, vec3 viewDir);
vec3 calcSpotLight(Light light, vec3 normal, vec3 viewDir);
void main()
{
vec3 lightOutput = vec3(0.0);
vec3 norm = normalize(v_normal);
if (normalMapping)
{
norm = texture2D(u_material.normal, v_texcoord).rgb;
norm = normalize(norm * 2.0 - 1.0);
norm = normalize(v_TBN * norm);
}
vec3 viewDir = normalize(u_viewPos - v_fragPos);
for(int i = 0; i < MAX_LIGHTS; i++)
{
if (i >= u_lightCount){break;}
if (u_lights[i].type == 0)
{
lightOutput += calcDirLight(u_lights[i], norm, viewDir);
}
else if (u_lights[i].type == 1)
{
lightOutput += calcPointLight(u_lights[i], norm, viewDir);
}
else if (u_lights[i].type == 2)
{
lightOutput += calcSpotLight(u_lights[i], norm, viewDir);
}
}
gl_FragColor = vec4(lightOutput, 1.0);
}
vec3 calcDirLight(Light light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(-light.direction);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
return (ambient + diffuse + specular);
}
vec3 calcPointLight(Light light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(light.position - v_fragPos);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
float distance = length(light.position - v_fragPos);
float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
return (ambient + diffuse + specular);
}
vec3 calcSpotLight(Light light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(light.position - v_fragPos);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
float distance = length(light.position - v_fragPos);
float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
float theta = dot(lightDir, normalize(light.direction));
float epsilon = light.cutOff - light.outerCutOff;
float intensity = clamp((theta - light.outerCutOff) / epsilon, 0.0, 1.0);
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
ambient *= attenuation * intensity;
diffuse *= attenuation * intensity;
specular *= attenuation * intensity;
return (ambient + diffuse + specular);
}
They both compile without errors, but the linking stage fails. The info log (accessed using glGetProgramInfoLog) is empty.
The biggest problem is that it compiles and links perfectly fine on native desktop builds and in desktop browsers (Emscripten); it only fails to link on mobile devices.
I've spent hours trying to figure this out, without success. Is there anything obvious I might have missed?
Edit 1: This is the code that builds the shader program:
GLuint GLES2Shader::buildProgram(GLuint vertexShader, GLuint fragmentShader)
{
GL( GLuint programObject = glCreateProgram() );
GL( glAttachShader(programObject, vertexShader) );
GL( glAttachShader(programObject, fragmentShader) );
GL( glLinkProgram(programObject) );
//check if the program linked successfully
GLint linked;
GL( glGetProgramiv(programObject, GL_LINK_STATUS, &linked) );
if (!linked)
{
DEBUG( GLchar infoLog[512] );
DEBUG( glGetProgramInfoLog(programObject, 512, NULL, infoLog) );
DEBUG( LOGE("ERROR::SHADER::LINKING_FAILED %s", infoLog) );
GL( glDeleteProgram(programObject) );
return 0;
}
return programObject;
}
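One thing worth checking when the log comes back empty is its actual length, along with the device's resource limits; a large uniform array like Light u_lights[16] is a common reason for mobile-only link failures, because GL_MAX_FRAGMENT_UNIFORM_VECTORS can be far smaller on mobile GPUs than on desktop. A debugging sketch using plain GLES2 calls (not tied to the GL()/DEBUG() macros above; needs <vector>):
GLint logLength = 0;
glGetProgramiv(programObject, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 1)
{
    // query the full log instead of a fixed 512-byte buffer
    std::vector<GLchar> log(logLength);
    glGetProgramInfoLog(programObject, logLength, NULL, log.data());
    LOGE("ERROR::SHADER::LINKING_FAILED %s", log.data());
}
GLint maxFragmentUniformVectors = 0;
glGetIntegerv(GL_MAX_FRAGMENT_UNIFORM_VECTORS, &maxFragmentUniformVectors);
LOGE("GL_MAX_FRAGMENT_UNIFORM_VECTORS = %d", maxFragmentUniformVectors);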

Three.js ParticleSystem transparency issue

I'm using Three.js and I have a ParticleSystem where every particle may have a different transparency and color.
Code:
var shaderMaterial = new THREE.ShaderMaterial({
uniforms: customUniforms,
attributes: customAttributes,
vertexShader: document.getElementById('vertexshader').textContent,
fragmentShader: document.getElementById('fragmentshader').textContent,
transparent: true,
alphaTest: 0.5
});
particles = new THREE.ParticleSystem(geometry, shaderMaterial);
particles.dynamic = true;
Vertex shader:
attribute float size;
attribute vec3 color;
varying vec3 vColor;
void main() {
vColor = color;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
//gl_PointSize = size;
gl_PointSize = size * ( 300.0 / length( mvPosition.xyz ) );
gl_Position = projectionMatrix * mvPosition;
}
Fragment shader:
uniform sampler2D texture;
uniform float alpha;
varying vec3 vColor;
void main() {
gl_FragColor = vec4(vColor, 100);
vec4 textureColor = texture2D( texture, gl_PointCoord );
gl_FragColor = gl_FragColor * textureColor;
gl_FragColor.a = alpha;
}
The texture is a circle, but when I set the alpha like this: gl_FragColor.a = alpha, my texture becomes a circle in a black square. The alpha level is okay, but I don't need the square, only the circle; if I don't set the alpha, the square doesn't appear.
So how do I set the alpha correctly for the provided texture?
Take a look at this: three.js - Adjusting opacity of individual particles
You can find a jsfiddle somewhere on that page that uses a ShaderMaterial for a ParticleSystem with variable alpha: http://jsfiddle.net/yfSwK/27/
Also, at least change the fragment shader a bit: gl_FragColor should be a write-only variable; it's not usual to read from it:
vec4 col = vec4(vColor, 100);
vec4 tex = texture2D( texture, gl_PointCoord );
gl_FragColor = vec4( (col*tex).rgb, alpha );
...or in one line:
gl_FragColor = vec4( (vec4(vColor, 100) * texture2D( texture, gl_PointCoord )).rgb, alpha);
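For per-particle transparency (which is what the linked fiddle does), the alpha would typically come in as a vertex attribute rather than a single uniform; a sketch (the attribute/varying names alpha and vAlpha are assumptions, not from the original code):
// vertex shader additions (keep the existing color/size attributes):
attribute float alpha;
varying float vAlpha;
// inside main(): vAlpha = alpha;
// fragment shader (keeping the existing vColor varying and texture uniform):
varying float vAlpha;
void main() {
    gl_FragColor = vec4( (vec4(vColor, 1.0) * texture2D( texture, gl_PointCoord )).rgb, vAlpha );
}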

Three.js, GLSL, HeightMaps + Alphamaps

I am using three.js in an attempt to learn more about OpenGL. Anyway, I've created a fragment shader that actually works for blending textures with an alpha map, but I am having issues with the heightmap portion. If anyone has any ideas, please let me know, as I am stuck. I have used this as a reference: http://threejs.org/examples/webgl_terrain_dynamic.html
<script id="vertexShader" type="x-shader/x-vertex">
uniform sampler2D tDisplacement;
varying vec2 vUv;
void main(void)
{
vUv = uv;
#ifdef VERTEX_TEXTURES
vec3 dv = texture2D( tDisplacement, uv ).xyz;
float df = 300.0 * dv.z + 1.0;
vec3 displacedPosition = normal * df + position;
vec4 worldPosition = modelMatrix * vec4( displacedPosition, 1.0 );
vec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );
#else
vec4 worldPosition = modelMatrix * vec4( position, 1.0 );
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
#endif
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script id="fragmentShaderNoise" type="x-shader/x-fragment">
uniform sampler2D Alpha;
uniform sampler2D Grass;
uniform sampler2D Stone;
uniform sampler2D Rock;
uniform sampler2D tDisplacement;
varying vec3 vertex_color;
varying vec2 vUv;
void main(void)
{
vec4 alpha = texture2D( Alpha, vUv.xy);
vec4 tex0 = texture2D( Grass, vUv.xy * 10.0 ); // Tile
vec4 tex1 = texture2D( Rock, vUv.xy * 10.0 ); // Tile
vec4 tex2 = texture2D( Stone, vUv.xy * 10.0 ); // Tile
tex0 *= alpha.r; // Red channel
tex1 = mix( tex0, tex1, alpha.g ); // Green channel
vec4 outColor = mix( tex1, tex2, alpha.b ); // Blue channel
gl_FragColor = outColor;
}
</script>
If a related example would help, I have written a demo that includes a vertex shader that incorporates displacement at:
http://stemkoski.github.io/Three.js/Shader-Fireball.html
Alternatively, perhaps you could start with something simpler, and then work incrementally towards your desired result. For example, I believe that the code below would allow you to change the vertex position according to the red amount of each pixel in the texture bumpTexture.
uniform sampler2D bumpTexture;
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 bumpData = texture2D( bumpTexture, uv );
float displacement = 100.0 * bumpData.r;
vec3 newPosition = position + normal * displacement;
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
Hope this helps!
