I am using three.js to try to learn more about OpenGL. Anyway, I've created a fragment shader which works for blending textures with an alphamap, but I am having issues with the heightmap portion. If anyone has any ideas, please let me know, as I am stuck. Oh, I have used this as a reference: http://threejs.org/examples/webgl_terrain_dynamic.html.
<script id="vertexShader" type="x-shader/x-vertex">
uniform sampler2D tDisplacement;
varying vec2 vUv;
void main(void)
{
// Pass the mesh UVs through for the fragment shader's texture lookups.
vUv = uv;
#ifdef VERTEX_TEXTURES
// Sample the displacement map (blue channel) and push the vertex
// outward along its normal, scaled to a 1..301 range.
vec3 dv = texture2D( tDisplacement, uv ).xyz;
float df = 300.0 * dv.z + 1.0;
vec3 displacedPosition = normal * df + position;
vec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );
#else
// Vertex textures unsupported on this GPU: render the undisplaced mesh.
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
#endif
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script id="fragmentShaderNoise" type="x-shader/x-fragment">
uniform sampler2D Alpha;
uniform sampler2D Grass;
uniform sampler2D Stone;
uniform sampler2D Rock;
uniform sampler2D tDisplacement;
varying vec2 vUv;
void main(void)
{
// Splat-map blend: the Alpha texture's R/G/B channels weight the three
// tiled terrain textures.
vec4 alpha = texture2D( Alpha, vUv.xy);
vec4 tex0 = texture2D( Grass, vUv.xy * 10.0 ); // Tile
vec4 tex1 = texture2D( Rock, vUv.xy * 10.0 ); // Tile
vec4 tex2 = texture2D( Stone, vUv.xy * 10.0 ); // Tile
tex0 *= alpha.r; // Red channel scales grass
tex1 = mix( tex0, tex1, alpha.g ); // Green channel blends in rock
vec4 outColor = mix( tex1, tex2, alpha.b ); // Blue channel blends in stone
gl_FragColor = outColor;
}
</script>
If a related example would help, I have written a demo that includes a vertex shader that incorporates displacement at:
http://stemkoski.github.io/Three.js/Shader-Fireball.html
Alternatively, perhaps you could start with something simpler, and then work incrementally towards your desired result. For example, I believe that the code below would allow you to change the vertex position according to the red amount of each pixel in the texture bumpTexture.
uniform sampler2D bumpTexture;
varying vec2 vUv;
void main()
{
// Forward the UVs and raise each vertex along its normal by an amount
// taken from the red channel of the bump texture.
vUv = uv;
vec4 heightSample = texture2D( bumpTexture, uv );
float offset = 100.0 * heightSample.r;
vec3 displaced = position + normal * offset;
gl_Position = projectionMatrix * modelViewMatrix * vec4( displaced, 1.0 );
}
Hope this helps!
Related
Does texture splatting work with Three.js or another JavaScript 3D rendering framework? If yes, I'd like to see an example, or even a tutorial, on large terrain. If it doesn't work, is there any other way of mapping large terrains?
Thank you.
Challenge accepted!
First, you can write a vertex shader that takes a grayscale image and uses it as a heightmap, and includes a varying float (called vAmount below) to pass to the fragment shader to determine the texture(s) to display (blend) at that point.
uniform sampler2D bumpTexture;
uniform float bumpScale;
varying float vAmount;
varying vec2 vUV;
void main()
{
vUV = uv;
// The heightmap is grayscale, so any single channel carries the height;
// read red and hand the value to the fragment shader for texture banding.
vec4 heightSample = texture2D( bumpTexture, uv );
vAmount = heightSample.r;
// Displace the vertex along its normal, scaled by the bumpScale uniform.
vec3 displaced = position + normal * bumpScale * vAmount;
gl_Position = projectionMatrix * modelViewMatrix * vec4( displaced, 1.0 );
}
Next comes the fragment shader, which can include however many textures you need for different elevations, and there is a great built-in function called smoothstep that makes smooth transitions much easier to calculate.
An example of code for such a fragment shader:
uniform sampler2D oceanTexture;
uniform sampler2D sandyTexture;
uniform sampler2D grassTexture;
uniform sampler2D rockyTexture;
uniform sampler2D snowyTexture;
varying vec2 vUV;
// Height at this fragment, interpolated from the vertex shader (0..1).
varying float vAmount;
void main()
{
// Each texture is weighted by an elevation band: a fade-in smoothstep
// minus a fade-out smoothstep. Adjacent bands overlap slightly so the
// textures cross-fade rather than hard-switch at a threshold.
vec4 water = (smoothstep(0.01, 0.25, vAmount) - smoothstep(0.24, 0.26, vAmount)) * texture2D( oceanTexture, vUV * 10.0 );
vec4 sandy = (smoothstep(0.24, 0.27, vAmount) - smoothstep(0.28, 0.31, vAmount)) * texture2D( sandyTexture, vUV * 10.0 );
vec4 grass = (smoothstep(0.28, 0.32, vAmount) - smoothstep(0.35, 0.40, vAmount)) * texture2D( grassTexture, vUV * 20.0 );
vec4 rocky = (smoothstep(0.30, 0.50, vAmount) - smoothstep(0.40, 0.70, vAmount)) * texture2D( rockyTexture, vUV * 20.0 );
// Snow has no upper band: it fades in and stays on to the maximum height.
vec4 snowy = (smoothstep(0.50, 0.65, vAmount)) * texture2D( snowyTexture, vUV * 10.0 );
// Start from opaque black and add the weighted contributions.
gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0) + water + sandy + grass + rocky + snowy;
}
Then you can use a THREE.ShaderMaterial to use this for a given mesh. The above code is implemented at http://stemkoski.github.io/Three.js/Shader-Heightmap-Textures.html and produces a result like this:
Hope this helps you get started. Happy coding!
I have developed a terrain generation system with texture splatting in Three.js and I am having trouble applying normal mapping to it. How should I go about it? This code is fully functional as of now.
I have looked at some tutorials of WebGL shading and normal mapping and I could not find one to accurately fit my code.
FRAGMENT_SHADER: `
uniform sampler2D albedoA;
uniform sampler2D albedoB;
uniform sampler2D albedoC;
uniform sampler2D albedoD;
uniform sampler2D albedoE;
uniform sampler2D normalA;
uniform sampler2D normalB;
uniform sampler2D normalC;
uniform sampler2D normalD;
uniform sampler2D normalE;
uniform float repeatScale;
uniform vec3 sunPosition;
varying vec2 vUV;
varying float vAmount;
varying vec3 vNormal;
varying vec3 vWorldPosition;
void main()
{
// Elevation-banded splatting: each albedo is weighted by overlapping
// smoothstep bands of the height value vAmount.
vec3 diffA = (smoothstep(0.01, 0.25, vAmount) - smoothstep(0.25, 0.35, vAmount)) * texture2D(albedoA, vUV * repeatScale).rgb;
vec3 diffB = (smoothstep(0.24, 0.27, vAmount) - smoothstep(0.27, 0.37, vAmount)) * texture2D(albedoB, vUV * repeatScale).rgb;
vec3 diffC = (smoothstep(0.28, 0.32, vAmount) - smoothstep(0.32, 0.42, vAmount)) * texture2D(albedoC, vUV * repeatScale).rgb;
vec3 diffD = (smoothstep(0.30, 0.60, vAmount) - smoothstep(0.40, 0.70, vAmount)) * texture2D(albedoD, vUV * repeatScale).rgb;
vec3 diffE = (smoothstep(0.50, 0.85, vAmount)) * texture2D(albedoE, vUV * repeatScale).rgb;
vec3 albedoVector = diffA + diffB + diffC + diffD + diffE;
// Blend the normal maps with the same elevation weights.
// NOTE(review): normalVector is computed but never used in the lighting
// below -- to apply normal mapping it must be unpacked (n * 2.0 - 1.0),
// rotated into world space (e.g. via a TBN matrix), and used in place of
// vNormal in the diffuse dot product.
vec3 normA = (smoothstep(0.01, 0.25, vAmount) - smoothstep(0.25, 0.35, vAmount)) * texture2D(normalA, vUV * repeatScale).rgb;
vec3 normB = (smoothstep(0.24, 0.27, vAmount) - smoothstep(0.27, 0.37, vAmount)) * texture2D(normalB, vUV * repeatScale).rgb;
vec3 normC = (smoothstep(0.28, 0.32, vAmount) - smoothstep(0.32, 0.42, vAmount)) * texture2D(normalC, vUV * repeatScale).rgb;
vec3 normD = (smoothstep(0.30, 0.60, vAmount) - smoothstep(0.40, 0.70, vAmount)) * texture2D(normalD, vUV * repeatScale).rgb;
vec3 normE = (smoothstep(0.50, 0.85, vAmount)) * texture2D(normalE, vUV * repeatScale).rgb;
vec3 normalVector = normA + normB + normC + normD + normE;
// Lambert term against the sun direction, clamped to an ambient floor of
// 0.25 (equivalent to the previous max() + two-sided if clamping).
float diffuseFloat = clamp(dot(normalize(sunPosition - vWorldPosition), vNormal), 0.25, 1.0);
gl_FragColor = vec4(diffuseFloat * albedoVector, 1.0);
}
`,
VERTEX_SHADER: `
uniform sampler2D heightTexture;
varying vec2 vUV;
varying float vAmount;
varying vec3 vNormal;
varying vec3 vWorldPosition;
void main()
{
vUV = uv;
// Height read from the red channel drives the fragment shader's banding.
vAmount = texture2D(heightTexture, uv).r;
// Fix: vWorldPosition previously held a view-space position (it was built
// with modelViewMatrix) while the fragment shader subtracts it from
// sunPosition, which by its name is world-space. Use modelMatrix so both
// vectors live in the same space.
vec4 worldPosition = modelMatrix * vec4(position, 1.0);
vWorldPosition = worldPosition.xyz;
vNormal = vec3(normal);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`
I am using the THREE.ShaderMaterial and as of now the code splats the textures and accounts for lighting and shadows, but not the normal maps.
You can calculate the normal of a surface (terrain) by the simple numerical approximation - See Finite difference method.
You have to know the size of the height map texture. I recommend to set the size of the texture to a uniform variable:
uniform sampler2D heightTexture;
uniform vec2 heightTextureSize; // (width, height) of heightTexture;
Calculate the offset between adjacent texels of the texture:
vec2 offset = 1.0 / heightTextureSize;
Read the height of the adjacent texels of the texture
// Center sample plus the four axis-aligned neighbors of the heightmap.
// Fix: GLSL requires explicit type declarations for new variables.
float vA  = texture2D(heightTexture, uv).r;
float vAL = texture2D(heightTexture, uv + vec2(-offset.x, 0.0)).r;
float vAR = texture2D(heightTexture, uv + vec2( offset.x, 0.0)).r;
float vAB = texture2D(heightTexture, uv + vec2( 0.0, -offset.y)).r;
float vAT = texture2D(heightTexture, uv + vec2( 0.0, offset.y)).r;
Finally you can calculate the approximated normal vector in texture space
vec3 normalMap = normalize( vec3(vAL - vAR, vAT - vAB, 2.0) );
I'm working on a graphics engine written in C++. We currently can either build for desktop or for web (using emscripten). Both platforms utilize OpenGL ES 2.0.
I have a vertex and a fragment shader:
Vertex Shader
#version 100
attribute vec4 a_position;
attribute vec2 a_texcoord;
attribute vec3 a_normal;
attribute vec3 a_tangent;
attribute vec3 a_bitangent;
uniform mat4 u_model;
uniform mat4 u_view;
uniform mat4 u_projection;
uniform mat3 u_normalMatrix;
varying vec2 v_texcoord;
varying vec3 v_normal;
varying vec3 v_fragPos;
varying mat3 v_TBN;
void main()
{
// Tangent-space basis transformed into world space; the fragment shader
// uses it to rotate tangent-space normal-map samples into world space.
// NOTE(review): T/B/N are transformed with u_model while v_normal uses
// u_normalMatrix; these disagree under non-uniform scaling -- confirm
// which convention is intended.
vec3 T = normalize(vec3(u_model * vec4(a_tangent, 0.0)));
vec3 B = normalize(vec3(u_model * vec4(a_bitangent, 0.0)));
vec3 N = normalize(vec3(u_model * vec4(a_normal, 0.0)));
v_TBN = mat3(T, B, N);
gl_Position = u_projection * u_view * u_model * a_position;
v_texcoord = a_texcoord;
v_normal = u_normalMatrix * a_normal;
v_fragPos = vec3(u_model * a_position);
}
Fragment Shader
#version 100
precision mediump float;
varying vec2 v_texcoord;
varying vec3 v_normal;
varying vec3 v_fragPos;
varying mat3 v_TBN;
// Per-material texture set plus the Phong shininess exponent.
struct Material {
sampler2D diffuse;
sampler2D specular;
sampler2D normal;
float shininess;
};
// One light source; `type` selects the model: 0 = directional,
// 1 = point, 2 = spot. constant/linear/quadratic are the attenuation
// coefficients; cutOff/outerCutOff are spotlight cone cosines.
struct Light {
int type;
vec3 position;
vec3 direction;
vec3 ambient;
vec3 diffuse;
vec3 specular;
float constant;
float linear;
float quadratic;
float cutOff;
float outerCutOff;
};
// NOTE(review): u_lights[16] with 11 members per light needs far more
// uniform storage than ES 2.0 guarantees (GL_MAX_FRAGMENT_UNIFORM_VECTORS
// may be as low as 16 on mobile GPUs) -- a plausible cause of a
// mobile-only link failure with an empty info log. Verify against the
// device's reported limits.
const int MAX_LIGHTS = 16;
uniform vec3 u_viewPos;
uniform Material u_material;
uniform Light u_lights[MAX_LIGHTS];
uniform int u_lightCount;
uniform bool normalMapping;
// Forward declarations; the definitions follow below.
vec3 calcDirLight(Light light, vec3 normal, vec3 viewDir);
vec3 calcPointLight(Light light, vec3 normal, vec3 viewDir);
vec3 calcSpotLight(Light light, vec3 normal, vec3 viewDir);
void main()
{
vec3 lightOutput = vec3(0.0);
// Default to the interpolated vertex normal; replace it with the
// normal-map sample when normal mapping is enabled.
vec3 norm = normalize(v_normal);
if (normalMapping)
{
norm = texture2D(u_material.normal, v_texcoord).rgb;
norm = normalize(norm * 2.0 - 1.0); // unpack [0,1] -> [-1,1]
norm = normalize(v_TBN * norm);     // tangent space -> world space
}
vec3 viewDir = normalize(u_viewPos - v_fragPos);
// ES 2.0 requires constant loop bounds, so iterate to MAX_LIGHTS and
// break once the active light count is reached.
for(int i = 0; i < MAX_LIGHTS; i++)
{
if (i >= u_lightCount){break;}
if (u_lights[i].type == 0)
{
// Fix: this statement was missing its terminating semicolon, a
// GLSL compile error.
lightOutput += calcDirLight(u_lights[i], norm, viewDir);
}
else if (u_lights[i].type == 1)
{
lightOutput += calcPointLight(u_lights[i], norm, viewDir);
}
else if (u_lights[i].type == 2)
{
lightOutput += calcSpotLight(u_lights[i], norm, viewDir);
}
}
gl_FragColor = vec4(lightOutput, 1.0);
}
vec3 calcDirLight(Light light, vec3 normal, vec3 viewDir)
{
// Phong shading for a directional light (no distance attenuation).
vec3 albedo = vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specMap = vec3(texture2D(u_material.specular, v_texcoord));
vec3 lightDir = normalize(-light.direction);
float lambert = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float specAmount = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
vec3 ambient = light.ambient * albedo;
vec3 diffuse = light.diffuse * lambert * albedo;
vec3 specular = light.specular * specAmount * specMap;
return ambient + diffuse + specular;
}
vec3 calcPointLight(Light light, vec3 normal, vec3 viewDir)
{
// Phong shading for a point light with quadratic distance attenuation.
vec3 albedo = vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specMap = vec3(texture2D(u_material.specular, v_texcoord));
vec3 lightDir = normalize(light.position - v_fragPos);
float lambert = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float specAmount = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
float dist = length(light.position - v_fragPos);
float attenuation = 1.0 / (light.constant + light.linear * dist + light.quadratic * (dist * dist));
vec3 ambient = light.ambient * albedo;
vec3 diffuse = light.diffuse * lambert * albedo;
vec3 specular = light.specular * specAmount * specMap;
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
return ambient + diffuse + specular;
}
vec3 calcSpotLight(Light light, vec3 normal, vec3 viewDir)
{
// Phong shading with attenuation and a smooth spotlight cone falloff.
vec3 lightDir = normalize(light.position - v_fragPos);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
float distance = length(light.position - v_fragPos);
float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
// NOTE(review): the common convention is dot(lightDir, normalize(-light.direction));
// this un-negated form is only correct if light.direction already points
// from the surface toward the light -- confirm how the uniform is filled.
float theta = dot(lightDir, normalize(light.direction));
float epsilon = light.cutOff - light.outerCutOff;
// Intensity ramps from 0 at the outer cone to 1 at the inner cone.
float intensity = clamp((theta - light.outerCutOff) / epsilon, 0.0, 1.0);
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
ambient *= attenuation * intensity;
diffuse *= attenuation * intensity;
specular *= attenuation * intensity;
return (ambient + diffuse + specular);
}
They both compile without errors, but the linking stage fails. The info log (accessed using glGetProgramInfoLog) is empty.
The biggest problem is that it compiles and links perfectly fine on desktop native and desktop browsers (emscripten). It only fails linking on mobile devices.
I've spent hours trying to figure this out, without success. Is there anything obvious I might have missed?
Edit 1: This is the code that builds the shader program:
GLuint GLES2Shader::buildProgram(GLuint vertexShader, GLuint fragmentShader)
{
    // Links two compiled shader stages into a program object.
    // Returns the program handle, or 0 on failure (the partial program is deleted).
    GL( GLuint programObject = glCreateProgram() );
    if (programObject == 0)
    {
        // Context lost or out of resources; nothing to clean up.
        return 0;
    }
    GL( glAttachShader(programObject, vertexShader) );
    GL( glAttachShader(programObject, fragmentShader) );
    GL( glLinkProgram(programObject) );
    // Check GL_LINK_STATUS explicitly -- glGetError() does not report link failures.
    GLint linked = GL_FALSE;
    GL( glGetProgramiv(programObject, GL_LINK_STATUS, &linked) );
    if (!linked)
    {
        // Query the real log length: some mobile drivers report a non-zero
        // length even when a fixed-size glGetProgramInfoLog read comes back
        // empty, which helps diagnose "link failed, empty log" cases.
        GLint logLength = 0;
        GL( glGetProgramiv(programObject, GL_INFO_LOG_LENGTH, &logLength) );
        DEBUG( GLchar infoLog[512] = { 0 } );  // zero-init so an empty log prints cleanly
        DEBUG( glGetProgramInfoLog(programObject, 512, NULL, infoLog) );
        DEBUG( LOGE("ERROR::SHADER::LINKING_FAILED (log length %d) %s", logLength, infoLog) );
        GL( glDeleteProgram(programObject) );
        return 0;
    }
    return programObject;
}
Some days ago I visited the site activetheory.net again, and I saw this stunning, nice glass effect in the home-screen logo border and tried to recode it. Based on the minified code, I was able to see that they use a PNG as a mask.
I was able to load a png inside the shader and show it up, also it was easy to get a mesh with video texture working. Now I'm stuck on the combining part (png with video) inside the shaders.
someone got experience with it and can help me?
Thank you and have a nice Day!
here is the shader-material part from:
var g = new THREE.PlaneGeometry(128.0, 72.0);
// NOTE(review): THREE.ImageUtils.loadTexture is deprecated in newer
// three.js releases; THREE.TextureLoader is the modern equivalent.
var outline = THREE.ImageUtils
    .loadTexture('outline.png');
outline.magFilter = THREE.NearestFilter;
var mat = new THREE.ShaderMaterial({
    uniforms: {
        map: {type: "t", value: movieScreen.texture},
        // Fix: the texture must be carried under the `value` key;
        // `{type: "t", outline}` stored it under `outline`, so the
        // shader's `outline` sampler uniform was never bound.
        outline: {type: "t", value: outline},
        aspect: {type: "fv1", value: []},
        res: {type: "v2", value: new THREE.Vector2(window.innerWidth, window.innerHeight)}
    },
    vertexShader: document.
        getElementById('vertShader').text,
    fragmentShader: document.
        getElementById('fragShader').text,
    transparent: true,
});
here are the shaders:
<script id="vertShader" type="shader">
varying vec2 vUv;
varying vec2 nUv;
// Pass-through vertex shader: forwards the mesh UVs under two names
// (one for the mask lookup, one for the video lookup).
void main() {
vUv = uv;
nUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script id="fragShader" type="shader">
varying vec2 vUv;
varying vec2 nUv;
// NOTE(review): `color` is declared but never used in this shader.
uniform float color;
uniform sampler2D outline;
uniform sampler2D map;
void main(void) {
// Hard mask: 1.0 where the PNG's alpha exceeds 0.9, else 0.0.
float alpha = step(0.9, texture2D(outline, vUv).a);
// NOTE(review): this samples `outline` again, so the video texture
// `map` is never shown -- presumably texture2D(map, nUv) was intended
// (the working version later in this document does exactly that).
gl_FragColor = texture2D(outline, nUv);
gl_FragColor *= alpha;
}
</script>
Finally, I got a similar shader working. For everyone who is interested.
The shaders:
<script id="vertShader" type="shader">
varying vec2 vUv;
varying vec2 nUv;
void main() {
vUv = uv;
nUv = uv;
// Fix: the homogeneous w coordinate of a position must be 1.0;
// the previous 1.05 uniformly shrank the rendered mesh by ~5%.
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script id="fragShader" type="shader">
varying vec2 vUv;
varying vec2 nUv;
// NOTE(review): `color` is declared but never used in this shader.
uniform float color;
uniform sampler2D outline;
uniform sampler2D map;
void main(void) {
// Hard mask: 1.0 where the PNG's alpha exceeds 0.9, else 0.0.
float alpha = step(0.9, texture2D(outline, vUv).a);
// Show the video texture only inside the mask...
gl_FragColor = texture2D(map, nUv);
gl_FragColor *= alpha;
// ...and brighten the masked region by a small constant.
gl_FragColor += 0.08 * alpha;
}
</script>
Have fun!
Is it possible to debug GLSL code or print the variable values from within the glsl code while using it with webgl ? Do three.js or scene.js contain any such functionality?
Not really,
The way I usually debug GLSL is to output colors. So for example, given 2 shaders like
// vertex shader
uniform mat4 worldViewProjection;
uniform vec3 lightWorldPos;
uniform mat4 world;
uniform mat4 viewInverse;
uniform mat4 worldInverseTranspose;
attribute vec4 position;
attribute vec3 normal;
attribute vec2 texCoord;
varying vec4 v_position;
varying vec2 v_texCoord;
varying vec3 v_normal;
varying vec3 v_surfaceToLight;
varying vec3 v_surfaceToView;
void main() {
v_texCoord = texCoord;
// Clip-space position, also exposed to the fragment shader.
v_position = (worldViewProjection * position);
// Transform the normal by the inverse-transpose so it stays correct
// under non-uniform scaling.
v_normal = (worldInverseTranspose * vec4(normal, 0)).xyz;
// World-space vectors from the surface toward the light and the camera
// (the camera position is the last column of the inverse view matrix).
v_surfaceToLight = lightWorldPos - (world * position).xyz;
v_surfaceToView = (viewInverse[3] - (world * position)).xyz;
gl_Position = v_position;
}
// fragment-shader
precision highp float;
uniform vec4 colorMult;
varying vec4 v_position;
varying vec2 v_texCoord;
varying vec3 v_normal;
varying vec3 v_surfaceToLight;
varying vec3 v_surfaceToView;
uniform sampler2D diffuseSampler;
uniform vec4 specular;
uniform sampler2D bumpSampler;
uniform float shininess;
uniform float specularFactor;
// Lighting coefficient helper (Cg/HLSL-style `lit`):
// x = ambient (always 1), y = diffuse term, z = specular term, w = 1.
vec4 lit(float l ,float h, float m) {
return vec4(1.0,
max(l, 0.0),
(l > 0.0) ? pow(max(0.0, h), m) : 0.0,
1.0);
}
void main() {
vec4 diffuse = texture2D(diffuseSampler, v_texCoord) * colorMult;
// Re-normalize the interpolated vectors before lighting.
vec3 normal = normalize(v_normal);
vec3 surfaceToLight = normalize(v_surfaceToLight);
vec3 surfaceToView = normalize(v_surfaceToView);
// Half-vector for the Blinn-Phong specular highlight.
vec3 halfVector = normalize(surfaceToLight + surfaceToView);
vec4 litR = lit(dot(normal, surfaceToLight),
dot(normal, halfVector), shininess);
// Diffuse plus specular, carrying the diffuse texture's alpha through.
gl_FragColor = vec4((
vec4(1,1,1,1) * (diffuse * litR.y
+ specular * litR.z * specularFactor)).rgb,
diffuse.a);
}
If I didn't see something on the screen, I'd first change the fragment shader by just adding a line at the end
gl_FragColor = vec4(1,0,0,1); // draw red
If I started to see my geometry then I'd know the issue is probably in the fragment shader. I might check my normals by doing this
gl_FragColor = vec4(v_normal * 0.5 + 0.5, 1);
If the normals looked okay I might check the UV coords with
gl_FragColor = vec4(v_texCoord, 0, 1);
etc...
You can try WebGL-Inspector for this purpose.