Hi, I'm trying to add an offset to the fog on my shader texture.
I've managed to do this before, but now I have changed the code slightly to use UV repeating and I'm not sure how to apply viewDirection to vec3 txl. Here's the code:
varying vec2 vuv;
uniform vec2 repeat;
uniform vec3 topColor;
uniform vec3 bottomColor;
uniform float offset;
uniform float exponent;
uniform vec3 fogColor;
uniform float fogNear;
uniform float fogFar;
uniform float fogDensity;
uniform sampler2D background;
varying vec3 vWorldPosition;
void main() {
vec3 viewDirection = normalize(vWorldPosition - cameraPosition);
// originally this worked with fog
// vec3 stars = textureCube(background, viewDirection).xyz;
vec2 uv = vuv;
uv = fract(uv * repeat);
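// gradients are taken from the un-wrapped UV so textureGrad picks consistent mip levels across the fract() seam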
vec2 smooth_uv = repeat * vuv;
vec4 duv = vec4(dFdx(smooth_uv), dFdy(smooth_uv));
vec3 txl = textureGrad(background, uv, duv.xy, duv.zw).rgb;
float h = normalize(vWorldPosition + offset).y;
float t = max(pow(max(h, 0.0), exponent), 0.0);
float f = exp(min(0.0, -vWorldPosition.y * 0.00125));
vec3 sky = mix(txl, bottomColor, f);
gl_FragColor = vec4(sky, 1.0);
}
In my original code I did:
vec3 stars = textureCube(background, viewDirection).xyz;
float h = normalize(vWorldPosition + offset).y;
float f = exp(min(0.0, -vWorldPosition.y * 0.00125));
This worked fine, but because I have changed the first line to vec3 txl = textureGrad(background, uv, duv.xy, duv.zw).rgb; I no longer have viewDirection, which is the vWorldPosition-derived value that the offset works off. So my question really is this:
how can I add viewDirection to vec3 txl = textureGrad(background, uv, duv.xy, duv.zw).rgb; so the offset works again?
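For reference, what I think I want is basically my old lines dropped back in, with txl taking the place of stars (just a sketch of the intent, reusing the names from the code above; I'm not sure how the pieces should fit together):
vec3 txl = textureGrad(background, uv, duv.xy, duv.zw).rgb; // was: textureCube(background, viewDirection).xyz
float h = normalize(vWorldPosition + offset).y;             // offset term, unchanged
float f = exp(min(0.0, -vWorldPosition.y * 0.00125));
// ...then fog/blend txl using h and f the same way stars was blended before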
thanks!
Related
I am using WebGL to try and render a scene with both color and textures, with the colors being rendered through the vertex shader and the textures being rendered through the fragment shader. Both shaders use some of the same uniform variables; however, WebGL does not allow me to pass them between both, only one or the other. The error message I receive is "Uniform variable not linkable between attached shaders." I have gone through similar posts by other users regarding this issue, but I was not able to find one that matched my situation. My code is as follows:
const vertex_shader = `
attribute vec4 vertex;
attribute vec2 textureCoordinates;
attribute vec4 normal;
uniform mat4 model_transform;
uniform mat4 vp_transform;
uniform mat4 normal_transform;
varying vec2 vTexCoords;
varying vec4 obj_loc;
varying vec4 transformed_normal;
varying vec4 vColor;
uniform vec4 light_position;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shininess;
uniform vec4 light_specular;
uniform vec4 camera_position;
uniform int flags;
void main()
{
obj_loc = model_transform * vertex; // vertex in model coordinates
gl_Position = vp_transform * obj_loc; // vertex in device independent coordinates
transformed_normal = normal_transform * normal; // normal in model coordinates
vTexCoords = textureCoordinates;
vec4 l_vec = normalize(light_position - obj_loc);
vec4 n_vec = normalize(normal_transform * normal);
float lndot = dot(l_vec, n_vec);
float diffuse_scale = max(0.0, lndot);
vec4 diffuse_color = diffuse_scale * light_diffuse * mat_diffuse;
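// flags mod 2 (the % operator is reserved in GLSL ES 1.00); an even flags value disables diffuse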
if( (flags - 2*(flags/2)) == 0)
diffuse_color = vec4(0.0, 0.0, 0.0, 1.0);
vec4 h_vec = normalize(l_vec + normalize(camera_position - obj_loc));
float spec_scale = pow(max(0.0, dot(h_vec, n_vec)), mat_shininess);
vec4 specular_color;
if(lndot < 0.0) {
specular_color = vec4(0.0, 0.0, 0.0, 1.0);
} else {
specular_color = spec_scale * mat_specular * light_specular;
}
if( (flags - 4*(flags/4)) < 2 ) {
specular_color = vec4(0.0, 0.0, 0.0, 1.0);
}
vec4 ambient_color = mat_ambient * light_ambient;
if(flags < 4) {
ambient_color = vec4(0.0, 0.0, 0.0, 1.0);
}
}`;
const fragment_shader = `
precision mediump float;
uniform vec4 light_position;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
varying vec2 vTexCoords;
varying vec4 obj_loc;
varying vec4 transformed_normal;
uniform sampler2D sampler;
void
main()
{
vec4 texture_sample = texture2D(sampler, vTexCoords);
vec4 l_vec = normalize(light_position - obj_loc);
vec4 n_vec = normalize(transformed_normal);
float lndot = dot(l_vec, n_vec);
float diffuse_scale = max(0.0, lndot);
vec4 diffuse_color = diffuse_scale * light_diffuse * texture_sample;
vec4 ambient_color = light_ambient * texture_sample;
gl_FragColor = diffuse_color + ambient_color;
gl_FragColor.a = 1.0;
}
`;
The variables I am trying to use between both shaders are light_position, light_ambient, and light_diffuse. How would I go about doing this?
The link error is caused by different precision qualifications of the uniforms in the vertex shader and the fragment shader. See 4.5 Precision and Precision Qualifiers.
From OpenGL ES Shading Language 1.00 Specification - 4.5.3 Default Precision Qualifiers
The vertex language has the following predeclared globally scoped default precision statements:
precision highp float;
The fragment language has no default precision for float, which is why you have specified the default precision mediump:
precision mediump float;
Thus the precision of the floating-point uniforms differs between the vertex shader and the fragment shader.
Either set an explicit mediump precision for floats in the vertex shader:
const vertex_shader = `
precision mediump float;
...
or change the precision in the fragment shader from mediump to highp.
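For example (note that highp in the fragment shader is optional in OpenGL ES 2.0, so this only helps on devices that support it):
const fragment_shader = `
precision highp float;
...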
Alternatively, you can add an explicit precision qualifier to the uniform variables that are used in both the vertex shader and the fragment shader. For instance:
uniform mediump vec4 light_position;
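The qualifier has to match in both shaders, so for the uniforms shared here (light_position, light_ambient and light_diffuse) that would mean declaring, in both the vertex shader and the fragment shader:
uniform mediump vec4 light_position;
uniform mediump vec4 light_ambient;
uniform mediump vec4 light_diffuse;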
I need to add two textures to the same mesh. I have an octahedron with a texture and I want to add a second texture on top of it. How can I achieve this? The first texture is appearing but the second one is not. I created two different textures in my .js file and enabled two different texture slots for them. I bound the textures and passed the texture slots as uniforms to the fragment shader. I think my HTML file is wrong, so I am attaching the code of the HTML file.
<script id="shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vTextureCoord;
varying vec3 vLightWeighting;
uniform sampler2D uSampler;
uniform sampler2D uSampler1;
void main(void) {
vec3 c;
vec4 textureColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
vec4 textureColor1 = texture2D(uSampler1, vec2(vTextureCoord.s, vTextureCoord.t));
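// composite the first texture over the second using their alpha channels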
c = textureColor.rgb * textureColor.a + textureColor1.rgb * textureColor1.a * (1.0 - textureColor.a);
gl_FragColor= vec4(c, 1.0);
}
</script>
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec3 aVertexNormal;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
uniform mat3 uNMatrix;
uniform vec3 uLightingDirection;
uniform vec3 uDirectionalColor;
varying vec2 vTextureCoord;
varying vec3 vLightWeighting;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 0.5);
vTextureCoord = aTextureCoord;
vec3 transformedNormal = uNMatrix * aVertexNormal;
float directionalLightWeighting = max(dot(transformedNormal, uLightingDirection), 0.0);
vLightWeighting = uDirectionalColor * directionalLightWeighting;
}
</script>
I'm working on a graphics engine written in C++. We can currently build either for desktop or for the web (using Emscripten). Both platforms use OpenGL ES 2.0.
I have a vertex and a fragment shader:
Vertex Shader
#version 100
attribute vec4 a_position;
attribute vec2 a_texcoord;
attribute vec3 a_normal;
attribute vec3 a_tangent;
attribute vec3 a_bitangent;
uniform mat4 u_model;
uniform mat4 u_view;
uniform mat4 u_projection;
uniform mat3 u_normalMatrix;
varying vec2 v_texcoord;
varying vec3 v_normal;
varying vec3 v_fragPos;
varying mat3 v_TBN;
void main()
{
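// transform tangent, bitangent and normal into world space to build the TBN matrix for normal mapping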
vec3 T = normalize(vec3(u_model * vec4(a_tangent, 0.0)));
vec3 B = normalize(vec3(u_model * vec4(a_bitangent, 0.0)));
vec3 N = normalize(vec3(u_model * vec4(a_normal, 0.0)));
v_TBN = mat3(T, B, N);
gl_Position = u_projection * u_view * u_model * a_position;
v_texcoord = a_texcoord;
v_normal = u_normalMatrix * a_normal;
v_fragPos = vec3(u_model * a_position);
};
Fragment Shader
#version 100
precision mediump float;
varying vec2 v_texcoord;
varying vec3 v_normal;
varying vec3 v_fragPos;
varying mat3 v_TBN;
struct Material {
sampler2D diffuse;
sampler2D specular;
sampler2D normal;
float shininess;
};
struct Light {
int type;
vec3 position;
vec3 direction;
vec3 ambient;
vec3 diffuse;
vec3 specular;
float constant;
float linear;
float quadratic;
float cutOff;
float outerCutOff;
};
const int MAX_LIGHTS = 16;
uniform vec3 u_viewPos;
uniform Material u_material;
uniform Light u_lights[MAX_LIGHTS];
uniform int u_lightCount;
uniform bool normalMapping;
vec3 calcDirLight(Light light, vec3 normal, vec3 viewDir);
vec3 calcPointLight(Light light, vec3 normal, vec3 viewDir);
vec3 calcSpotLight(Light light, vec3 normal, vec3 viewDir);
void main()
{
vec3 lightOutput = vec3(0.0);
vec3 norm = normalize(v_normal);
if (normalMapping)
{
norm = texture2D(u_material.normal, v_texcoord).rgb;
norm = normalize(norm * 2.0 - 1.0);
norm = normalize(v_TBN * norm);
}
vec3 viewDir = normalize(u_viewPos - v_fragPos);
for(int i = 0; i < MAX_LIGHTS; i++)
{
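// GLSL ES 1.00 only guarantees loop conditions against constant expressions,
// hence the fixed MAX_LIGHTS bound with an early break at u_lightCount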
if (i >= u_lightCount){break;}
if (u_lights[i].type == 0)
{
lightOutput += calcDirLight(u_lights[i], norm, viewDir);
}
else if (u_lights[i].type == 1)
{
lightOutput += calcPointLight(u_lights[i], norm, viewDir);
}
else if (u_lights[i].type == 2)
{
lightOutput += calcSpotLight(u_lights[i], norm, viewDir);
}
}
gl_FragColor = vec4(lightOutput, 1.0);
}
vec3 calcDirLight(Light light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(-light.direction);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
return (ambient + diffuse + specular);
}
vec3 calcPointLight(Light light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(light.position - v_fragPos);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
float distance = length(light.position - v_fragPos);
float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
return (ambient + diffuse + specular);
}
vec3 calcSpotLight(Light light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(light.position - v_fragPos);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
float distance = length(light.position - v_fragPos);
float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
float theta = dot(lightDir, normalize(light.direction));
float epsilon = light.cutOff - light.outerCutOff;
float intensity = clamp((theta - light.outerCutOff) / epsilon, 0.0, 1.0);
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
ambient *= attenuation * intensity;
diffuse *= attenuation * intensity;
specular *= attenuation * intensity;
return (ambient + diffuse + specular);
};
They both compile without errors, but the linking stage fails. The info log (accessed using glGetProgramInfoLog) is empty.
The biggest problem is that it compiles and links perfectly fine natively on desktop and in desktop browsers (Emscripten). It only fails to link on mobile devices.
I've spent hours trying to figure this out, without success. Is there anything obvious I might have missed?
Edit 1: This is the code that builds the shader program:
GLuint GLES2Shader::buildProgram(GLuint vertexShader, GLuint fragmentShader)
{
GL( GLuint programObject = glCreateProgram() );
GL( glAttachShader(programObject, vertexShader) );
GL( glAttachShader(programObject, fragmentShader) );
GL( glLinkProgram(programObject) );
//check if the program linked successfully
GLint linked;
GL( glGetProgramiv(programObject, GL_LINK_STATUS, &linked) );
if (!linked)
{
DEBUG( GLchar infoLog[512] );
DEBUG( glGetProgramInfoLog(programObject, 512, NULL, infoLog) );
DEBUG( LOGE("ERROR::SHADER::LINKING_FAILED %s", infoLog) );
GL( glDeleteProgram(programObject) );
return 0;
}
return programObject;
}
I am using three.js to try and learn more about OpenGL. Anyway, I've created a fragment shader which actually works for blending textures with an alpha map, but I am having issues with the heightmap portion. If anyone has any ideas please let me know, as I am stuck. Oh, I have used this as a reference: http://threejs.org/examples/webgl_terrain_dynamic.html.
<script id="vertexShader" type="x-shader/x-vertex">
uniform sampler2D tDisplacement;
varying vec2 vUv;
void main(void)
{
vUv = uv;
#ifdef VERTEX_TEXTURES
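// displace the vertex along its normal, scaled by the displacement map sample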
vec3 dv = texture2D( tDisplacement, uv ).xyz;
float df = 300.0 * dv.z + 1.0;
vec3 displacedPosition = normal * df + position;
vec4 worldPosition = modelMatrix * vec4( displacedPosition, 1.0 );
vec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );
#else
vec4 worldPosition = modelMatrix * vec4( position, 1.0 );
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
#endif
gl_Position = projectionMatrix * mvPosition;
}
</script>
<script id="fragmentShaderNoise" type="x-shader/x-fragment">
uniform sampler2D Alpha;
uniform sampler2D Grass;
uniform sampler2D Stone;
uniform sampler2D Rock;
uniform sampler2D tDisplacement;
varying vec3 vertex_color;
varying vec2 vUv;
void main(void)
{
vec4 alpha = texture2D( Alpha, vUv.xy);
vec4 tex0 = texture2D( Grass, vUv.xy * 10.0 ); // Tile
vec4 tex1 = texture2D( Rock, vUv.xy * 10.0 ); // Tile
vec4 tex2 = texture2D( Stone, vUv.xy * 10.0 ); // Tile
tex0 *= alpha.r; // Red channel
tex1 = mix( tex0, tex1, alpha.g ); // Green channel
vec4 outColor = mix( tex1, tex2, alpha.b ); // Blue channel
gl_FragColor = outColor;
}
</script>
If a related example would help, I have written a demo that includes a vertex shader that incorporates displacement at:
http://stemkoski.github.io/Three.js/Shader-Fireball.html
Alternatively, perhaps you could start with something simpler, and then work incrementally towards your desired result. For example, I believe that the code below would allow you to change the vertex position according to the red amount of each pixel in the texture bumpTexture.
uniform sampler2D bumpTexture;
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 bumpData = texture2D( bumpTexture, uv );
float displacement = 100.0 * bumpData.r;
vec3 newPosition = position + normal * displacement;
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
Hope this helps!
Is it possible to debug GLSL code or print variable values from within the GLSL code while using it with WebGL? Do three.js or scene.js contain any such functionality?
Not really.
The way I usually debug GLSL is to output colors. So, for example, given two shaders like
// vertex shader
uniform mat4 worldViewProjection;
uniform vec3 lightWorldPos;
uniform mat4 world;
uniform mat4 viewInverse;
uniform mat4 worldInverseTranspose;
attribute vec4 position;
attribute vec3 normal;
attribute vec2 texCoord;
varying vec4 v_position;
varying vec2 v_texCoord;
varying vec3 v_normal;
varying vec3 v_surfaceToLight;
varying vec3 v_surfaceToView;
void main() {
v_texCoord = texCoord;
v_position = (worldViewProjection * position);
v_normal = (worldInverseTranspose * vec4(normal, 0)).xyz;
v_surfaceToLight = lightWorldPos - (world * position).xyz;
v_surfaceToView = (viewInverse[3] - (world * position)).xyz;
gl_Position = v_position;
}
// fragment-shader
precision highp float;
uniform vec4 colorMult;
varying vec4 v_position;
varying vec2 v_texCoord;
varying vec3 v_normal;
varying vec3 v_surfaceToLight;
varying vec3 v_surfaceToView;
uniform sampler2D diffuseSampler;
uniform vec4 specular;
uniform sampler2D bumpSampler;
uniform float shininess;
uniform float specularFactor;
vec4 lit(float l ,float h, float m) {
return vec4(1.0,
max(l, 0.0),
(l > 0.0) ? pow(max(0.0, h), m) : 0.0,
1.0);
}
void main() {
vec4 diffuse = texture2D(diffuseSampler, v_texCoord) * colorMult;
vec3 normal = normalize(v_normal);
vec3 surfaceToLight = normalize(v_surfaceToLight);
vec3 surfaceToView = normalize(v_surfaceToView);
vec3 halfVector = normalize(surfaceToLight + surfaceToView);
vec4 litR = lit(dot(normal, surfaceToLight),
dot(normal, halfVector), shininess);
gl_FragColor = vec4((
vec4(1,1,1,1) * (diffuse * litR.y
+ specular * litR.z * specularFactor)).rgb,
diffuse.a);
}
If I didn't see something on the screen I'd first change the fragment shader by just adding a line at the end:
gl_FragColor = vec4(1,0,0,1); // draw red
If I started to see my geometry then I'd know the issue is probably in the fragment shader. I might check my normals by doing this
gl_FragColor = vec4(v_normal * 0.5 + 0.5, 1);
If the normals looked okay I might check the UV coords with
gl_FragColor = vec4(v_texCoord, 0, 1);
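The same trick works for any other input to the shaders above, for example the texture or the light direction (one probe at a time):
gl_FragColor = texture2D(diffuseSampler, v_texCoord); // raw texture lookup
or
gl_FragColor = vec4(normalize(v_surfaceToLight) * 0.5 + 0.5, 1); // surface-to-light direction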
etc...
You can try WebGL-Inspector for this purpose.