WebGL website looks fine on PC, but looks weird on mobile devices - javascript

So I made a WebGL website and it can be accessed here:
https://jameswebsite.azurewebsites.net/
It looks fine (or as expected) on the PC, but on mobile devices it looks funny. Looks like the texture mapping might be off (maybe texture clipping is the problem) but also there doesn't appear to be any shading occurring.
Here is the screenshot from the PC:
PC Image
Here is the screenshot from the Mobile Device:
Mobile Image
I have turned off the textures and still have this problem. This leads me to believe the problem could be in my shaders. Here are my shaders:
<script id="per-fragment-lighting-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vTextureCoord;
varying vec3 vTransformedNormal;
varying vec4 vPosition;
uniform float uMaterialShininess;
uniform bool uShowSpecularHighlights;
uniform bool uUseLighting;
uniform bool uUseTextures;
uniform vec3 uAmbientColor;
uniform vec3 uPointLightingLocation;
uniform vec3 uPointLightingSpecularColor;
uniform vec3 uPointLightingDiffuseColor;
uniform sampler2D uSampler;
void main(void) {
vec3 lightWeighting;
if (!uUseLighting) {
lightWeighting = vec3(1.0, 1.0, 1.0);
} else {
vec3 lightDirection = normalize(uPointLightingLocation - vPosition.xyz);
vec3 normal = normalize(vTransformedNormal);
float specularLightWeighting = 0.0;
if (uShowSpecularHighlights) {
vec3 eyeDirection = normalize(-vPosition.xyz);
vec3 reflectionDirection = reflect(-lightDirection, normal);
specularLightWeighting = pow(max(dot(reflectionDirection, eyeDirection), 0.0), uMaterialShininess);
}
float diffuseLightWeighting = max(dot(normal, lightDirection), 0.0);
lightWeighting = uAmbientColor
+ uPointLightingSpecularColor * specularLightWeighting
+ uPointLightingDiffuseColor * diffuseLightWeighting;
}
vec4 fragmentColor;
if (uUseTextures) {
fragmentColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
} else {
fragmentColor = vec4(1.0, 1.0, 1.0, 1.0);
}
gl_FragColor = vec4(fragmentColor.rgb * lightWeighting, fragmentColor.a);
}
<script id="per-fragment-lighting-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec3 aVertexNormal;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
uniform mat3 uNMatrix;
varying vec2 vTextureCoord;
varying vec3 vTransformedNormal;
varying vec4 vPosition;
void main(void) {
vPosition = uMVMatrix * vec4(aVertexPosition, 1.0);
gl_Position = uPMatrix * vPosition;
vTextureCoord = aTextureCoord;
vTransformedNormal = uNMatrix * aVertexNormal;
}
Any ideas? Thanks in Advance!

On mobile devices, WebGL implementations can be sensitive to things like non-power-of-two texture dimensions, or can require texture wrapping behaviour to be explicitly set.
I noticed your grass texture has dimensions of 590x590. Consider resizing your textures to the closest power-of-two dimensions (i.e. in the case of your grass texture, 512x512).
Also, I would recommend you explicitly set the gl.TEXTURE_WRAP_S and gl.TEXTURE_WRAP_T parameters on your texture object(s) to gl.REPEAT, as this may be another cause for your textures only partially displaying on geometry.
You can set wrapping behaviour on a texture object in the following way:
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT);

Per @gman:
change mediump to highp in your shaders and see what that does
This actually worked!! Thanks @gman!

Related

Add two textures on the same mesh WEBGL

I need to add two textures on the same mesh. I have an octahedron with a texture and I want to add a second texture above it. How can I achieve this? The first texture is appearing but the second one not. I created two different textures at my .js file and enabled 2 different texture slots for both of them. I did bind the textures and pass the textures slots as a uniform in the fragment shader. I think that my HTML file is wrong so I am attaching the code of the HTML file.
<script id="shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vTextureCoord;
varying vec3 vLightWeighting;
uniform sampler2D uSampler;
uniform sampler2D uSampler1;
void main(void) {
vec3 c;
vec4 textureColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
vec4 textureColor1 = texture2D(uSampler1, vec2(vTextureCoord.s, vTextureCoord.t));
c = textureColor.rgb * textureColor.a + textureColor1.rgb * textureColor1.a * (1.0 - textureColor.a);
gl_FragColor= vec4(c, 1.0);
}
</script>
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec3 aVertexNormal;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
uniform mat3 uNMatrix;
uniform vec3 uLightingDirection;
uniform vec3 uDirectionalColor;
varying vec2 vTextureCoord;
varying vec3 vLightWeighting;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 0.5);
vTextureCoord = aTextureCoord;
vec3 transformedNormal = uNMatrix * aVertexNormal;
float directionalLightWeighting = max(dot(transformedNormal, uLightingDirection), 0.0);
vLightWeighting = uDirectionalColor * directionalLightWeighting;
}
</script>

webgl 2d blending two transparent textures on top of each other

I am trying to blend two textures with an alpha channel over each other.
After looking through the net it appeared that there are no simple ways to solve this. I tried this trick in the fragment shader:
if(gl_FragColor.a < 0.5){
discard;
}
This works for simpler textures with not a lot of alpha variations, like the human sprite in the background. But I want to be able to work with more complex images like the gradient sprite which doesn't work at all.
This is my fragment shader:
precision mediump float;
varying vec3 fragColor;
varying highp vec2 vTextureCoord;
uniform sampler2D uSampler;
void main()
{
vec4 tex = texture2D(uSampler, vTextureCoord);
gl_FragColor = tex * vec4(fragColor, 1.0);
if(gl_FragColor.a < 0.5){
discard;
}
}
This is my vertex shader:
precision mediump float;
attribute vec3 vertPosition;
attribute vec3 vertColor;
attribute vec2 aTextureCoord;
varying vec3 fragColor;
varying highp vec2 vTextureCoord;
uniform mat4 uPMatrix;
uniform mat4 uMVMatrix;
uniform vec2 uvOffset;
uniform vec2 uvScale;
void main()
{
fragColor = vertColor;
gl_Position = uPMatrix * uMVMatrix * vec4(vertPosition.x, vertPosition.y, vertPosition.z, 1.0);
vTextureCoord = aTextureCoord * uvScale + uvOffset;
}
This is a part of the gl setup I use:
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.BLEND);
gl.blendEquation(gl.FUNC_ADD);
gl.blendFunc(gl.SRC_ALPHA, gl.ON);
Currently all sprites are being drawn on the same z axis, 0. However I don't know if that is the source of the problem, as I tested giving each object a random z value and the problem persisted.
Edit:
In response to Rabbid76's comment.
This works very well! The alpha is blended, but the only problem is that the sprites look "burned":
I tried to alter the fragment shader to this:
<strike>gl_FragColor = tex * vec4(tex.rgb, tex.a);</strike>
But it still looks burned.
Edit 2
I solved it. gl_FragColor should be:
gl_FragColor = vec4(tex.rgb, tex.a);
and not
gl_FragColor = vec4(fragColor* tex.rgb, tex.a);
otherwise it creates a burn blending effect
Currently all sprites are being drawn on the same z axis, 0.
Since the depth test is enabled (gl.enable(gl.DEPTH_TEST)), and the default depth function (depthFunc) is gl.LESS, the second drawn sprite won't pass the depth test. You have to disable the depth test:
gl.disable(gl.DEPTH_TEST);
Further I recommend to adapt the blend function (blendFunc):
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
Or you use Alpha premultiplication. Therefore you have to adapt the fragment shader:
gl_FragColor = tex * vec4(fragColor * tex.rgb, tex.a);
And you have to use the following blend function (blendFunc):
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
Note, you don't need if(gl_FragColor.a < 0.5) discard; any more.

Are uniforms actually defined in THREE.js RawShaderMaterial?

The documentation for the THREE.Js RawShaderMaterial says:
built-in uniforms and attributes are not automatically prepended to the GLSL shader code.
However, in practice I am able to run the following shader, with a rawShaderMaterial:
<script type="x-shader/x-vertex" id="vertexShader">
precision mediump float;
precision mediump int;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
attribute vec3 position;
attribute vec4 color;
varying vec3 vPosition;
varying vec4 vColor;
void main() {
vPosition = position;
vColor = color;
gl_Position = projectionMatrix *
modelViewMatrix *
vec4(position.x, position.y, position.z, 1.0);
}
Without defining the modelViewMatrix or projectionMatrix anywhere. Are some uniforms actually passed to the RawShaderMaterial after all?
Yes,
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
uniform mat4 modelMatrix;
uniform mat4 viewMatrix;
uniform mat3 normalMatrix;
uniform vec3 cameraPosition;
are available to you when using RawShaderMaterial, however you must declare them in your shader if you want to use them. They are not automatically prepended.
three.js r.73

WebGL Textures Drawn Very Dark

I'm trying to display multiple textured objects using HTML5 and WebGL. The problem is that the textures are being shaded very dark. I believe it has to be something with the way my shaders are being generated or used. I have been using the default shaders from https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Lighting_in_WebGL. It works fine when I use one object such as in their example, but if I use two, both objects are drawn very dark. My fragment shader with 4 textures shared between 5 objects looks like this:
varying highp vec2 vTextureCoord;varying highp vec3 vLighting;
uniform sampler2D u_image0;
uniform sampler2D u_image1;
uniform sampler2D u_image2;
uniform sampler2D u_image3;
void main(void){
highp vec4 texelColor0 = texture2D(u_image0, vec2(vTextureCoord.s, vTextureCoord.t));
highp vec4 texelColor1 = texture2D(u_image1, vec2(vTextureCoord.s, vTextureCoord.t));
highp vec4 texelColor2 = texture2D(u_image2, vec2(vTextureCoord.s, vTextureCoord.t));
highp vec4 texelColor3 = texture2D(u_image3, vec2(vTextureCoord.s, vTextureCoord.t));
gl_FragColor =
vec4(texelColor0.rgb * vLighting, texelColor0.a) *
vec4(texelColor1.rgb * vLighting, texelColor1.a) *
vec4(texelColor2.rgb * vLighting, texelColor2.a) *
vec4(texelColor3.rgb * vLighting, texelColor3.a);
}
The vertex shader:
attribute highp vec3 aVertexNormal;
attribute highp vec3 aVertexPosition;
attribute highp vec2 aTextureCoord;
uniform highp mat4 uNormalMatrix;
uniform highp mat4 uMVMatrix;
uniform highp mat4 uPMatrix;
varying highp vec2 vTextureCoord;
varying highp vec3 vLighting;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
// Apply lighting effect
highp vec3 ambientLight = vec3(0.5, 0.5, 0.5);
highp vec3 directionalLightColor = vec3(1.0, 1.0, 1.0);
highp vec3 directionalVector = vec3(1, 2.0, 2.0);
highp vec4 transformedNormal = uNormalMatrix * vec4(aVertexNormal, 1.0);
highp float directional = max(dot(transformedNormal.xyz, directionalVector), 0.0);
vLighting = ambientLight + (directionalLightColor * directional);
}
I also call this at the start of each draw cycle:
gl.clearColor(255.0, 255.0, 255.0, 1.0);
gl.clearDepth(1.0); // Clear everything
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
My canvas draws something like this, it is very light on some parts of the objects and very dark on others. What can I do to create an evenly distributed but "normal" looking object with no "glare" but more "clear" looking textures?
Here is a link to what my scene looks like:
http://i.imgur.com/S9fwrEm.png
It seems that the problem was with the gl_FragColor calculation. I thought that when using multiple textures you were supposed to multiply them together. However, this would make sense that multiplying the current texture by the others not in use would darken the currently drawn textures. If you only use something like:
gl_fragColor =
vec4(texelColor0.rgb * vLighting, texelColor0.a);
}
Then it is drawn fine. However, this doesn't seem proper since I am using one fragColor of one texture for each texture drawn. If anyone has insight as to how to change fragColors based on the current texture being used, then please leave another answer, Thanks!
The color is off because you are blending in 4 different "colors" from 4 different textures in your fragment shader. Of course the result will be wrong. The way you're doing it is not how you draw multiple models. If you are serious about this you should go find some tutorials on WebGL.

Debug GLSL code in webgl

Is it possible to debug GLSL code or print the variable values from within the glsl code while using it with webgl ? Do three.js or scene.js contain any such functionality?
Not really,
The way I usually debug GLSL is to output colors. So for example, given 2 shaders like
// vertex shader
uniform mat4 worldViewProjection;
uniform vec3 lightWorldPos;
uniform mat4 world;
uniform mat4 viewInverse;
uniform mat4 worldInverseTranspose;
attribute vec4 position;
attribute vec3 normal;
attribute vec2 texCoord;
varying vec4 v_position;
varying vec2 v_texCoord;
varying vec3 v_normal;
varying vec3 v_surfaceToLight;
varying vec3 v_surfaceToView;
void main() {
v_texCoord = texCoord;
v_position = (worldViewProjection * position);
v_normal = (worldInverseTranspose * vec4(normal, 0)).xyz;
v_surfaceToLight = lightWorldPos - (world * position).xyz;
v_surfaceToView = (viewInverse[3] - (world * position)).xyz;
gl_Position = v_position;
}
// fragment-shader
precision highp float;
uniform vec4 colorMult;
varying vec4 v_position;
varying vec2 v_texCoord;
varying vec3 v_normal;
varying vec3 v_surfaceToLight;
varying vec3 v_surfaceToView;
uniform sampler2D diffuseSampler;
uniform vec4 specular;
uniform sampler2D bumpSampler;
uniform float shininess;
uniform float specularFactor;
vec4 lit(float l ,float h, float m) {
return vec4(1.0,
max(l, 0.0),
(l > 0.0) ? pow(max(0.0, h), m) : 0.0,
1.0);
}
void main() {
vec4 diffuse = texture2D(diffuseSampler, v_texCoord) * colorMult;
vec3 normal = normalize(v_normal);
vec3 surfaceToLight = normalize(v_surfaceToLight);
vec3 surfaceToView = normalize(v_surfaceToView);
vec3 halfVector = normalize(surfaceToLight + surfaceToView);
vec4 litR = lit(dot(normal, surfaceToLight),
dot(normal, halfVector), shininess);
gl_FragColor = vec4((
vec4(1,1,1,1) * (diffuse * litR.y
+ specular * litR.z * specularFactor)).rgb,
diffuse.a);
}
If I didn't see something on the screen I'd first change the fragment shader by just adding a line at the end
gl_FragColor = vec4(1,0,0,1); // draw red
If I started to see my geometry then I'd know the issue is probably in the fragment shader. I might check my normals by doing this
gl_FragColor = vec4(v_normal * 0.5 + 0.5, 1);
If the normals looked okay I might check the UV coords with
gl_FragColor = vec4(v_texCoord, 0, 1);
etc...
You can try WebGL-Inspector for this purpose.

Categories