samplerCube in struct cause GL_INVALID_OPERATION - javascript

I have the following code in glsl:
// snippet 1, works well
uniform vec4 uDiffuse;
uniform sampler2D uDiffuseMap;
uniform vec4 uSpecular;
uniform sampler2D uSpecularMap;
uniform float uShininess;
uniform samplerCube uEnvironmentMap;
// snippet 2, not work
struct PhongMaterial {
vec4 diffuse;
sampler2D diffuseMap;
vec4 specular;
sampler2D specularMap;
float shininess;
samplerCube environmentMap; // works well if I remove this line.
};
But it throws the following error:
[.WebGL-0x7fabfe002e00]RENDER WARNING: there is no texture bound to the unit 0
[.WebGL-0x7fabfe002e00]GL ERROR :GL_INVALID_OPERATION : GetShaderiv: <- error from previous GL command
[.WebGL-0x7fabfe002e00]GL ERROR :GL_INVALID_OPERATION : GLES2DecoderImpl::DoBindTexImage2DCHROMIUM: <- error from previous GL command
[.WebGL-0x7fabfe002e00]GL ERROR :GL_INVALID_OPERATION : glFramebufferTexture2D: <- error from previous GL command
[.WebGL-0x7fabfe002e00]GL ERROR :GL_INVALID_OPERATION : GLES2DecoderImpl::DoBindTexImage2DCHROMIUM: <- error from previous GL command
WebGL: too many errors, no more errors will be reported to the console for this context.
Here is an example:
https://codepen.io/scarletsky/pen/KEgBzx?editors=1010
What I want to do is implement a shader that can receive both a sampler2D and a samplerCube. When no samplerCube is passed to the shader, it throws an error.
I have no idea what to do next. Can anyone help me ?

Your real error is likely somewhere else, related to not binding your textures correctly, looking up the wrong locations, or something similar.
RENDER WARNING: there is no texture bound to the unit 0
Here's a working example with your uniform structure
// Fragment shader: a PhongMaterial struct containing two sampler2Ds and a
// samplerCube. All three samplers are read below so the linker cannot
// optimize any of them away.
const fs = `
precision mediump float;
struct PhongMaterial {
vec4 diffuse;
sampler2D diffuseMap;
vec4 specular;
sampler2D specularMap;
float shininess;
samplerCube environmentMap;
};
uniform PhongMaterial material;
void main() {
vec4 diffuse = texture2D(material.diffuseMap, gl_PointCoord.xy);
vec4 specular = texture2D(material.specularMap, gl_PointCoord.xy);
vec4 cube = textureCube(
material.environmentMap,
vec3(gl_PointCoord.xy, gl_PointCoord.x * gl_PointCoord.y) * 2. - 1.);
// use all 3 textures so we can see they were set
vec4 diffuseOrSpecular = mix(diffuse, specular, step(0.25, gl_PointCoord.y));
gl_FragColor = mix(diffuseOrSpecular, cube, step(0.5, gl_PointCoord.y));
}
`
// Vertex shader: emits a single 128x128-pixel point at the origin.
const vs = `
void main() {
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 128.0;
}
`;
const gl = document.querySelector('canvas').getContext('webgl');
const prg = twgl.createProgram(gl, [vs, fs]);
// Struct members are looked up as "structName.fieldName".
const diffuseLocation = gl.getUniformLocation(prg, 'material.diffuseMap');
const specularLocation = gl.getUniformLocation(prg, 'material.specularMap');
const envmapLocation = gl.getUniformLocation(prg, 'material.environmentMap');
// 1x1 solid-color 2D texture for the diffuse map.
const texDiffuse = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texDiffuse);
{
const level = 0;
const format = gl.RGBA;
const width = 1;
const height = 1;
const type = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([255, 255, 0, 255]); // yellow
gl.texImage2D(gl.TEXTURE_2D, level, format, width, height, 0, format, type, pixel);
}
// 1x1 solid-color 2D texture for the specular map.
const texSpecular = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texSpecular);
{
const level = 0;
const format = gl.RGBA;
const width = 1;
const height = 1;
const type = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([0, 0, 255, 255]); // blue
gl.texImage2D(gl.TEXTURE_2D, level, format, width, height, 0, format, type, pixel);
}
// Cube map: upload one 1x1 face per iteration, each a different color.
const texCube = gl.createTexture();
gl.bindTexture(gl.TEXTURE_CUBE_MAP, texCube);
for (let i = 0; i < 6; ++i) {
const level = 0;
const format = gl.RGBA;
const width = 1;
const height = 1;
const type = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([(i & 1) * 255, (i & 2) * 255, (i & 4) * 255, 255]);
gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, level, format, width, height, 0, format, type, pixel);
}
gl.useProgram(prg);
// put the yellow diffuse texture on texture unit 0
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, texDiffuse);
// use texture on texture unit 0
gl.uniform1i(diffuseLocation, 0);
// put the blue specular texture on texture unit 1
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, texSpecular);
// tell the specular sampler to use texture unit 1
gl.uniform1i(specularLocation, 1);
// put the cubemap on texture unit 2
gl.activeTexture(gl.TEXTURE0 + 2);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, texCube);
// tell the cubemap sampler to use texture unit 2
gl.uniform1i(envmapLocation, 2);
// draw one 128x128 pixel point
gl.drawArrays(gl.POINTS, 0, 1);
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
NOTE: you are required to supply a valid texture for every sampler uniform your shader claims is being used regardless of whether or not it is actually being used.
To find out if the shader claims it's being used call
gl.getUniformLocation(program, nameOfSamplerUniform);
If it returns non-null then AFAIK as WebGL is concerned you must supply a valid texture for that sampler.
If you don't actually need one (because of conditionals or something) then keep around a 1-pixel 2D texture, or for cube maps a 6-pixel cube texture (1 pixel per face), and attach that texture when you don't need a specific texture.
For these cases I usually keep a white and/or black texture around. For example let's say I had math like
color = diffuseMapColor * diffuseColor + envMapColor;
If I only want diffuseColor then I can set diffuseMapColor to white and envMapColor to black which is effectively
color = 1 * diffuseColor + 0;
Similarly, if I only want diffuseMapColor, I can set diffuseColor to white and envMapColor to black and get
color = diffuseMapColor * 1 + 0;
and if I only want envMapColor then setting diffuseColor to 0 will work
color = diffuseMapColor * 0 + envMapColor;
is the same as
color = 0 + envMapColor;
On the other hand, most 3D engines would generate a different shaders for these cases. If no environment map is used they'd generate a shader that doesn't include an environment map. This is because generally doing less work in a shader is faster than doing more so a good 3D engine generates shaders for each case of what is needed.

Related

Prevent texture rounding error for pixel graphics

To prevent XY-problems, I'll describe the problem, followed by my current idea for a solution. Given this has to have been solved a thousand times by other people, I need a sanity check, whether I am running down the completely wrong path, or not.
In 2D pixel graphics (GL_NEAREST), when translating by half of a screen pixel, rounding behavior determines the color of a fragment. This means frequently choosing the wrong texel.
From my understanding, in the following image:
On the left side, fragments' UV-coordinates will be exactly between texels (by construction, UVs selecting size of the quad pixels from the texture).
The rasterizer won't include some fragments (e.g. left side, red stripes, not shown on the right), due to conflict rules for seamless connections between triangles.
Default rounding will only be correct, if UV-space up is also screen-space up. This fails immediately when rotating, see the right side.
E.g. for a y-flipped texture, the selected texels are one row off. Clamping to prevent texture bleeds would result in one row being used twice, and one missing¹. When freely rotating sprites, these errors may occur for certain rows or columns.
I'm aware of two common approaches, which both seem to fail:
Rounding vertices to pixel-corners, which I don't see working when sprites can rotate.
Having transparent border pixels in the texture-atlas. This is a somewhat dirty patch, and has costs. It destroys power-of-two sized textures (unless increasing the size by four times), and required border-size depends on mipmap-level.
For 2D pixel-graphics, almost everything is a textured quad. Given this, my current idea is to render each quad slightly larger (to have both side's fragments included in the problem-case). I'd then manually round to middle of texels in the fragment-shader, and drop fragments, which would be outside of the intended texture.
Is this a bad idea? How do other people solve this?
(1): Example webgl code always needs a lot of boilerplate. The problem imho does not require a demonstration, but people do love clicking things which display something.
The following has y-flipped UVs, and clamps in the fragment-shader. To be able to see texels selected, a colorful texture is generated:
The entire thing is on 8x8 pixels, using a quad constructed to be 5x5, upscaled for ease-of-view.
// Minimal assert helper: throws with `message` when `condition` is falsy.
const assert = (condition, message) => {
if (!condition) throw new Error(message);
};
const canvas = document.getElementById('canvas');
// antialias disabled so each canvas pixel maps to exactly one sample.
const gl = canvas.getContext('webgl2', { antialias: false });
assert(gl !== null, `WebGL2 was unexpectedly not supported.`);
// Gaussian-like falloff centered at 0, peaking at 1.
const distribution = (x) => {
  const scaled = 1.6 * x;
  return Math.E ** (-(scaled ** 2));
};
// Number of texels before the hue pattern repeats.
const rollover = 11;
// RGB triple for texel index i: three samples of the falloff curve,
// phase-shifted by the texel index so colors cycle along the texture.
const getColors = (i) => {
  const phase = (i % rollover) / rollover * 3;
  return [0, 1, 2].map((channel) => {
    const x = ((channel + 0.5 + phase) % 3) - 1.5;
    return Math.round(0xff * distribution(x));
  });
};
const textureSize = 8;
// Build an 8x8 RGBA test image whose colors cycle so individual texels are
// distinguishable when rendered.
const testImage = new Uint8Array(Array.from(
{ length: textureSize * textureSize },
(_, i) => [...getColors(i), 0xff],
).flat());
// Clip space spans 2 units; the canvas is 8 pixels wide/high.
const positionPerPixel = 2 / 8;
const selectionInPixels = 5;
// Shift the quad down half a pixel to provoke the rounding problem.
const offsetY = -0.5;
const vertices = [
[0, 0 + offsetY],
[selectionInPixels, 0 + offsetY],
[selectionInPixels, selectionInPixels + offsetY],
[0, selectionInPixels + offsetY],
].map(coordPair => coordPair.map(coord => (coord - 2) * positionPerPixel));
const pixelOffset = 2;
const normalizedCoordPerPixel = 1 / textureSize;
// Texture coordinates are y-flipped relative to the vertex order above.
const textureCoords = [
[0, selectionInPixels],
[selectionInPixels, selectionInPixels],
[selectionInPixels, 0],
[0, 0],
].map(coordPair => coordPair.map(coord => (coord + pixelOffset) * normalizedCoordPerPixel));
// Interleave position (2 floats) and texcoord (2 floats) per vertex.
const vData = new Float32Array(vertices.map((e, i) => [...e, ...textureCoords[i]]).flat());
const indices = new Uint16Array([0, 1, 2, 0, 2, 3]);
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// NEAREST filtering: the whole point is to observe which texel gets picked.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 8, 8, 0, gl.RGBA, gl.UNSIGNED_BYTE, testImage);
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(0);
gl.enableVertexAttribArray(1);
const vBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
// Stride 16 bytes = 4 floats per vertex; texcoord starts at byte offset 8.
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 16, 0);
gl.vertexAttribPointer(1, 2, gl.FLOAT, false, 16, 8);
gl.bufferData(gl.ARRAY_BUFFER, vData, gl.STATIC_DRAW);
const iBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
const vertexShaderSrc = `#version 300 es
precision highp float;
layout(location = 0) in vec2 aPosition;
layout(location = 1) in vec2 aTextureCoord;
out vec2 vTextureCoord;
void main(void) {
gl_Position = vec4(aPosition, 0.0, 1.0);
vTextureCoord = aTextureCoord;
}`;
const maxTextureCoord = Math.max(...textureCoords.flat());
const minTextureCoord = Math.min(...textureCoords.flat());
// The clamp bounds are baked into the shader source at build time.
const fragmentShaderSrc = `#version 300 es
precision highp float;
uniform sampler2D sampler;
in vec2 vTextureCoord;
out vec4 fColor;
void main(void){
vec2 clamped = vec2(
clamp(vTextureCoord.x, ${minTextureCoord} + 0.01, ${maxTextureCoord} - 0.01),
clamp(vTextureCoord.y, ${minTextureCoord} + 0.01, ${maxTextureCoord} - 0.01)
);
fColor = texture(sampler, clamped);
}`;
// Boilerplate: compile both shaders, link, and fail loudly on any error.
const program = gl.createProgram();
assert(program !== null, `Program was unexpectedly \`null\`.`);
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
assert(vertexShader !== null, `Vertex-shader was unexpectedly \`null\`.`);
gl.shaderSource(vertexShader, vertexShaderSrc);
gl.compileShader(vertexShader);
assert(gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS), `Vertex-shader failed to compile:\n${gl.getShaderInfoLog(vertexShader)}`);
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
// Fixed copy-paste bug: this message previously said "Vertex-shader".
assert(fragmentShader !== null, `Fragment-shader was unexpectedly \`null\`.`);
gl.shaderSource(fragmentShader, fragmentShaderSrc);
gl.compileShader(fragmentShader);
assert(gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS), `Fragment-shader failed to compile:\n${gl.getShaderInfoLog(fragmentShader)}`);
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
assert(gl.getProgramParameter(program, gl.LINK_STATUS), `Program linking failed:\n${gl.getProgramInfoLog(program)}`);
gl.useProgram(program);
const uniformLocationSampler = gl.getUniformLocation(program, 'sampler');
gl.uniform1i(uniformLocationSampler, 0);
gl.clearColor(0, 0, 0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
#canvas {
width: 256px;
height: 256px;
image-rendering: pixelated;
}
<canvas id='canvas' width='8' height='8'></canvas>
The code is a slightly modified copy&paste from a related problem (limited subtexel precision leading to UVs snapping to exactly half-way between texels). Therefore, some oddities may still be left in there.

Is passing just the length of the sides of a triangle, sufficient to draw a triangle in WebGL?

The question may not be clear, so let me clarify it here. Consider an array which has the coordinates of a triangle in clip space:
var coor = [
-0.4, 0.6,
0.0, -0.5,
-0.5, 0.0,
0.5, -0.5,
0.0
]
So, the coor array will help us to draw a triangle using WebGL. But instead of it I want to go something like this :
var coor2 = [ 100, 100, 100 ]
In coor2 I gave the lengths of the sides of the triangle to draw. It will be an equilateral triangle. So, can I do something such that I enter the three side lengths of a triangle and a script converts them to coordinates which can be clipped in clip space and read by WebGL?
Thanks In Advance
WebGL only cares that you set gl_Position to clip space values. It doesn't care how you do it. It's up to you to decide how to do that. Inside the vertex shader, at least in WebGL1, things are stateless. Meaning when you draw a triangle your vertex shader will be called 3 times. It needs to set gl_Position to 3 different values in clipspace with no state between calls. So given 100 three times, how would you compute a different value each time your vertex shader is called?
Just imagine it in JavaScript
const gl_Position1 = vertexShader(100);
const gl_Position2 = vertexShader(100);
const gl_Position3 = vertexShader(100);
How is the function vertexShader supposed to produce 3 different values with no other input? Unless there is some other state it can't produce a different value. vertex shaders in WebGL1 don't have any other state.
You need to pass in some data that changes every iteration.
Of course you if the values changed then you could produce a triangle. Example
// Spaces three vertices evenly around the unit circle in clip space:
// vertex v (0, 1 or 2) lands a third of a revolution further around.
function vertexShader(v) {
const theta = v / 3 * Math.PI * 2;
const x = Math.cos(theta);
const y = Math.sin(theta);
return [x, y, 0, 1];
}
const gl_Position1 = vertexShader(0);
const gl_Position2 = vertexShader(1);
const gl_Position3 = vertexShader(2);
Would produce a triangle. Converting that to GLSL and WebGL
const gl = document.querySelector('canvas').getContext('webgl');
// Vertex shader: derives a clip-space position purely from the per-vertex
// value `v` (0, 1 or 2) by spacing the three vertices around a circle.
const vs = `
attribute float v;
#define PI radians(180.0)
void main() {
float angle = v / 3.0 * PI * 2.0;
gl_Position = vec4(cos(angle), sin(angle), 0, 1);
}
`;
// Fragment shader: solid red.
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(1, 0, 0, 1);
}
`;
const program = twgl.createProgram(gl, [vs, fs]);
const vLocation = gl.getAttribLocation(program, 'v');
// The only per-vertex data: the index-like values 0, 1, 2.
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0, 1, 2]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(vLocation);
gl.vertexAttribPointer(vLocation, 1, gl.FLOAT, false, 0, 0);
gl.useProgram(program);
gl.drawArrays(gl.TRIANGLES, 0, 3);
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
Of course, given we only passed in [0, 1, 2], we can't easily specify a position, but the point is that now that we have a value that changes, we can produce a triangle — which is just to re-iterate: WebGL only cares that you set gl_Position to clip space values. It doesn't care how you do it.
See this article for more

How to make a texture move on y-axis and x-axis in WebGL

For this project, I am trying to move a texture (an image of raindrops sent as a 1024x2048 png) down my screen, before looping the pixels back up to the top of the screen once they reach the bottom of the canvas. Once the pixels reach the top, I also want to move it to the right a little bit, and after several iterations, it will reach the leftmost side of the canvas, and then it is sent to the rightmost side of the canvas. However, I can't seem to get it to work correctly. As my knowledge of WebGL is quite limited, I am not sure what I am doing wrong, or what the right questions that I should be asking are. I am pretty sure that it is an issue with how I am trying to move the pixels, but I don't know what I should be doing instead.
Here is my vertex shader:
<script id="vertex-shader1" type="x-shader/x-vertex">
attribute vec4 vPosition;
attribute vec2 vTexCoord;
varying vec2 fTexCoord;
void main()
{
// NOTE(review): this subtracts a fixed 0.01 every time the shader runs,
// not cumulatively per frame -- there is no time/offset uniform here, so
// the sampled coordinates are the same every frame.
float y= vTexCoord.y - float(0.01);
float x= vTexCoord.x;
if(y < float(0.0)) {
y = float(1.0);
//x = fTexCoord.x + float(0.1) ;
}
/* if(x > float(1.0)){
x = float(0.0);
}
*/
gl_Position = vPosition;
fTexCoord= vec2(x,y);
}
</script>
And here is my fragment shader:
<script id="fragment-shader1" type="x-shader/x-fragment">
precision mediump float;
uniform sampler2D texture1;
varying vec2 fTexCoord;
void
main()
{
// Sample the raindrop texture at the coordinates computed in the
// vertex shader.
// gl_FragColor = texture2D(texture1, vec2(x,y));
gl_FragColor= texture2D(texture1, fTexCoord);
}
</script>
The above shaders are supposed to make the raindrops from the image (linked below) move down the screen. raindrop.png
But instead, it just stretches the raindrops out. (It does this: messedup_raindrops.png)
Any ideas on how to fix this?
It sounds like you want to "scroll" a texture. To do that you can just pass an offset into the texture coordinates. You can do it in either the fragment or the vertex shader.
Effectively
uniform vec2 offset;
vec2 newTexCoords = texCoords + offset;
And then just change offset over time. Offset values generally only need to be in the range of 0 to 1 since past that things will just repeat.
Example:
// Vertex shader: pass position straight through and offset the texture
// coordinates by a uniform so the texture appears to scroll.
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform vec2 offset;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord + offset;
}
`;
const fs = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D tex;
void main() {
gl_FragColor = texture2D(tex, v_texcoord);
}
`;
const gl = document.querySelector('canvas').getContext('webgl');
// compile shaders, link program, lookup locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// Two triangles covering the whole canvas in clip space.
const bufferInfo = twgl.createBufferInfoFromArrays(gl, {
position: {
numComponents: 2,
data: [
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
],
},
texcoord: {
numComponents: 2,
data: [
0, 1,
1, 1,
0, 0,
0, 0,
1, 1,
1, 0,
],
},
});
const texture = twgl.createTexture(gl, {
src: 'https://i.imgur.com/ZKMnXce.png',
crossOrigin: 'anonymous',
});
function render(time) {
time *= 0.001; // convert to seconds
gl.useProgram(programInfo.program);
// bind buffers and set attributes
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// set uniforms and bind textures
// the offsets wrap with % 1 since texture coordinates repeat past 1.
twgl.setUniforms(programInfo, {
tex: texture,
offset: [(time * .5) % 1, (time * .6) % 1],
});
const count = 6;
gl.drawArrays(gl.TRIANGLES, 0, count);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
canvas { border: 1px solid black; }
<canvas></canvas>
<script src="https://twgljs.org/dist/4.x/twgl.js"></script>
If you want something more flexible I'd suggest you use a matrix for your texture coords
uniform mat4 texMatrix;
vec2 newTexCoords = (texMatrix * vec4(texCoords, 0, 1)).xy;
Which will let you manipulate the texture coords in all the same way vertex coordinates are often manipulated which means you'll be able to scale the texture, rotate the texture, skew the texture, etc.... You can see an example of using a texture matrix here

How to render binary data on canvas using WebGl?

I am using PNaCl ffmpeg to open, read and decode RTSP stream. I am now having raw video frames which I need to transfer to WebGl to render on the canvas.
How can I render binary data on the canvas?
I am running the following code: I presume that I should get a grey canvas after running this code, because I am passing RGBA values of (120,120,120,1) to the synthetic data.
var canvas = document.getElementById('canvas');
var gl = initWebGL(canvas); //function initializes webgl
initViewport(gl, canvas); //initializes view port
console.log('viewport initialized');
var data = [];
for (var i = 0 ; i < 256; i++){
// NOTE(review): with gl.UNSIGNED_BYTE the alpha channel is 0-255, so
// pushing 1.0 here yields an almost fully transparent texel; 255 would
// be opaque.
data.push(120,120,120,1.0);
}
console.log(data);
var pixels = new Uint8Array(data); // 16x16 RGBA image
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(
gl.TEXTURE_2D, // target
0, // mip level
gl.RGBA, // internal format
16, 16, // width and height
0, // border
gl.RGBA, //format
gl.UNSIGNED_BYTE, // type
pixels // texture data
);
console.log('pixels');
console.log(pixels);
<canvas id="canvas"></canvas>
I should get a grey 16x16 box being represented on the canvas, but I am not getting that. What additional steps do I need to take to correctly render the 2D bitmap on the canvas?
PS. I am taking help from this article.
Console output:
As pointed out in the comments, alpha in WebGL in the type of texture you're creating is 0 to 255. You're putting in 1.0 which = 1/255 or an alpha of 0.004
But on top of that you say
I am running the following code: I presume that I should get a grey canvas after running this code
That code is not enough for WebGL. WebGL requires you to supply a vertex shader and fragment shader, vertex data for vertices and then call either gl.drawArrays or gl.drawElements to render something. The code you provided doesn't do those things and without those things we can't tell what else you're doing.
You're also only supplying mip level 0. You either need to supply mips or set texture filtering so only the first level is used otherwise the texture is unrenderable (you'll get a warning about it in the JavaScript console of most browsers).
Here's a working example
var canvas = document.getElementById('canvas');
var gl = canvas.getContext("webgl");
// Build a 16x16 solid grey RGBA image (alpha 255 = fully opaque).
var data = [];
for (var i = 0 ; i < 256; i++){
data.push(120,120,120,255);
}
var pixels = new Uint8Array(data); // 16x16 RGBA image
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(
gl.TEXTURE_2D, // target
0, // mip level
gl.RGBA, // internal format
16, 16, // width and height
0, // border
gl.RGBA, //format
gl.UNSIGNED_BYTE, // type
pixels // texture data
);
gl.generateMipmap(gl.TEXTURE_2D); // you need to do this or set filtering
// compiles and links the shaders and looks up uniform and attribute locations
var programInfo = twgl.createProgramInfo(gl, ["vs", "fs"]);
// Full-canvas quad as two triangles.
var arrays = {
position: [
-1, -1, 0,
1, -1, 0,
-1, 1, 0,
-1, 1, 0,
1, -1, 0,
1, 1, 0,
],
};
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
var bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
var uniforms = {
u_texture: texture,
};
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniforms(programInfo, uniforms);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, gl.TRIANGLES, bufferInfo);
canvas { border: 1px solid black; }
<script id="vs" type="notjs">
attribute vec4 position;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
// Since we know we'll be passing in -1 to +1 for position
// map clip space (-1..+1) to texture space (0..1).
v_texcoord = position.xy * 0.5 + 0.5;
}
</script>
<script id="fs" type="notjs">
precision mediump float;
uniform sampler2D u_texture;
varying vec2 v_texcoord;
void main() {
// Show the texture directly.
gl_FragColor = texture2D(u_texture, v_texcoord);
}
</script>
<script src="https://twgljs.org/dist/twgl.min.js"></script>
<canvas id="canvas"></canvas>

Trying to get webGL (2d) drawing to work. Can't see my triangles

I'm trying to draw a simple quad via webgl. Here is some data:
vertex data: (2 floats per vertex)
0: 0
1: 0
2: 1
3: 0
4: 1
5: 1
6: 0
7: 1
tex coords: (doesnt really matter)
0: -1
1: 1
2: 1
3: 1
4: 1
5: -1
6: -1
7: -1
Indices:
0: 3
1: 0
2: 1
3: 3
4: 1
5: 2
Shaders:
<script id="shader-fs" type="x-shader/x-fragment">
varying highp vec2 vTextureCoord;
uniform highp vec3 uColor;
uniform sampler2D uSampler;
uniform int uSamplerCount;
void main(void) {
highp vec4 texColor =vec4(uColor, 1.0);
if(uSamplerCount > 0)
texColor = texture2D(uSampler, vTextureCoord);
// NOTE(review): texColor is computed but never used below -- the output
// is hard-coded to white, so color/texture state has no visible effect.
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0); //just white for now
}
</script>
<!-- Vertex shader program -->
<script id="shader-vs" type="x-shader/x-vertex">
attribute highp vec2 aVertexPosition;
attribute highp vec2 aTextureCoord;
uniform highp vec2 uPosition;
uniform highp float uZLayer;
varying highp vec2 vTextureCoord;
void main(void) {
// uPosition translates the quad; the result must land in clip space
// (-1..+1) to be visible.
gl_Position = vec4(uPosition + aVertexPosition, uZLayer, 1.0);
vTextureCoord = aTextureCoord;
}
</script>
To me this seems all right. Now here I'm binding the buffers:
// Re-uploads this sprite's vertex positions, texture coordinates and
// triangle indices to their GPU buffers with STATIC_DRAW usage.
$p.UpdateBuffers = function ConvexSprite_UpdateBuffers() {
    const gl = this.gl;
    const ARRAY_BUFFER = 34962;         // WebGLRenderingContext.ARRAY_BUFFER
    const ELEMENT_ARRAY_BUFFER = 34963; // WebGLRenderingContext.ELEMENT_ARRAY_BUFFER
    const STATIC_DRAW = 35044;          // WebGLRenderingContext.STATIC_DRAW

    // Bind each buffer, then copy the current CPU-side data into it.
    const upload = (target, buffer, data) => {
        gl.bindBuffer(target, buffer);
        gl.bufferData(target, data, STATIC_DRAW);
    };
    upload(ARRAY_BUFFER, this.bVertexPositions, this.rotatedPoints);
    upload(ARRAY_BUFFER, this.bTextureCoords, this.texCoords);
    upload(ELEMENT_ARRAY_BUFFER, this.bIndices, this.triangles);
};
And this is how I'm drawing:
// Clears the canvas and draws the single test sprite.
private void DrawScene()
{
// Match the GL viewport to the full canvas size.
gl.viewport(0, 0, (int)canvas.width, (int)canvas.height);
gl.clear(GL.COLOR_BUFFER_BIT | GL.DEPTH_BUFFER_BIT);
testSprite.Draw(aVertexPosition, aTextureCoord, uColor, uPosition, uZLayer, uSampler, uSamplerCount);
}
draw function:
// Sets per-sprite uniforms, points the vertex attributes at this sprite's
// buffers, and issues the indexed draw call. Texture binding is currently
// commented out and uSamplerCount is set to 0, so the fragment shader's
// texture path is disabled.
public void Draw(uint aVertexPosition, uint aTextureCoord,
WebGLUniformLocation uColor, WebGLUniformLocation uPosition, WebGLUniformLocation uZLayer, WebGLUniformLocation uSampler, WebGLUniformLocation uSamplerCount)
{
//position
gl.uniform2f(uPosition, this.position.x, this.position.y);
gl.uniform1f(uZLayer, this.position.z);
gl.uniform3f(uColor, 1f, 0.5f, 0.5f);
gl.uniform1i(uSamplerCount, 0);
//vertex data
gl.bindBuffer(GL.ARRAY_BUFFER, bVertexPositions);
gl.vertexAttribPointer(aVertexPosition, 2, GL.FLOAT, false, 0, 0);
gl.bindBuffer(GL.ARRAY_BUFFER, bTextureCoords);
gl.vertexAttribPointer(aTextureCoord, 2, GL.FLOAT, false, 0, 0);
gl.bindBuffer(GL.ELEMENT_ARRAY_BUFFER, bIndices);
//texture
// gl.activeTexture(GL.TEXTURE0);
// gl.bindTexture(GL.TEXTURE_2D, texture);
// gl.uniform1i(uSampler, 0);
//draw
gl.drawElements(GL.TRIANGLES, triangles.length, GL.UNSIGNED_SHORT, 0);
}
Unfortunately all I'm getting is a black screen. The triangles are ccw, and in the 0-1 position range. Shouldn't this draw something to the screen ? What am I forgetting here ?
What values are this.position.x and this.position.y? Given you have a 0,1 quad the best you're going to be able to do with this shader and those vertex coordinates is draw a quad on 1/4th of the canvas. The values you need to output have to be in clipspace (-1 <-> +1).
To convert from pixels to clipspace
clipX = pixelX / gl.canvas.width * 2 - 1
clipY = pixelY / gl.canvas.height * 2 - 1
for width and height in clipspace from pixels
clipWidth = pixelWidth / gl.canvas.width * 2
clipHeight = pixelHeight / gl.canvas.height * 2
The first thing I do when debugging a WebGL program that is not drawing is change the fragment shader to the simplest thing possible
gl_FragColor = vec4(1,0,0,1); return;
If I see red where I expect it I know the issue is in the fragcoord. If I don't I know the issue is somewhere else.
Next I might use the WebGL Inspector to see what values I'm passing in the uniforms. For example if this.position.x or this.position.y is greater than 1 or less than -2 then nothing is going to appear because your quad is 0 to 1 so 0 + 1 = 1 which means your quad will be off the right side or top side of the canvas. 1 + -2 = -1 in which case the quad will be off the left or bottom side of the canvas.
Some other things. You set it draw in white gl_FragColor = vec4(1,1,1,1); but the default webpage color is white and the default clear color is 0,0,0,0 (transparent black) which means unless you either (a) made a canvas with no alpha (b) set the clear color somewhere not show or (c) set the CSS for the page or the canvas to a different color you're not going to see anything.
Here's some code that draws quads at pixel locations
"use strict";
window.onload = function() {
// Get A WebGL context
var canvas = document.getElementById("c");
var gl = canvas.getContext("webgl");
if (!gl) {
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
// look up uniform locations
var u_matrixLoc = gl.getUniformLocation(program, "u_matrix");
var u_colorLoc = gl.getUniformLocation(program, "u_color");
// provide texture coordinates for the rectangle.
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
function rand(min, max) {
return min + Math.random() * (max - min) | 0;
}
for (var ii = 0; ii < 300; ++ii) {
var dstX = rand(0, gl.canvas.width - 20);
var dstY = rand(0, gl.canvas.height - 20);
var dstWidth = rand(10, 30);
var dstHeight = rand(10, 30);
// convert dst pixel coords to clipspace coords
var clipX = dstX / gl.canvas.width * 2 - 1;
var clipY = dstY / gl.canvas.height * -2 + 1;
var clipWidth = dstWidth / gl.canvas.width * 2;
var clipHeight = dstHeight / gl.canvas.height * -2;
// build a matrix that will stretch our
// unit quad to our desired size and location
gl.uniformMatrix3fv(u_matrixLoc, false, [
clipWidth, 0, 0,
0, clipHeight, 0,
clipX, clipY, 1,
]);
gl.uniform4f(u_colorLoc, Math.random(), Math.random(), Math.random(), 1);
// Draw the rectangle.
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
}
canvas {
border: 1px solid black;
}
<script src="//webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<canvas id="c"></canvas>
<!-- vertex shader -->
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec2 a_position;
uniform mat3 u_matrix;
void main() {
// The 3x3 matrix carries scale and translation for the 2D position.
gl_Position = vec4(u_matrix * vec3(a_position, 1), 1);
}
</script>
<!-- fragment shader -->
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
// solid fill color for this draw
uniform vec4 u_color;
void main() {
gl_FragColor = u_color;
}
</script>
If you don't get the matrix math here's an article that explains it.
One other suggestion, instead of a conditional on uSamplerCount you might consider just
gl_FragColor = uColor * texture2D(uSampler, vTextureCoord);
Then make a 1 pixel white texture
// 1x1 opaque white texture used as a no-op multiplier when no real texture
// is needed. Fixes two bugs in the original snippet: the ArrayBufferView
// overload of texImage2D requires explicit width/height/border arguments,
// and UNSIGNED_BYTE pixel data must be a Uint8Array (a Float32Array here
// throws INVALID_OPERATION).
whiteTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, whiteTex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
new Uint8Array([255, 255, 255, 255]));
Now whenever you want to draw with a solid color then
gl.bindTexture(gl.TEXTURE_2D, whiteTex);
gl.uniform4f(uColorLocation, r, g, b, a);
Whenever you want to draw with a texture
gl.bindTexture(gl.TEXTURE_2D, someTexture);
gl.uniform4f(uColorLocation, 1, 1, 1, 1);

Categories