Why is it that gl.drawElements needs a rebind while gl.drawArrays doesn't?

Hi guys, I have been studying WebGL these days.
There are two snippets that accomplish the same thing - drawing a square. One uses gl.drawArrays with 6 vertices and the other uses gl.drawElements with 4 vertices.
However, I noticed that when using gl.drawArrays, we can unbind gl.ARRAY_BUFFER before drawing and it doesn't matter. See the snippets.
function initBuffers() {
  /*
    V0                    V3
    (-0.5, 0.5, 0)        (0.5, 0.5, 0)
    X---------------------X
    |                     |
    |                     |
    |        (0, 0)       |
    |                     |
    |                     |
    X---------------------X
    V1                    V2
    (-0.5, -0.5, 0)       (0.5, -0.5, 0)
  */
  const vertices = [
    // first triangle (V0, V1, V2)
    -0.5,  0.5, 0,
    -0.5, -0.5, 0,
     0.5, -0.5, 0,
    // second triangle (V0, V2, V3)
    -0.5,  0.5, 0,
     0.5, -0.5, 0,
     0.5,  0.5, 0
  ];

  // Setting up the VBO
  squareVertexBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
  gl.vertexAttribPointer(program.aVertexPosition, 3, gl.FLOAT, false, 0, 0);
  gl.enableVertexAttribArray(program.aVertexPosition);

  // Clean
  gl.bindBuffer(gl.ARRAY_BUFFER, null);
}

function draw() {
  // Clear the scene
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
  gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

  gl.drawArrays(gl.TRIANGLES, 0, 6);

  // Clean
  gl.bindBuffer(gl.ARRAY_BUFFER, null);
}
initBuffers() is called before draw(). Notice that I have already unbound gl.ARRAY_BUFFER before calling gl.drawArrays, and it still successfully draws the square.
However, when using gl.drawElements, I have to make sure gl.ELEMENT_ARRAY_BUFFER is currently bound to the correct indices, e.g.
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, squareIndexBuffer);
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_SHORT, 0);
If I call gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null); like I did for gl.drawArrays, I have to rebind it with gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, squareIndexBuffer); before calling gl.drawElements.

This is mostly explained in this answer: https://stackoverflow.com/a/27164577/128511
The short version is that gl.drawArrays uses only attributes. Attributes have buffers bound to them when you call gl.vertexAttribPointer. Whatever buffer was bound to gl.ARRAY_BUFFER at the time you call gl.vertexAttribPointer is copied into the state of that attribute.
Attributes themselves are state of the current Vertex Array Object (VAO), as is the current ELEMENT_ARRAY_BUFFER binding. VAOs are an optional extension in WebGL1 and a standard part of WebGL2.
Again refer to this answer: https://stackoverflow.com/a/27164577/128511 and also this answer: https://stackoverflow.com/a/50257695/128511
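To make the difference concrete, here is a minimal sketch (reusing the question's squareVertexBuffer / squareIndexBuffer / indices names) of which binding matters at which point:

// At setup time: the ARRAY_BUFFER binding only matters while calling
// gl.vertexAttribPointer - that call snapshots the currently bound buffer
// into the attribute's state.
gl.bindBuffer(gl.ARRAY_BUFFER, squareVertexBuffer);
gl.vertexAttribPointer(program.aVertexPosition, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(program.aVertexPosition);
gl.bindBuffer(gl.ARRAY_BUFFER, null); // fine: the attribute already remembers the buffer

// At draw time: gl.drawArrays reads only from the attributes, so no
// ARRAY_BUFFER binding is needed here.
gl.drawArrays(gl.TRIANGLES, 0, 6);

// gl.drawElements, however, reads indices from whatever buffer is bound to
// ELEMENT_ARRAY_BUFFER (part of the current VAO's state) at the moment of
// the call, so it must be bound when you draw.
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, squareIndexBuffer);
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_SHORT, 0);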

Related

WebGL gl.triangle make square

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  // position
  -0.9,  0.9,
   0.9,  0.9,
  -0.9, -0.9,
   0.9,  0.9,
  // color
  1, 0, 0, 1,
  0, 1, 0, 1,
  1, 0, 1, 1,
  1, 0, 0, 1
]), gl.STATIC_DRAW);

gl.enableVertexAttribArray(positionLocation);
gl.enableVertexAttribArray(colorLocation);

var size = 2;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = 0;
gl.vertexAttribPointer(positionLocation, size, type, normalize, stride, offset);

var size = 4;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = Float32Array.BYTES_PER_ELEMENT * 8;
gl.vertexAttribPointer(colorLocation, size, type, normalize, stride, offset);
This is a portion of the code (the whole thing is too long to fit here, but the full listing is below). I'm curious why it doesn't draw a square but still draws a triangle. I know that I used gl.TRIANGLES, and I want to use gl.TRIANGLES to draw the square, but I'm not sure which part of this is wrong. I have searched for it, but nobody seems to do the same thing I do (putting positions and colors in the same array).
There's also the part below where count is 3, and I'm not sure what it does (this code was given by my professor so I can turn it into a colored square by changing a few settings; I don't really know how to code OpenGL yet).
// Draw the geometry.
var offset = 0;
var count = 3;
gl.drawArrays(gl.TRIANGLES, offset, count);
Below is the full code:
<!DOCTYPE html>
<html>
<head>
<title>CS299 - Assignment 1.1</title>
<link type="text/css" href="https://webgl2fundamentals.org/webgl/resources/webgl-tutorials.css" rel="stylesheet" />
<!-- css override -->
<style type="text/css">
body { background-color: #CCCCCC; }
#group {background-color: #E8F49F;}
canvas { background-color: #4DC72F; width: 300px; height: 300px; border: 0px; }
.gman-widget-slider {min-width: 200px;}
</style>
</head>
<body>
<canvas id="canvas"></canvas>
</body>
<!-- util functions -->
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
<!-- main WebGL2 code -->
<script>
"use strict";
var vs = `#version 300 es
// an attribute is an input (in) to a vertex shader.
// It will receive data from a buffer
in vec2 a_position;
in vec4 a_color;
// color output from vertex shader to fragment shader
out vec4 v_color;
// all shaders have a main function.
void main() {
// default position output variable
// convert vec2 to vec4
gl_Position = vec4(a_position, 0, 1);
// color passthrough
v_color = a_color;
}
`;
var fs = `#version 300 es
precision highp float;
// color passthrough
in vec4 v_color;
// output color
out vec4 outColor;
void main() {
outColor = v_color;
}
`;
function main() {
// Get A WebGL context
/** @type {HTMLCanvasElement} */
var canvas = document.querySelector("#canvas");
var gl = canvas.getContext("webgl2");
if (!gl) {
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromSources(gl, [vs, fs]);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var colorLocation = gl.getAttribLocation(program, "a_color");
// Create set of attributes
var vao = gl.createVertexArray();
gl.bindVertexArray(vao);
// Create a buffer (formerly called "vertex buffer object", now just "buffer").
var vbo = gl.createBuffer();
// Set Geometry.
gl.bindBuffer(gl.ARRAY_BUFFER, vbo);
// [40%] Modify the code to draw a square instead of a triangle.
// Assign C,M,Y, and K colors to the 4 vertices of the square.
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
// position
-0.9, 0.9,
0.9, 0.9,
-0.9, -0.9,
0.9, 0.9,
// color
1, 0, 0, 1,
0, 1, 0, 1,
1, 0, 1, 1,
1, 0, 0, 1
]), gl.STATIC_DRAW);
// tell the position attribute how to pull data out of the current ARRAY_BUFFER
gl.enableVertexAttribArray(positionLocation);
gl.enableVertexAttribArray(colorLocation);
var size = 2;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = 0;
gl.vertexAttribPointer(positionLocation, size, type, normalize, stride, offset);
var size = 4;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = Float32Array.BYTES_PER_ELEMENT * 8; // must be in bytes
gl.vertexAttribPointer(colorLocation, size, type, normalize, stride, offset);
// Draw the scene.
function drawScene() {
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Clear the canvas
gl.clearColor(0.15, 0.15, 0.15, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Bind the attribute/buffer set we want.
gl.bindVertexArray(vao);
// Draw the geometry.
var offset = 0;
var count = 3;
// [1.5 points] Use gl.TRIANGLE_STRIP instead of gl.TRIANGLES
gl.drawArrays(gl.TRIANGLES, offset, count);
}
drawScene();
}
main();
</script>
<p id="group">Group: 4DC72F</p>
</html>
I would like some hints instead of the answer, if that is OK, because I am trying to learn, but I can't find this approach anywhere on the internet.
In your vertex specification, the coordinate (0.9, 0.9) is duplicated; however, that's not the only problem.
See Triangle primitives. The primitive type gl.TRIANGLES renders, as the name suggests, triangles. For 2 triangles you need 6 vertices (2*3). Each triangle consists of 3 vertices, and the triangles are completely independent and share no vertices, e.g.:
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  // position
  // triangle 1
  -0.9,  0.9,
   0.9,  0.9,
  -0.9, -0.9,
  // triangle 2
   0.9,  0.9,
   0.9, -0.9,
  -0.9, -0.9,
  // color
  // [...]
]), gl.STATIC_DRAW);
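If you go the gl.TRIANGLES route, note that the rest of the setup has to match the 6 vertices: the color data now starts 12 floats into the buffer (6 vertices * 2 components), you need 6 color entries, and the draw count becomes 6. A small sketch of the parts that change, under those assumptions:

// color attribute now starts after 6 positions * 2 components = 12 floats
var offset = Float32Array.BYTES_PER_ELEMENT * 12; // must be in bytes
gl.vertexAttribPointer(colorLocation, 4, gl.FLOAT, false, 0, offset);

// and draw all 6 vertices
gl.drawArrays(gl.TRIANGLES, 0, 6);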
However, you can use the primitive type gl.TRIANGLE_STRIP to draw a single quad with just 4 vertices:
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
  // position
  -0.9,  0.9,
   0.9,  0.9,
  -0.9, -0.9,
   0.9, -0.9,
  // color
  1, 0, 0, 1,
  0, 1, 0, 1,
  1, 0, 1, 1,
  1, 0, 0, 1
]), gl.STATIC_DRAW);

gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

WebGL Framebuffer Multisampling

I know WebGL can antialias or multisample what you render to the screen to avoid hard edges, but when I rendered to a framebuffer it didn't do that anymore and there were a bunch of jagged edges on the screen.
How can I make the framebuffer use multisampling?
This took me a day to figure out, so I thought I should post an example for others to follow. I borrowed the cube animation code below from webgl2fundamentals.org. All I have added to it is the code that antialiases the rendered texture. Make sure the context is initialized with canvas.getContext("webgl2", {antialias: false}); this method won't work with antialiasing on.
To antialias a rendered texture you need a renderbuffer and two framebuffer objects: one (with the multisampled renderbuffer attached) to draw into, and the other (with the texture attached) to resolve the antialiased result into afterwards.
// Create and bind the framebuffer
const FRAMEBUFFER = {
  RENDERBUFFER: 0,
  COLORBUFFER: 1
};
const fb = [
  gl.createFramebuffer(),
  gl.createFramebuffer()
];
const colorRenderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, colorRenderbuffer);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER,
                                  gl.getParameter(gl.MAX_SAMPLES),
                                  gl.RGBA8,
                                  targetTextureWidth,
                                  targetTextureHeight);

gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER,
                           gl.COLOR_ATTACHMENT0,
                           gl.RENDERBUFFER,
                           colorRenderbuffer);

gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.framebufferTexture2D(gl.FRAMEBUFFER,
                        gl.COLOR_ATTACHMENT0,
                        gl.TEXTURE_2D,
                        targetTexture, 0);

gl.bindFramebuffer(gl.FRAMEBUFFER, null);
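One assumption in this setup is that depth testing isn't needed in the offscreen pass. The full example below enables DEPTH_TEST and clears the depth bit, so if you see depth artifacts you may also want a multisampled depth renderbuffer on the same framebuffer (all attachments must use the same sample count). A sketch of that optional addition:

// Optional: give the multisampled framebuffer a depth attachment as well
const depthRenderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthRenderbuffer);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER,
                                  gl.getParameter(gl.MAX_SAMPLES),
                                  gl.DEPTH_COMPONENT16,
                                  targetTextureWidth,
                                  targetTextureHeight);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER,
                           gl.DEPTH_ATTACHMENT,
                           gl.RENDERBUFFER,
                           depthRenderbuffer);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);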
Just before drawing what will become your texture, bind the first of the two framebuffer objects (the one with the multisampled renderbuffer attached).
// render to our targetTexture by binding the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
Then do the texture drawing, and afterwards do the antialiasing resolve, which requires the second framebuffer.
// ... drawing code ...

// "blit" the cube into the color buffer, which adds antialiasing
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
                   0, 0, targetTextureWidth, targetTextureHeight,
                   gl.COLOR_BUFFER_BIT, gl.LINEAR);

// render the top layer to the framebuffer as well
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
Once you have finished drawing the top layer into the buffer, use the same antialiasing method from before, this time setting DRAW_FRAMEBUFFER to null; this tells it to draw to the actual canvas.
// this time render to the default buffer, which is just the canvas
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
                   0, 0, canvas.width, canvas.height,
                   gl.COLOR_BUFFER_BIT, gl.LINEAR);
Here is the finished product:
"use strict";
var vertexShaderSource = `#version 300 es
// an attribute is an input (in) to a vertex shader.
// It will receive data from a buffer
in vec4 a_position;
in vec2 a_texcoord;
// A matrix to transform the positions by
uniform mat4 u_matrix;
// a varying to pass the texture coordinates to the fragment shader
out vec2 v_texcoord;
// all shaders have a main function
void main() {
// Multiply the position by the matrix.
gl_Position = u_matrix * a_position;
// Pass the texcoord to the fragment shader.
v_texcoord = a_texcoord;
}
`;
var fragmentShaderSource = `#version 300 es
precision mediump float;
// Passed in from the vertex shader.
in vec2 v_texcoord;
// The texture.
uniform sampler2D u_texture;
// we need to declare an output for the fragment shader
out vec4 outColor;
void main() {
outColor = texture(u_texture, v_texcoord);
}
`;
function main() {
// Get A WebGL context
/** @type {HTMLCanvasElement} */
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl2", {
antialias: false
});
if (!gl) {
return;
}
// Use our boilerplate utils to compile the shaders and link into a program
var program = webglUtils.createProgramFromSources(gl, [vertexShaderSource, fragmentShaderSource]);
// look up where the vertex data needs to go.
var positionAttributeLocation = gl.getAttribLocation(program, "a_position");
var texcoordAttributeLocation = gl.getAttribLocation(program, "a_texcoord");
// look up uniform locations
var matrixLocation = gl.getUniformLocation(program, "u_matrix");
var textureLocation = gl.getUniformLocation(program, "u_texture");
// Create a buffer
var positionBuffer = gl.createBuffer();
// Create a vertex array object (attribute state)
var vao = gl.createVertexArray();
// and make it the one we're currently working with
gl.bindVertexArray(vao);
// Turn on the attribute
gl.enableVertexAttribArray(positionAttributeLocation);
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Set Geometry.
setGeometry(gl);
// Tell the attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 3; // 3 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionAttributeLocation, size, type, normalize, stride, offset);
// create the texcoord buffer, make it the current ARRAY_BUFFER
// and copy in the texcoord values
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
setTexcoords(gl);
// Turn on the attribute
gl.enableVertexAttribArray(texcoordAttributeLocation);
// Tell the attribute how to get data out of colorBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floating point values
var normalize = true; // convert from 0-255 to 0.0-1.0
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next color
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
texcoordAttributeLocation, size, type, normalize, stride, offset);
// Create a texture.
var texture = gl.createTexture();
// use texture unit 0
gl.activeTexture(gl.TEXTURE0 + 0);
// bind to the TEXTURE_2D bind point of texture unit 0
gl.bindTexture(gl.TEXTURE_2D, texture);
// fill texture with 3x2 pixels
{
const level = 0;
const internalFormat = gl.R8;
const width = 3;
const height = 2;
const border = 0;
const format = gl.RED;
const type = gl.UNSIGNED_BYTE;
const data = new Uint8Array([
128, 64, 128,
0, 192, 0,
]);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border,
format, type, data);
}
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// Create a texture to render to
const targetTextureWidth = 512;
const targetTextureHeight = 512;
const targetTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
{
// define size and format of level 0
const level = 0;
const internalFormat = gl.RGBA;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
targetTextureWidth, targetTextureHeight, border,
format, type, data);
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
// Create and bind the framebuffer
const FRAMEBUFFER = {
RENDERBUFFER: 0,
COLORBUFFER: 1
};
const fb = [gl.createFramebuffer(), gl.createFramebuffer()];
const colorRenderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, colorRenderbuffer);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER, gl.getParameter(gl.MAX_SAMPLES), gl.RGBA8, targetTextureWidth, targetTextureHeight);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, colorRenderbuffer);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
function degToRad(d) {
return d * Math.PI / 180;
}
var fieldOfViewRadians = degToRad(60);
var modelXRotationRadians = degToRad(0);
var modelYRotationRadians = degToRad(0);
// Get the starting time.
var then = 0;
requestAnimationFrame(drawScene);
function drawCube(aspect) {
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Bind the attribute/buffer set we want.
gl.bindVertexArray(vao);
// Compute the projection matrix
var projectionMatrix =
m4.perspective(fieldOfViewRadians, aspect, 1, 2000);
var cameraPosition = [0, 0, 2];
var up = [0, 1, 0];
var target = [0, 0, 0];
// Compute the camera's matrix using look at.
var cameraMatrix = m4.lookAt(cameraPosition, target, up);
// Make a view matrix from the camera matrix.
var viewMatrix = m4.inverse(cameraMatrix);
var viewProjectionMatrix = m4.multiply(projectionMatrix, viewMatrix);
var matrix = m4.xRotate(viewProjectionMatrix, modelXRotationRadians);
matrix = m4.yRotate(matrix, modelYRotationRadians);
// Set the matrix.
gl.uniformMatrix4fv(matrixLocation, false, matrix);
// Tell the shader to use texture unit 0 for u_texture
gl.uniform1i(textureLocation, 0);
// Draw the geometry.
var primitiveType = gl.TRIANGLES;
var offset = 0;
var count = 6 * 6;
gl.drawArrays(primitiveType, offset, count);
}
// Draw the scene.
function drawScene(time) {
// convert to seconds
time *= 0.001;
// Subtract the previous time from the current time
var deltaTime = time - then;
// Remember the current time for the next frame.
then = time;
// Animate the rotation
modelYRotationRadians += -0.7 * deltaTime;
modelXRotationRadians += -0.4 * deltaTime;
//webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
{
// render to our targetTexture by binding the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
// render cube with our 3x2 texture
gl.bindTexture(gl.TEXTURE_2D, texture);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);
// Clear the canvas AND the depth buffer.
gl.clearColor(0, 0, 1, 1); // clear to blue
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const aspect = targetTextureWidth / targetTextureHeight;
drawCube(aspect);
// "blit" the cube into the color buffer, which adds antialiasing
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
0, 0, targetTextureWidth, targetTextureHeight,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
}
{
// render the top layer to the frame buffer as well
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
// render the cube with the texture we just rendered to
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);
// Clear the canvas AND the depth buffer.
gl.clearColor(0.105, 0.105, 0.105, 1); // clear to black
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const aspect = 1;
drawCube(aspect);
// this time render to the default buffer, which is just canvas
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
0, 0, canvas.width, canvas.height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
}
requestAnimationFrame(drawScene);
}
}
// Fill the buffer with the values that define a cube.
function setGeometry(gl) {
var positions = new Float32Array([
  -0.5, -0.5, -0.5,
  -0.5,  0.5, -0.5,
   0.5, -0.5, -0.5,
  -0.5,  0.5, -0.5,
   0.5,  0.5, -0.5,
   0.5, -0.5, -0.5,

  -0.5, -0.5,  0.5,
   0.5, -0.5,  0.5,
  -0.5,  0.5,  0.5,
  -0.5,  0.5,  0.5,
   0.5, -0.5,  0.5,
   0.5,  0.5,  0.5,

  -0.5,  0.5, -0.5,
  -0.5,  0.5,  0.5,
   0.5,  0.5, -0.5,
  -0.5,  0.5,  0.5,
   0.5,  0.5,  0.5,
   0.5,  0.5, -0.5,

  -0.5, -0.5, -0.5,
   0.5, -0.5, -0.5,
  -0.5, -0.5,  0.5,
  -0.5, -0.5,  0.5,
   0.5, -0.5, -0.5,
   0.5, -0.5,  0.5,

  -0.5, -0.5, -0.5,
  -0.5, -0.5,  0.5,
  -0.5,  0.5, -0.5,
  -0.5, -0.5,  0.5,
  -0.5,  0.5,  0.5,
  -0.5,  0.5, -0.5,

   0.5, -0.5, -0.5,
   0.5,  0.5, -0.5,
   0.5, -0.5,  0.5,
   0.5, -0.5,  0.5,
   0.5,  0.5, -0.5,
   0.5,  0.5,  0.5,
]);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}
// Fill the buffer with texture coordinates for the cube.
function setTexcoords(gl) {
gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array(
[
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
]),
gl.STATIC_DRAW);
}
main();
html {
  background-color: #1b1b1b;
}
<canvas id="canvas" width="512" height="512"></canvas>
<!--
for most samples webgl-utils only provides shader compiling/linking and
canvas resizing because why clutter the examples with code that's the same in every sample.
See http://webglfundamentals.org/webgl/lessons/webgl-boilerplate.html
and http://webglfundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
for webgl-utils, m3, m4, and webgl-lessons-ui.
-->
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
<script src="https://webgl2fundamentals.org/webgl/resources/m4.js"></script>
WebGL1 does not support multisampling for framebuffers, so in that case your options are things like rendering to a higher resolution and downsampling when rendering to the canvas, and/or running some post-processing effect to do the anti-aliasing.
WebGL2 does support multisampling for framebuffers. You can call renderbufferStorageMultisample to create a multisampled renderbuffer, and you can call blitFramebuffer to resolve it into the canvas.
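For the WebGL1 case, here is a minimal sketch of the higher-resolution-and-downsample idea. The names fb, targetTexture, renderScene and drawFullScreenQuad are hypothetical placeholders for your own framebuffer, its attached texture, and your drawing helpers:

// WebGL1 fallback: render the scene to a texture that is e.g. 2x the canvas
// size, then draw that texture to the canvas with LINEAR filtering so the
// extra resolution is averaged down (a crude form of supersampling).
var scale = 2;
var fbWidth  = gl.canvas.width  * scale;
var fbHeight = gl.canvas.height * scale;

gl.bindTexture(gl.TEXTURE_2D, targetTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, fbWidth, fbHeight, 0,
              gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

// 1) render the scene into fb at the high resolution
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, fbWidth, fbHeight);
renderScene();                        // hypothetical scene-drawing function

// 2) draw a full-screen quad textured with targetTexture to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
drawFullScreenQuad(targetTexture);    // hypothetical helper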

Setting gl_PointSize dynamically as shader attribute not working in WebGL

I'm trying to change gl_PointSize within my vertex shader depending on an input. Whenever I set, e.g., gl_PointSize = 10.0 explicitly, everything works out fine. But after adding
attribute vec4 vPosition;
attribute vec4 vColor;
attribute float vSize;

varying vec4 fColor;

void main()
{
  fColor = vColor;
  gl_PointSize = vSize;
  gl_Position = vPosition;
}
and calling
var size = new Float32Array([10.0]);
.
.
.
var sizeBufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, sizeBufferId);
gl.bufferData(gl.ARRAY_BUFFER, size, gl.STATIC_DRAW);
var vSize = gl.getAttribLocation(program, "vSize");
gl.vertexAttribPointer(vSize, 1, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vSize);
vSize is 2 after this, so the buffer is created and bound correctly. WebGL Inspector also verified that its content is 10.0.
Still, when calling my draw code
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.POINTS, 0, 2);
I only receive a blank canvas... Where am I wrong?
The rest of my code:
var canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) { alert("WebGL isn't available"); }
var colors = new Float32Array([ 1, 0, 0, 1,
0, 0, 1, 1]);
var vertices = new Float32Array([-0.5, 0,
0.5, 0]);
var size = new Float32Array([10.0]);
// Configure viewport
gl.viewport(0,0,canvas.width,canvas.height);
gl.clearColor(1.0,1.0,1.0,1.0);
// Init shader program and bind it (invoked from another JS-file)
var program = initShaders(gl, "vertex-shader", "fragment-shader");
gl.useProgram(program);
// Load colors into the GPU and associate shader variables
var cBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cBuffer);
gl.bufferData(gl.ARRAY_BUFFER, colors, gl.STATIC_DRAW);
var vColor = gl.getAttribLocation(program, "vColor");
gl.vertexAttribPointer(vColor, 4, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vColor);
// Load positions into the GPU and associate shader variables
var bufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, bufferId);
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
var vPosition = gl.getAttribLocation(program, "vPosition");
gl.vertexAttribPointer(vPosition, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vPosition);
// Load squares' size into the GPU and associate shader variables
var sizeBufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, sizeBufferId);
gl.bufferData(gl.ARRAY_BUFFER, size, gl.STATIC_DRAW);
var vSize = gl.getAttribLocation(program, "vSize");
gl.vertexAttribPointer(vSize, 1, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vSize);
alert("Color var: " + vColor + " | Pos var: " + vPosition + " | Size var: " + vSize);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.POINTS, 0, 2);
I already figured it out myself. A silly and naive mistake, yet easy to overlook:
Calling gl.drawArrays(gl.POINTS, 0, 2) specifies that WebGL has to draw two independent points - each with its own coordinates, color AND point size.
So by changing it to var size = new Float32Array([10.0, 10.0]), WebGL can now get two independent point-size values out of size. Otherwise the draw call would have to read a second size value past the end of the buffer, which WebGL treats as an error, so nothing is drawn.
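In other words, every enabled attribute needs at least as many elements as the vertex count passed to drawArrays. A minimal sketch of the fix, reusing the variable names from the question:

// two points are drawn, so provide two sizes (one per vertex)
var size = new Float32Array([10.0, 10.0]);

var sizeBufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, sizeBufferId);
gl.bufferData(gl.ARRAY_BUFFER, size, gl.STATIC_DRAW);

var vSize = gl.getAttribLocation(program, "vSize");
gl.vertexAttribPointer(vSize, 1, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vSize);

gl.drawArrays(gl.POINTS, 0, 2); // now every attribute covers 2 vertices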

Is there a bug in WebGL polygonOffset or am I missing something?

I am seeing very odd behavior where polygonOffset initially works, but if I re-render it stops working.
I made a simple example to illustrate it. I started with the z-fighting example from Ch7 of the WebGL Programming Guide (https://sites.google.com/site/webglbook/). I then separated out just the rendering portion and wrapped it in a function. I then hooked up an HTML button to call the render() function when clicked. On the first click, the triangles render correctly with no issues. On the second click, it is like polygonOffset is turned off again.
I've tried a number of different variations, including re-enabling every time, disabling and re-enabling, changing the offsets, but I keep getting the same behavior. Any ideas?
I'm including the code, though the snippet won't run here without the book's libraries.
// Zfighting.js (c) 2012 matsuda
// Vertex shader program
var VSHADER_SOURCE =
'attribute vec4 a_Position;\n' +
'attribute vec4 a_Color;\n' +
'uniform mat4 u_ViewProjMatrix;\n' +
'varying vec4 v_Color;\n' +
'void main() {\n' +
' gl_Position = u_ViewProjMatrix * a_Position;\n' +
' v_Color = a_Color;\n' +
'}\n';
// Fragment shader program
var FSHADER_SOURCE =
'#ifdef GL_ES\n' +
'precision mediump float;\n' +
'#endif\n' +
'varying vec4 v_Color;\n' +
'void main() {\n' +
' gl_FragColor = v_Color;\n' +
'}\n';
function main() {
// Retrieve <canvas> element
var canvas = document.getElementById('webgl');
// Get the rendering context for WebGL
var gl = getWebGLContext(canvas);
if (!gl) {
console.log('Failed to get the rendering context for WebGL');
return;
}
// Initialize shaders
if (!initShaders(gl, VSHADER_SOURCE, FSHADER_SOURCE)) {
console.log('Failed to intialize shaders.');
return;
}
// Set the vertex coordinates and color (the blue triangle is in the front)
var n = initVertexBuffers(gl);
if (n < 0) {
console.log('Failed to set the vertex information');
return;
}
//Set clear color and enable the hidden surface removal function
gl.clearColor(0, 0, 0, 1);
gl.enable(gl.DEPTH_TEST);
// Get the storage locations of u_ViewProjMatrix
var u_ViewProjMatrix = gl.getUniformLocation(gl.program, 'u_ViewProjMatrix');
if (!u_ViewProjMatrix) {
console.log('Failed to get the storage locations of u_ViewProjMatrix');
return;
}
var viewProjMatrix = new Matrix4();
// Set the eye point, look-at point, and up vector.
viewProjMatrix.setPerspective(30, canvas.width/canvas.height, 1, 100);
viewProjMatrix.lookAt(3.06, 2.5, 10.0, 0, 0, -2, 0, 1, 0);
// Pass the view projection matrix to u_ViewProjMatrix
gl.uniformMatrix4fv(u_ViewProjMatrix, false, viewProjMatrix.elements);
// Enable the polygon offset function
gl.enable(gl.POLYGON_OFFSET_FILL);
function render() {
// Clear color and depth buffer
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Draw the triangles
gl.drawArrays(gl.TRIANGLES, 0, n/2); // The green triangle
gl.polygonOffset(1.0, 1.0); // Set the polygon offset
gl.drawArrays(gl.TRIANGLES, n/2, n/2); // The yellow triangle
}
document.getElementById("button").onclick = render;
}
function initVertexBuffers(gl) {
var verticesColors = new Float32Array([
// Vertex coordinates and color
0.0, 2.5, -5.0, 0.4, 1.0, 0.4, // The green triangle
-2.5, -2.5, -5.0, 0.4, 1.0, 0.4,
2.5, -2.5, -5.0, 1.0, 0.4, 0.4,
0.0, 3.0, -5.0, 1.0, 0.4, 0.4, // The yellow triangle
-3.0, -3.0, -5.0, 1.0, 1.0, 0.4,
3.0, -3.0, -5.0, 1.0, 1.0, 0.4,
]);
var n = 6;
// Create a buffer object
var vertexColorbuffer = gl.createBuffer();
if (!vertexColorbuffer) {
console.log('Failed to create the buffer object');
return -1;
}
// Write the vertex coordinates and color to the buffer object
gl.bindBuffer(gl.ARRAY_BUFFER, vertexColorbuffer);
gl.bufferData(gl.ARRAY_BUFFER, verticesColors, gl.STATIC_DRAW);
var FSIZE = verticesColors.BYTES_PER_ELEMENT;
// Assign the buffer object to a_Position and enable the assignment
var a_Position = gl.getAttribLocation(gl.program, 'a_Position');
if(a_Position < 0) {
console.log('Failed to get the storage location of a_Position');
return -1;
}
gl.vertexAttribPointer(a_Position, 3, gl.FLOAT, false, FSIZE * 6, 0);
gl.enableVertexAttribArray(a_Position);
// Assign the buffer object to a_Color and enable the assignment
var a_Color = gl.getAttribLocation(gl.program, 'a_Color');
if(a_Color < 0) {
console.log('Failed to get the storage location of a_Color');
return -1;
}
gl.vertexAttribPointer(a_Color, 3, gl.FLOAT, false, FSIZE * 6, FSIZE * 3);
gl.enableVertexAttribArray(a_Color);
return n;
}
<canvas id="webgl" width="400" height="400">
Please use a browser that supports "canvas"
</canvas>
<input type="button" id="button" />
You need to reset the polygon offset or disable/re-enable POLYGON_OFFSET_FILL; otherwise, on every render after the first, both triangles are offset by the same amount.
GPUs (and WebGL) are state machines; you're in charge of managing the state (variables):
function render() {
  // Clear color and depth buffer
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
  // Draw the triangles
  gl.polygonOffset(0.0, 0.0);            // Reset the polygon offset
  gl.drawArrays(gl.TRIANGLES, 0, n/2);   // The green triangle
  gl.polygonOffset(1.0, 1.0);            // Set the polygon offset
  gl.drawArrays(gl.TRIANGLES, n/2, n/2); // The yellow triangle
}
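Alternatively, a sketch of the disable/re-enable variant mentioned above, under the same assumptions as the book's example:

function render() {
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
  gl.disable(gl.POLYGON_OFFSET_FILL);    // no offset for the green triangle
  gl.drawArrays(gl.TRIANGLES, 0, n/2);
  gl.enable(gl.POLYGON_OFFSET_FILL);     // offset only the yellow triangle
  gl.polygonOffset(1.0, 1.0);
  gl.drawArrays(gl.TRIANGLES, n/2, n/2);
}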

webgl, texture coordinates and obj

I'm finding it difficult to understand the correlation between vertex and texture coordinates when the data is rendered. I have a cube being drawn using drawElements from data parsed from an OBJ file. I got textures somewhere close to working with a simple plane, where the numbers of vertex positions and texture coordinates match, but once I use a more complex model, or even just a more complex UV unwrap, I end up with the texture going all wrong.
From what I've read there doesn't seem to be a way of using texture coordinate indices the same way you would for vertex positions, which is unfortunate because the OBJ has that information. The way I've gotten it close to working was by building an array of texture coordinates from the index data in the OBJ. But because the lengths of the vertex and texture coordinate arrays differ (for example, in an OBJ for a cube there are 8 vertices and up to 36 texture coordinates, depending on how the mesh is unwrapped), they don't correlate.
What is the correct workflow for using drawElements and mapping each vertex to its correct texture coordinates?
You are correct, you can not easily use different indices for different attributes (in your case positions and texture coordinates).
A common example is a cube. If you want to render a cube with lighting you need normals. There are only 8 positions on a cube but each face of the cube needs 3 different normals for the same positions, one normal for each face that shares that position. That means you need 24 vertices total, 4 for each of the 6 faces of the cube.
If you have a file format that has separate indices for different attributes you'll need to expand them out so that each unique combination of attributes (position, normal, texture coord, etc..) is in your buffers.
Most game engines would do this kind of thing offline. In other words, they'd write some tool that reads the OBJ file, expands the various attributes, and then writes the data back out pre-expanded. That's because generating the expanded data can be time consuming at runtime for a large model if you're trying to optimize the data and only keep unique vertices.
If you don't care about optimal data then just expand based on the indices. The number of indices for each type of attribute should be the same.
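A minimal sketch of that expansion follows. The data layout here is hypothetical (real OBJ faces use 1-based v/vt/vn triplets, so adjust the parsing accordingly): walk the face corners, look up each referenced position and texcoord, and push them into new, parallel arrays so a single index (or none at all) works for both attributes.

// objPositions: flat [x, y, z, ...], objTexcoords: flat [u, v, ...]
// faceVerts: one entry per face corner, e.g. { p: positionIndex, t: texcoordIndex }
function expandAttributes(objPositions, objTexcoords, faceVerts) {
  const positions = [];
  const texcoords = [];
  for (const v of faceVerts) {
    positions.push(
      objPositions[v.p * 3 + 0],
      objPositions[v.p * 3 + 1],
      objPositions[v.p * 3 + 2]);
    texcoords.push(
      objTexcoords[v.t * 2 + 0],
      objTexcoords[v.t * 2 + 1]);
  }
  // Every corner is now a unique vertex, so you can call gl.drawArrays,
  // or build a trivial 0..N-1 index buffer for gl.drawElements.
  return { positions, texcoords };
}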
Note: positions are not special. I bring this up because you said there doesn't seem to be a way of using texture coordinate indices the same way you would for vertex positions. WebGL has no concept of "positions". It just has attributes which describe how to pull data out of buffers. What's in those attributes (positions, normals, random data, whatever), is up to you. gl.drawElements indexes the entire combination of attributes you supply. If you pass in an index of 7 it's going to give you element 7 of each attribute.
Note that the above is describing how pretty much all 3d engines written in WebGL work. That said you can get creative if you really want to.
Here's a program that stores positions and normals in textures. It then puts the indices in buffers. Because textures are random access it can therefore have different indices for positions and normals
var canvas = document.getElementById("c");
var gl = canvas.getContext("webgl");
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("need OES_texture_float extension cause I'm lazy");
//return;
}
if (gl.getParameter(gl.MAX_VERTEX_TEXTURE_IMAGE_UNITS) < 2) {
alert("need to be able to access textures from vertex shaders");
//return;
}
var m4 = twgl.m4;
var v3 = twgl.v3;
var programInfo = twgl.createProgramInfo(gl, ["vshader", "fshader"]);
// Cube data
var positions = [
  -1, -1, -1,  // 0 lbb
  +1, -1, -1,  // 1 rbb      2---3
  -1, +1, -1,  // 2 ltb     /|  /|
  +1, +1, -1,  // 3 rtb    6---7 |
  -1, -1, +1,  // 4 lbf    | | | |
  +1, -1, +1,  // 5 rbf    | 0-|-1
  -1, +1, +1,  // 6 ltf    |/  |/
  +1, +1, +1,  // 7 rtf    4---5
];
var positionIndices = [
3, 7, 5, 3, 5, 1, // right
6, 2, 0, 6, 0, 4, // left
6, 7, 3, 6, 3, 2, // top
0, 1, 5, 0, 5, 4, // bottom
7, 6, 4, 7, 4, 5, // front
2, 3, 1, 2, 1, 0, // back
];
var normals = [
+1, 0, 0,
-1, 0, 0,
0, +1, 0,
0, -1, 0,
0, 0, +1,
0, 0, -1,
]
var normalIndices = [
0, 0, 0, 0, 0, 0, // right
1, 1, 1, 1, 1, 1, // left
2, 2, 2, 2, 2, 2, // top
3, 3, 3, 3, 3, 3, // bottom
4, 4, 4, 4, 4, 4, // front
5, 5, 5, 5, 5, 5, // back
];
function degToRad(deg) {
return deg * Math.PI / 180;
}
var bufferInfo = twgl.createBufferInfoFromArrays(gl, {
a_positionIndex: { size: 1, data: positionIndices },
a_normalIndex: { size: 1, data: normalIndices, },
});
var textures = twgl.createTextures(gl, {
positions: {
format: gl.RGB,
type: gl.FLOAT,
height: 1,
src: positions,
min: gl.NEAREST,
mag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
},
normals: {
format: gl.RGB,
type: gl.FLOAT,
height: 1,
src: normals,
min: gl.NEAREST,
mag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
},
});
var xRot = degToRad(30);
var yRot = degToRad(20);
var lightDir = v3.normalize([-0.2, -0.1, 0.5]);
function draw(time) {
time *= 0.001; // convert to seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
yRot = time;
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
gl.useProgram(programInfo.program);
var persp = m4.perspective(
degToRad(45),
gl.canvas.clientWidth / gl.canvas.clientHeight,
0.1, 100.0);
var mat = m4.identity();
mat = m4.translate(mat, [0.0, 0.0, -5.0]);
mat = m4.rotateX(mat, xRot);
mat = m4.rotateY(mat, yRot);
var uniforms = {
u_positions: textures.positions,
u_positionsSize: [positions.length / 3, 1],
u_normals: textures.normals,
u_normalsSize: [normals.length / 3, 1],
u_mvpMatrix: m4.multiply(persp, mat),
u_mvMatrix: mat,
u_color: [0.5, 0.8, 1, 1],
u_lightDirection: lightDir,
};
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
twgl.setUniforms(programInfo, uniforms);
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(draw);
}
requestAnimationFrame(draw);
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="//twgljs.org/dist/2.x/twgl-full.min.js"></script>
<script id="vshader" type="whatever">
attribute float a_positionIndex;
attribute float a_normalIndex;
attribute vec4 a_pos;
uniform sampler2D u_positions;
uniform vec2 u_positionsSize;
uniform sampler2D u_normals;
uniform vec2 u_normalsSize;
uniform mat4 u_mvpMatrix;
uniform mat4 u_mvMatrix;
varying vec3 v_normal;
// to index the value in the texture we need to
// compute a texture coordinate that will access
// the correct texel. To do that we need access from
// the middle of the first texel to the middle of the
// last texel.
//
// In other words if we had 3 values (and therefore
// 3 texels) we'd have something like this
//
// ---------- 3x1 texels ----------
//     [         ][         ][         ]
// 0.0 |<------------------------------>| 1.0
//
// If we just did index / numValues we'd get
//
//     [         ][         ][         ]
//     |          |          |
//    0.0       0.333      0.666
//
// Which is right between texels, so we add
// a halfTexel to get this
//
//     [         ][         ][         ]
//          |          |          |
//        0.167       0.5       0.833
// note: In WebGL2 we could just use `textureFetch`
// which takes integer pixel locations
vec2 texCoordFromIndex(const float index, const vec2 textureSize) {
vec2 colRow = vec2(
mod(index, textureSize.x), // columm
floor(index / textureSize.x)); // row
return vec2((colRow + 0.5) / textureSize);
}
void main() {
vec2 ptc = texCoordFromIndex(a_positionIndex, u_positionsSize);
vec3 position = texture2D(u_positions, ptc).rgb;
vec2 ntc = texCoordFromIndex(a_normalIndex, u_normalsSize);
vec3 normal = texture2D(u_normals, ntc).rgb;
gl_Position = u_mvpMatrix * vec4(position, 1);
v_normal = (u_mvMatrix * vec4(normal, 0)).xyz;
}
</script>
<script id="fshader" type="whatever">
precision mediump float;
uniform vec4 u_color;
uniform vec3 u_lightDirection;
varying vec3 v_normal;
void main() {
float light = dot(
normalize(v_normal), u_lightDirection) * 0.5 + 0.5;
gl_FragColor = vec4(u_color.rgb * light, u_color.a);
}
</script>
<canvas id="c"></canvas>
