WebGL Framebuffer Multisampling - javascript

I know WebGL can antialias (multisample) what you render directly to the screen to avoid hard edges, but when I rendered through a framebuffer it didn't do it anymore and there were a bunch of jagged edges on the screen.
How can I make the framebuffer use multisampling?

This took me a day to figure out, so I thought I should post an example for others to follow. I borrowed the cube animation code below from webgl2fundamentals.org; all I have added to it is the code that antialiases the render-to-texture pass. Make sure the context is initialized with canvas.getContext("webgl2", {antialias: false}); this method won't work with the built-in antialiasing turned on.
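For reference, a minimal sketch of that context setup (the "canvas" id is just the one used in the snippet at the bottom):
const canvas = document.getElementById("canvas");
// the built-in canvas antialiasing must be off for this technique to work
const gl = canvas.getContext("webgl2", { antialias: false });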
To antialias a generated texture you need to initialize a renderbuffer and two framebuffer objects: one multisampled framebuffer to draw into, and a second one that the antialiased (resolved) image is blitted into afterwards.
// Create and bind the framebuffer
const FRAMEBUFFER = {
  RENDERBUFFER: 0,
  COLORBUFFER: 1
};
const fb = [gl.createFramebuffer(), gl.createFramebuffer()];
const colorRenderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, colorRenderbuffer);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER, gl.getParameter(gl.MAX_SAMPLES), gl.RGBA8, targetTextureWidth, targetTextureHeight);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, colorRenderbuffer);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
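As an optional sanity check (not part of the original answer), you can ask WebGL whether each framebuffer is complete before using it:
for (const f of fb) {
  gl.bindFramebuffer(gl.FRAMEBUFFER, f);
  if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
    console.error("framebuffer is incomplete");
  }
}
gl.bindFramebuffer(gl.FRAMEBUFFER, null);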
Just before drawing what will become your texture, bind the first of the two framebuffers (the one with the multisampled renderbuffer attached).
// render to our targetTexture by binding the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
Then do the texture drawing, and afterwards do the antialiasing step, which requires the second framebuffer.
// ... drawing code ...
//
// "blit" the cube into the color buffer, which adds antialiasing
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
0, 0, targetTextureWidth, targetTextureHeight,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
// render the top layer to the framebuffer as well
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
Once you have finished drawing the top layer into the buffer, use the same antialiasing method from before, this time setting DRAW_FRAMEBUFFER to null; this tells it to draw to the actual canvas.
// this time render to the default buffer, which is just canvas
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
0, 0, canvas.width, canvas.height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
Here is the finished product:
"use strict";
var vertexShaderSource = `#version 300 es
// an attribute is an input (in) to a vertex shader.
// It will receive data from a buffer
in vec4 a_position;
in vec2 a_texcoord;
// A matrix to transform the positions by
uniform mat4 u_matrix;
// a varying to pass the texture coordinates to the fragment shader
out vec2 v_texcoord;
// all shaders have a main function
void main() {
// Multiply the position by the matrix.
gl_Position = u_matrix * a_position;
// Pass the texcoord to the fragment shader.
v_texcoord = a_texcoord;
}
`;
var fragmentShaderSource = `#version 300 es
precision mediump float;
// Passed in from the vertex shader.
in vec2 v_texcoord;
// The texture.
uniform sampler2D u_texture;
// we need to declare an output for the fragment shader
out vec4 outColor;
void main() {
outColor = texture(u_texture, v_texcoord);
}
`;
function main() {
// Get A WebGL context
/** @type {HTMLCanvasElement} */
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl2", {
antialias: false
});
if (!gl) {
return;
}
// Use our boilerplate utils to compile the shaders and link into a program
var program = webglUtils.createProgramFromSources(gl, [vertexShaderSource, fragmentShaderSource]);
// look up where the vertex data needs to go.
var positionAttributeLocation = gl.getAttribLocation(program, "a_position");
var texcoordAttributeLocation = gl.getAttribLocation(program, "a_texcoord");
// look up uniform locations
var matrixLocation = gl.getUniformLocation(program, "u_matrix");
var textureLocation = gl.getUniformLocation(program, "u_texture");
// Create a buffer
var positionBuffer = gl.createBuffer();
// Create a vertex array object (attribute state)
var vao = gl.createVertexArray();
// and make it the one we're currently working with
gl.bindVertexArray(vao);
// Turn on the attribute
gl.enableVertexAttribArray(positionAttributeLocation);
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Set Geometry.
setGeometry(gl);
// Tell the attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 3; // 3 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionAttributeLocation, size, type, normalize, stride, offset);
// create the texcoord buffer, make it the current ARRAY_BUFFER
// and copy in the texcoord values
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
setTexcoords(gl);
// Turn on the attribute
gl.enableVertexAttribArray(texcoordAttributeLocation);
// Tell the attribute how to get data out of texcoordBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floating point values
var normalize = true; // convert from 0-255 to 0.0-1.0
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next color
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
texcoordAttributeLocation, size, type, normalize, stride, offset);
// Create a texture.
var texture = gl.createTexture();
// use texture unit 0
gl.activeTexture(gl.TEXTURE0 + 0);
// bind to the TEXTURE_2D bind point of texture unit 0
gl.bindTexture(gl.TEXTURE_2D, texture);
// fill texture with 3x2 pixels
{
const level = 0;
const internalFormat = gl.R8;
const width = 3;
const height = 2;
const border = 0;
const format = gl.RED;
const type = gl.UNSIGNED_BYTE;
const data = new Uint8Array([
128, 64, 128,
0, 192, 0,
]);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border,
format, type, data);
}
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// Create a texture to render to
const targetTextureWidth = 512;
const targetTextureHeight = 512;
const targetTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
{
// define size and format of level 0
const level = 0;
const internalFormat = gl.RGBA;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
targetTextureWidth, targetTextureHeight, border,
format, type, data);
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
// Create and bind the framebuffer
const FRAMEBUFFER = {
RENDERBUFFER: 0,
COLORBUFFER: 1
};
const fb = [gl.createFramebuffer(), gl.createFramebuffer()];
const colorRenderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, colorRenderbuffer);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER, gl.getParameter(gl.MAX_SAMPLES), gl.RGBA8, targetTextureWidth, targetTextureHeight);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, colorRenderbuffer);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
function degToRad(d) {
return d * Math.PI / 180;
}
var fieldOfViewRadians = degToRad(60);
var modelXRotationRadians = degToRad(0);
var modelYRotationRadians = degToRad(0);
// Get the starting time.
var then = 0;
requestAnimationFrame(drawScene);
function drawCube(aspect) {
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Bind the attribute/buffer set we want.
gl.bindVertexArray(vao);
// Compute the projection matrix
var projectionMatrix =
m4.perspective(fieldOfViewRadians, aspect, 1, 2000);
var cameraPosition = [0, 0, 2];
var up = [0, 1, 0];
var target = [0, 0, 0];
// Compute the camera's matrix using look at.
var cameraMatrix = m4.lookAt(cameraPosition, target, up);
// Make a view matrix from the camera matrix.
var viewMatrix = m4.inverse(cameraMatrix);
var viewProjectionMatrix = m4.multiply(projectionMatrix, viewMatrix);
var matrix = m4.xRotate(viewProjectionMatrix, modelXRotationRadians);
matrix = m4.yRotate(matrix, modelYRotationRadians);
// Set the matrix.
gl.uniformMatrix4fv(matrixLocation, false, matrix);
// Tell the shader to use texture unit 0 for u_texture
gl.uniform1i(textureLocation, 0);
// Draw the geometry.
var primitiveType = gl.TRIANGLES;
var offset = 0;
var count = 6 * 6;
gl.drawArrays(primitiveType, offset, count);
}
// Draw the scene.
function drawScene(time) {
// convert to seconds
time *= 0.001;
// Subtract the previous time from the current time
var deltaTime = time - then;
// Remember the current time for the next frame.
then = time;
// Animate the rotation
modelYRotationRadians += -0.7 * deltaTime;
modelXRotationRadians += -0.4 * deltaTime;
//webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
{
// render to our targetTexture by binding the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
// render cube with our 3x2 texture
gl.bindTexture(gl.TEXTURE_2D, texture);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);
// Clear the canvas AND the depth buffer.
gl.clearColor(0, 0, 1, 1); // clear to blue
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const aspect = targetTextureWidth / targetTextureHeight;
drawCube(aspect);
// "blit" the cube into the color buffer, which adds antialiasing
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
0, 0, targetTextureWidth, targetTextureHeight,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
}
{
// render the top layer to the frame buffer as well
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
// render the cube with the texture we just rendered to
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);
// Clear the canvas AND the depth buffer.
gl.clearColor(0.105, 0.105, 0.105, 1); // clear to dark gray
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const aspect = 1;
drawCube(aspect);
// this time render to the default buffer, which is just canvas
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
0, 0, canvas.width, canvas.height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
}
requestAnimationFrame(drawScene);
}
}
// Fill the buffer with the values that define a cube.
function setGeometry(gl) {
var positions = new Float32Array([
-0.5, -0.5, -0.5,
-0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
-0.5, 0.5, -0.5,
0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, 0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, 0.5,
-0.5, 0.5, -0.5,
-0.5, 0.5, 0.5,
0.5, 0.5, -0.5,
-0.5, 0.5, 0.5,
0.5, 0.5, 0.5,
0.5, 0.5, -0.5,
-0.5, -0.5, -0.5,
0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, -0.5, 0.5,
0.5, -0.5, -0.5,
0.5, -0.5, 0.5,
-0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, 0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
0.5, 0.5, -0.5,
0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, -0.5,
0.5, 0.5, 0.5,
]);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}
// Fill the buffer with texture coordinates for the cube.
function setTexcoords(gl) {
gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array(
[
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
]),
gl.STATIC_DRAW);
}
main();
html
{
background-color: #1b1b1b;
}
<canvas id="canvas" width="512" height="512"></canvas>
<!--
for most samples webgl-utils only provides shader compiling/linking and
canvas resizing because why clutter the examples with code that's the same in every sample.
See http://webglfundamentals.org/webgl/lessons/webgl-boilerplate.html
and http://webglfundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
for webgl-utils, m3, m4, and webgl-lessons-ui.
-->
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
<script src="https://webgl2fundamentals.org/webgl/resources/m4.js"></script>

WebGL1 does not support multisampling for framebuffers, so in that case your options are things like rendering to a higher resolution and downsampling when rendering to the canvas, and/or running a post-processing effect to do the anti-aliasing.
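A minimal sketch of that WebGL1 "render bigger, then downsample" idea, assuming drawScene and drawFullScreenQuad are your own drawing helpers and 2x is an arbitrary factor:
// render into a texture larger than the canvas, then draw it back down
// with LINEAR filtering so the scale-down smooths the edges
const scale = 2; // assumption; higher costs more fill rate
const w = canvas.width * scale;
const h = canvas.height * scale;
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, w, h, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const fbo = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
gl.viewport(0, 0, w, h);
drawScene();                     // draw at the higher resolution
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.width, canvas.height);
drawFullScreenQuad(tex);         // LINEAR filtering does the downsample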
WebGL2 does support multisampling for framebuffers. You can call renderbufferStorageMultisample to create a multisampled renderbuffer, and you can call blitFramebuffer to resolve it into the canvas.
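Condensed to its essentials, that WebGL2 path looks roughly like this (width and height are assumed to match the canvas, and 4 is just a sample count the device is assumed to support; the accepted answer above shows the full version):
const rb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER, 4, gl.RGBA8, width, height);
const msFbo = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, msFbo);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, rb);
// ... draw the scene into msFbo ...
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, msFbo);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);   // null = the canvas
gl.blitFramebuffer(0, 0, width, height, 0, 0, width, height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);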

Related

WebGL gl.triangle make square

gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
// position
-0.9, 0.9,
0.9, 0.9,
-0.9, -0.9,
0.9, 0.9,
// color
1, 0, 0, 1,
0, 1, 0, 1,
1, 0, 1, 1,
1, 0, 0, 1
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.enableVertexAttribArray(colorLocation);
var size = 2;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = 0;
gl.vertexAttribPointer(positionLocation, size, type, normalize, stride, offset);
var size = 4;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = Float32Array.BYTES_PER_ELEMENT * 8;
gl.vertexAttribPointer(colorLocation, size, type, normalize, stride, offset);
This is only a portion of the code (the full code is too long to fit), so I will post the part I'm curious about: why didn't it draw a square, but still a triangle? I know that I used gl.TRIANGLES, but I want to try using gl.TRIANGLES to draw a square, and I'm not sure which part of this is wrong. I have searched about it, but no one does the same thing as I do (the part where I put the positions and colors in the same array).
There's also this part where count is 3, and I'm not sure what it does (this code was given by my professor for me to turn into a colored square by changing a few settings, so I do not know how to code OpenGL yet).
// Draw the geometry.
var offset = 0;
var count = 3;
gl.drawArrays(gl.TRIANGLES, offset, count);
Below is the full code:
<!DOCTYPE html>
<html>
<head>
<title>CS299 - Assignment 1.1</title>
<link type="text/css" href="https://webgl2fundamentals.org/webgl/resources/webgl-tutorials.css" rel="stylesheet" />
<!-- css override -->
<style type="text/css">
body { background-color: #CCCCCC; }
#group {background-color: #E8F49F;}
canvas { background-color: #4DC72F; width: 300px; height: 300px; border: 0px; }
.gman-widget-slider {min-width: 200px;}
</style>
</head>
<body>
<canvas id="canvas"></canvas>
</body>
<!-- util functions -->
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
<!-- main WebGL2 code -->
<script>
"use strict";
var vs = `#version 300 es
// an attribute is an input (in) to a vertex shader.
// It will receive data from a buffer
in vec2 a_position;
in vec4 a_color;
// color output from vertex shader to fragment shader
out vec4 v_color;
// all shaders have a main function.
void main() {
// default position output variable
// convert vec2 to vec4
gl_Position = vec4(a_position, 0, 1);
// color passthrough
v_color = a_color;
}
`;
var fs = `#version 300 es
precision highp float;
// color passthrough
in vec4 v_color;
// output color
out vec4 outColor;
void main() {
outColor = v_color;
}
`;
function main() {
// Get A WebGL context
/** @type {HTMLCanvasElement} */
var canvas = document.querySelector("#canvas");
var gl = canvas.getContext("webgl2");
if (!gl) {
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromSources(gl, [vs, fs]);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var colorLocation = gl.getAttribLocation(program, "a_color");
// Create set of attributes
var vao = gl.createVertexArray();
gl.bindVertexArray(vao);
// Create a buffer (formerly called "vertex buffer object", now just "buffer").
var vbo = gl.createBuffer();
// Set Geometry.
gl.bindBuffer(gl.ARRAY_BUFFER, vbo);
// [40%] Modify the code to draw a square instead of a triangle.
// Assign C,M,Y, and K colors to the 4 vertices of the square.
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
// position
-0.9, 0.9,
0.9, 0.9,
-0.9, -0.9,
0.9, 0.9,
// color
1, 0, 0, 1,
0, 1, 0, 1,
1, 0, 1, 1,
1, 0, 0, 1
]), gl.STATIC_DRAW);
// tell the position attribute how to pull data out of the current ARRAY_BUFFER
gl.enableVertexAttribArray(positionLocation);
gl.enableVertexAttribArray(colorLocation);
var size = 2;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = 0;
gl.vertexAttribPointer(positionLocation, size, type, normalize, stride, offset);
var size = 4;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = Float32Array.BYTES_PER_ELEMENT * 8; // must be in bytes
gl.vertexAttribPointer(colorLocation, size, type, normalize, stride, offset);
// Draw the scene.
function drawScene() {
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Clear the canvas
gl.clearColor(0.15, 0.15, 0.15, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Bind the attribute/buffer set we want.
gl.bindVertexArray(vao);
// Draw the geometry.
var offset = 0;
var count = 3;
// [1.5 points] Use gl.TRIANGLE_STRIP instead of gl.TRIANGLES
gl.drawArrays(gl.TRIANGLES, offset, count);
}
drawScene();
}
main();
</script>
<p id="group">Group: 4DC72F</p>
</html>
I would like some hints instead of the answer, if that is OK, because I am trying to learn, but I can't find this method anywhere on the internet.
In your vertex specification the coordinate (0.9, 0.9) is duplicated; however, that's not the only problem.
See Triangle primitives. The primitive type gl.TRIANGLES renders, as the name suggests, triangles. For 2 triangles you need 6 vertices (2 * 3). Each triangle consists of 3 vertices, and the triangles are completely independent and have no common vertices, e.g.:
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
// position
// triangle 1
-0.9, 0.9,
0.9, 0.9,
-0.9, -0.9,
// triangle 2
0.9, 0.9,
0.9, -0.9,
-0.9, -0.9,
// color
// [...]
]), gl.STATIC_DRAW);
However you can use the primitive type gl.TRIANGLE_STRIP to draw a single quad:
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
// position
-0.9, 0.9,
0.9, 0.9,
-0.9, -0.9,
0.9, -0.9,
// color
1, 0, 0, 1,
0, 1, 0, 1,
1, 0, 1, 1,
1, 0, 0, 1
]), gl.STATIC_DRAW);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

Black screen when rendering to a texture

I'm trying to do the following:
Draw the scene in a first pass to a framebuffer I've created.
Take the texture that was attached to the created framebuffer and draw that onto a plane so that it can be displayed on screen.
Do some post-processing.
Just using the default framebuffer the scene looks like this:
Currently I'm unable to get parts 1 & 2 working. All I get is a black screen. However, the plane is placed in the scene correctly (confirmed by looking at a wireframe using gl.LINE_STRIP). I'm unsure if this is due to a mistake I've made with the code, or a lack of understanding of how framebuffers work (webgl is new to me).
Here's the relevant code excerpts:
// ======== FRAMEBUFFER PHASE ======== //
const framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl.RGB,
canvas.clientWidth,
canvas.clientHeight,
0,
gl.RGB,
gl.UNSIGNED_BYTE,
null // don't fill it with pixel data just yet
);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
texture,
0
);
// ======== END FRAMEBUFFER PHASE ======== //
// =========== RENDERBUFFER PHASE ============== //
const renderBuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, renderBuffer);
gl.renderbufferStorage(
gl.RENDERBUFFER,
gl.DEPTH_STENCIL,
canvas.clientWidth,
canvas.clientHeight
);
gl.framebufferRenderbuffer(
gl.FRAMEBUFFER,
gl.DEPTH_STENCIL_ATTACHMENT,
gl.RENDERBUFFER,
renderBuffer
);
// =========== END RENDERBUFFER PHASE ============== //
// =========== CHECK FRAMEBUFFER STATUS ============== //
const framebufferState = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (framebufferState !== gl.FRAMEBUFFER_COMPLETE) {
throw new Error(
`Framebuffer status is not complete: ${framebufferState}`
);
}
// =========== END CHECK FRAMEBUFFER STATUS ============== //
// =========== FIRST PASS RENDERING ============ //
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.clearColor(0.1, 0.1, 0.1, 1.0);
gl.clear(gl.DEPTH_BUFFER_BIT | gl.COLOR_BUFFER_BIT);
gl.enable(gl.DEPTH_TEST);
// this sets up the green quad and draws it to the screen
const objectModel = setupObjectModel({
position: [100.0, -10.0, 0.0],
colour: [0.734345265462, 0.89624528765, 0.9868589658, 1.0],
gl,
canvas,
});
objectModel.draw({
shaderProgram: mainShaderProgram,
camera: updatedCamera,
currentTime,
deltaTime,
});
// =========== END FIRST PASS RENDERING ============ //
// =========== SECOND PASS RENDERING ============ //
// back to rendering with the default framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.clearColor(1.0, 1.0, 1.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.disable(gl.DEPTH_TEST);
gl.useProgram(frameBufferShaderProgram);
// prettier-ignore
const verts = [
// positions // texCoords
-1.0, 1.0, 0.0, 1.0,
-1.0, -1.0, 0.0, 0.0,
1.0, -1.0, 1.0, 0.0,
-1.0, 1.0, 0.0, 1.0,
1.0, -1.0, 1.0, 0.0,
1.0, 1.0, 1.0, 1.0
];
// prettier-ignore-end
const screenQuad = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, screenQuad);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
const aPosAttributeLocation = gl.getAttribLocation(
frameBufferShaderProgram,
"aPos"
);
gl.enableVertexAttribArray(aPosAttributeLocation);
gl.vertexAttribPointer(
aPosAttributeLocation,
2,
gl.FLOAT,
false,
Float32Array.BYTES_PER_ELEMENT * 4,
0
);
const aTexCoordsAttributeLocation = gl.getAttribLocation(
frameBufferShaderProgram,
"aTexCoords"
);
gl.enableVertexAttribArray(aTexCoordsAttributeLocation);
gl.vertexAttribPointer(
aTexCoordsAttributeLocation,
2,
gl.FLOAT,
false,
Float32Array.BYTES_PER_ELEMENT * 4,
Float32Array.BYTES_PER_ELEMENT * 2
);
const screenTexture = gl.getUniformLocation(
frameBufferShaderProgram,
"screenTexture"
);
gl.uniform1i(screenTexture, 0);
gl.drawArrays(gl.TRIANGLES, 0, 6);
And here is the framebuffer shader program:
// vertex shader
precision mediump float;
attribute vec2 aPos;
attribute vec2 aTexCoords;
varying vec2 TexCoords;
void main() {
gl_Position = vec4(aPos.x, aPos.y, 0.0, 1.0);
TexCoords = aTexCoords;
}
// fragment shader
precision mediump float;
uniform sampler2D screenTexture;
varying vec2 TexCoords;
void main() {
// the texture coordinates are fine here, it's the screen texture that's the issue
gl_FragColor = texture2D(screenTexture, TexCoords.xy);
}
This is a common mistake. WebGL 1.0 is based on OpenGL ES 2.0. The same rules apply to texture framebuffer attachments as to mipmaps: the size of a framebuffer texture must be a power of 2. See Texture Completeness and Non-Power-Of-Two Textures.
Create a framebuffer with a size equal to a power of 2 (e.g. 1024x1024):
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
// [...]
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1024, 1024, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
// [...]
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_STENCIL, 1024, 1024);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_STENCIL_ATTACHMENT, gl.RENDERBUFFER, renderBuffer);
// [...]
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.viewport(0, 0, 1024, 1024);
// [...]
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.clientWidth, canvas.clientHeight);
// [...]

I am trying to draw a rotating square in WebGL but not getting it, plus I need to add a point light source

"use strict";
var canvas;
var gl;
var axis = 0;
var xAxis = 0;
var yAxis = 1;
var zAxis = 2;
var theta = [0, 0, 0];
var thetaLoc;
var flag = true;
var numElements = 36;
var vertices = [
vec3(-0.5, -0.5, 0.5),
vec3(-0.5, 0.5, 0.5),
vec3(0.5, 0.5, 0.5),
vec3(0.5, -0.5, 0.5),
vec3(-0.5, -0.5, -0.5),
vec3(-0.5, 0.5, -0.5),
vec3(0.5, 0.5, -0.5),
vec3(0.5, -0.5, -0.5)
];
var vertexColors = [
vec4(0.0, 0.0, 0.0, 1.0), // black
vec4(1.0, 0.0, 0.0, 1.0), // red
vec4(1.0, 1.0, 0.0, 1.0), // yellow
vec4(0.0, 1.0, 0.0, 1.0), // green
vec4(0.0, 0.0, 1.0, 1.0), // blue
vec4(1.0, 0.0, 1.0, 1.0), // magenta
vec4(1.0, 1.0, 1.0, 1.0), // white
vec4(0.0, 1.0, 1.0, 1.0) // cyan
];
// indices of the 12 triangles that compose the cube
var indices = [
1, 0, 3,
3, 2, 1,
2, 3, 7,
7, 6, 2,
3, 0, 4,
4, 7, 3,
6, 5, 1,
1, 2, 6,
4, 5, 6,
6, 7, 4,
5, 4, 0,
0, 1, 5
];
window.onload = function init()
{
// --------------- Cube --------------------------
canvas = document.getElementById("gl-canvas");
gl = canvas.getContext('webgl2');
if (!gl) alert("WebGL 2.0 isn't available");
gl.viewport(0, 0, canvas.width - 50, canvas.height - 50);
gl.clearColor(1.0, 1.0, 1.0, 1.0);
gl.enable(gl.DEPTH_TEST);
// Load shaders and initialize attribute buffers
// Create a buffer object, initialise it, and associate it
// with the associated attribute variable in our vertex shader
var programCube = initShaders(gl, "vertex-shader", "fragment-shader");
gl.useProgram(programCube);
// array element buffer
var iBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices), gl.STATIC_DRAW);
// color array atrribute buffer
var cBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cBuffer);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertexColors), gl.STATIC_DRAW);
var colorLoc = gl.getAttribLocation(programCube, "aColor");
gl.vertexAttribPointer(colorLoc, 4, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(colorLoc);
// vertex array attribute buffer
var vBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices), gl.STATIC_DRAW);
var positionLoc = gl.getAttribLocation( programCube, "aPosition");
gl.vertexAttribPointer(positionLoc, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(positionLoc );
thetaLoc = gl.getUniformLocation(programCube, "uTheta");
render();
//event listeners for buttons
document.getElementById( "xButton" ).onclick = function () {
axis = xAxis;
};
document.getElementById( "yButton" ).onclick = function () {
axis = yAxis;
};
document.getElementById( "zButton" ).onclick = function () {
axis = zAxis;
};
document.getElementById("ButtonT").onclick = function(){flag = !flag;};
}
function render()
{
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if(flag) theta[axis] += 2.0;
gl.uniform3fv(thetaLoc, theta);
gl.drawElements(gl.TRIANGLES, numElements, gl.UNSIGNED_BYTE, 0);
requestAnimationFrame(render);
}
Still, I need to add the below:
Set up the surface normals for the objects being shown in the scene.
Properly set up different material properties for the objects being shown in the scene.
Properly show two different light sources at fixed locations that are used to properly illustrate the objects in the scene, and can be turned on and off independently.
Properly set up the lights above with an ambient light source component that is used to properly illustrate the objects in the scene and can be turned from totally dark to intense brightness using a dial or a slider (from 0, total darkness, to 100, total brightness, with a default somewhere in between).

WebGL: Rendering two objects with separate textures

I am currently working on a simple project in WebGL that requires rendering multiple 2D objects with a simple image textured on. The actual project generates a random number of objects, usually between 5 and 9, sets vertex data for them around the canvas to separate them, and is supposed to render, however it will only render one at a time (usually the last but I can change around gl.activeTexture to show other objects in the array). I tried to use a question on here about texture arrays in the shader, but to no avail, so I ended up creating a very simple test program that just tries to load two objects and textures, one on the left of the canvas and the other on the right.
From here I tried to do everything completely separately, even giving each object its own shaders, program, buffers, and everything, and subsequently binding everything in the draw call for each before calling gl.drawElements for each. This still doesn't show me the correct result; only the second texture appears. However, it did lead me to discover what I believe is happening. By commenting out the bindings and draw call for the second one, the first texture shows up, but it appears at the location of the second texture, not where its vertices should be placing it. So I assume what is happening in this program (and my project code) is that it is in fact drawing both, but for some reason applying the vertices of the last drawn one to all of them, thus stacking them and only showing the top (or last drawn) one.
I have also tried a mishmash of tweaks to the below code, using only one program, using the same indices, texture coordinates, there are some commented out lines from trying to make calls in different orders as well. Anything commented out doesn't mean I necessarily think it is wrong or right, just from various things I've aimlessly tried at this point.
I have worked with OpenGL a little and had little to no trouble drawing multiple objects with their own textures, and I know that WebGL works differently than OpenGL in some ways including textures, but I do not see where I am creating the issue. I'm sure it is something very simple, and any guidance would be greatly appreciated.
I apologize for the long block of code, it's pretty much just straight typing everything out that I believe to be needed without trying to take any shortcuts. The initShaders call is from the WebGL js files I'm using from my textbook and isn't something I've written, and the loadImage call just simply loads an <img> from the html code. There are no issues with the images being loaded correctly as far as I can tell. I only included the first vertex and fragment shader because the other two are the same save for the id.
<script id="vertex-shader1" type="x-shader/x-vertex">
attribute vec4 vPosition;
attribute vec2 vTexCoord;
varying vec2 fTexCoord;
void main() {
fTexCoord = vTexCoord;
gl_Position = vPosition;
}
</script>
<script id="fragment-shader1" type="x-shader/x-fragment">
precision mediump float;
varying vec2 fTexCoord;
uniform sampler2D texture;
void main() {
gl_FragColor = texture2D(texture, fTexCoord);
}
</script>
"use-strict"
var gl;
var images = [];
var program1;
var program2;
var texture1;
var texture2;
var vBuff1;
var vBuff2;
var iBuff1;
var iBuff2;
var tBuff1;
var tBuff2;
var vPos1;
var vPos2;
var fTexCoord1;
var fTexCoord2;
var sampler1;
var sampler2;
var vertices1 = [
vec4(-0.8, 0.1, 0.0, 1.0),
vec4(-0.8, 0.3, 0.0, 1.0),
vec4(-0.6, 0.3, 0.0, 1.0),
vec4(-0.6, 0.1, 0.0, 1.0)
];
var vertices2 = [
vec4(0.1, 0.1, 0.0, 1.0),
vec4(0.1, 0.3, 0.0, 1.0),
vec4(0.3, 0.3, 0.0, 1.0),
vec4(0.3, 0.1, 0.0, 1.0)
];
var indices1 = [
0, 1, 2,
0, 2, 3
];
var indices2 = [
0, 1, 2,
0, 2, 3
];
var tcs1 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
var tcs2 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
window.onload = function init() {
var canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) { alert("WebGL isn't available"); }
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
loadImages();
program1 = initShaders(gl, "vertex-shader1", "fragment-shader1");
gl.useProgram(program1);
vBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices1), gl.STATIC_DRAW);
vPos1 = gl.getAttribLocation(program1, "vPosition");
gl.vertexAttribPointer(vPos1, 4, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(vPos1);
iBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices1), gl.STATIC_DRAW);
tBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs1), gl.STATIC_DRAW);
fTexCoord1 = gl.getAttribLocation(program1, "vTexCoord");
gl.vertexAttribPointer(fTexCoord1, 2, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(fTexCoord1);
sampler1 = gl.getUniformLocation(program1, "texture");
texture1 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[0]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
///////////////////////////////////////////////////////////////////////////////////////
/*
program2 = initShaders(gl, "vertex-shader2", "fragment-shader2");
gl.useProgram(program2);
*/
vBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices2), gl.STATIC_DRAW);
vPos2 = gl.getAttribLocation(program1, "vPosition");
gl.vertexAttribPointer(vPos2, 4, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(vPos2);
iBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices2), gl.STATIC_DRAW);
tBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs2), gl.STATIC_DRAW);
fTexCoord2 = gl.getAttribLocation(program1, "vTexCoord");
gl.vertexAttribPointer(fTexCoord2, 2, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(fTexCoord2);
sampler2 = gl.getUniformLocation(program1, "texture");
texture2 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[1]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
render();
};
function render() {
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(program1);
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.enableVertexAttribArray(vPos1);
gl.enableVertexAttribArray(fTexCoord1);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.uniform1i(sampler1, 0);
// gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
// gl.enableVertexAttribArray(vPos1);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.drawElements(gl.TRIANGLES, indices1.length, gl.UNSIGNED_BYTE, 0);
//gl.bindTexture(gl.TEXTURE_2D, null);
// gl.useProgram(program2);
gl.bindBuffer(gl.ARRAY_BUFFER,vBuff2);
gl.enableVertexAttribArray(vPos2);
gl.enableVertexAttribArray(fTexCoord2);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.uniform1i(sampler2, 0);
// gl.bindBuffer(gl.ARRAY_BUFFER, vBuff2);
// gl.enableVertexAttribArray(vPos2);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.drawElements(gl.TRIANGLES, indices2.length, gl.UNSIGNED_BYTE, 0);
requestAnimFrame(render);
}
First off, AFAIK your code can't work. It calls a function loadImages and then immediately uses the images. Images load asynchronously in the browser, so you need to either have a callback for when the images load or else use async functions.
Here is your code working. First I made a loadImage that returns a Promise. Then I made an async function called loadImages that uses it to load all the images and wait for them to load. Then I made another async function called main that first waits for loadImages and then calls init.
The second issue is that in WebGL1 attributes are global state. That means you need to set them at render time, not init time, so the calls to gl.enableVertexAttribArray and gl.vertexAttribPointer need to happen at render time with the appropriate values for the particular thing you are rendering. gl.vertexAttribPointer copies the current ARRAY_BUFFER binding to that attribute.
You might find these tutorials helpful, in particular this one about attributes, and this state diagram that might help you visualize what is happening inside WebGL.
"use-strict";
const vec2 = (...args) => [...args];
const vec4 = (...args) => [...args];
const flatten = a => new Float32Array(a.flat());
const WebGLUtils = {
setupWebGL: (canvas) => { return canvas.getContext('webgl'); },
};
const initShaders = (gl, vs, fs) => twgl.createProgram(gl, [vs, fs]);
const requestAnimFrame = requestAnimationFrame;
var gl;
var images = [];
var program1;
var program2;
var texture1;
var texture2;
var vBuff1;
var vBuff2;
var iBuff1;
var iBuff2;
var tBuff1;
var tBuff2;
var vPos1;
var vPos2;
var fTexCoord1;
var fTexCoord2;
var sampler1;
var sampler2;
var vertices1 = [
vec4(-0.8, 0.1, 0.0, 1.0),
vec4(-0.8, 0.3, 0.0, 1.0),
vec4(-0.6, 0.3, 0.0, 1.0),
vec4(-0.6, 0.1, 0.0, 1.0)
];
var vertices2 = [
vec4(0.1, 0.1, 0.0, 1.0),
vec4(0.1, 0.3, 0.0, 1.0),
vec4(0.3, 0.3, 0.0, 1.0),
vec4(0.3, 0.1, 0.0, 1.0)
];
var indices1 = [
0, 1, 2,
0, 2, 3
];
var indices2 = [
0, 1, 2,
0, 2, 3
];
var tcs1 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
var tcs2 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
function init() {
var canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) { alert("WebGL isn't available"); }
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
program1 = initShaders(gl, "vertex-shader1", "fragment-shader1");
gl.useProgram(program1);
vBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices1), gl.STATIC_DRAW);
vPos1 = gl.getAttribLocation(program1, "vPosition");
iBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices1), gl.STATIC_DRAW);
tBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs1), gl.STATIC_DRAW);
fTexCoord1 = gl.getAttribLocation(program1, "vTexCoord");
sampler1 = gl.getUniformLocation(program1, "texture");
texture1 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[0]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
///////////////////////////////////////////////////////////////////////////////////////
/*
program2 = initShaders(gl, "vertex-shader2", "fragment-shader2");
gl.useProgram(program2);
*/
vBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices2), gl.STATIC_DRAW);
vPos2 = gl.getAttribLocation(program1, "vPosition");
iBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices2), gl.STATIC_DRAW);
tBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs2), gl.STATIC_DRAW);
fTexCoord2 = gl.getAttribLocation(program1, "vTexCoord");
gl.vertexAttribPointer(fTexCoord2, 2, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(fTexCoord2);
sampler2 = gl.getUniformLocation(program1, "texture");
texture2 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[1]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
render();
};
function render() {
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(program1);
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.enableVertexAttribArray(vPos1);
gl.vertexAttribPointer(vPos1, 4, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff1);
gl.enableVertexAttribArray(fTexCoord1);
gl.vertexAttribPointer(fTexCoord1, 2, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.uniform1i(sampler1, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.drawElements(gl.TRIANGLES, indices1.length, gl.UNSIGNED_BYTE, 0);
gl.bindBuffer(gl.ARRAY_BUFFER,vBuff2);
gl.enableVertexAttribArray(vPos2);
gl.vertexAttribPointer(vPos2, 4, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.uniform1i(sampler2, 0);
gl.bindBuffer(gl.ARRAY_BUFFER,tBuff2);
gl.enableVertexAttribArray(fTexCoord2);
gl.vertexAttribPointer(fTexCoord2, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.drawElements(gl.TRIANGLES, indices2.length, gl.UNSIGNED_BYTE, 0);
requestAnimFrame(render);
}
function loadImage(url) {
return new Promise((resolve, reject) => {
const img = new Image();
img.onload = () => resolve(img);
img.onerror = reject;
img.crossOrigin = 'anonymous';
img.src = url;
});
}
async function loadImages(imgs) {
images = await Promise.all(imgs.map(loadImage));
}
async function main() {
await loadImages([
'https://webglfundamentals.org/webgl/resources/f-texture.png',
'https://webglfundamentals.org/webgl/lessons/resources/noodles-01.jpg',
]);
init();
}
main();
<script id="vertex-shader1" type="x-shader/x-vertex">
attribute vec4 vPosition;
attribute vec2 vTexCoord;
varying vec2 fTexCoord;
void main() {
fTexCoord = vTexCoord;
gl_Position = vPosition;
}
</script>
<script id="fragment-shader1" type="x-shader/x-fragment">
precision mediump float;
varying vec2 fTexCoord;
uniform sampler2D texture;
void main() {
gl_FragColor = texture2D(texture, fTexCoord);
}
</script>
<canvas id="gl-canvas"></canvas>
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>

create WebGL texture out of RGBA values

I tried to do this based on this answer. This is my code:
<canvas id='canvas' width='500' height='500' style='border: solid 1px black'></canvas>
<script>
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
var texture = gl.createTexture();
var data = new Uint8Array([128, 128, 0, 1]);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.bindTexture(gl.TEXTURE_2D, texture);
</script>
But all I can see is a white box with a black outline.
What am I doing wrong?
Thank you!
The code you have there just creates a texture and loads it into GPU memory. So you're not actually drawing anything to the canvas.
To get that texture on screen you'll need quite a few more things. Here is your code with the rest added:
// helper function for loading shader sources
function loadShaderSource(gl, id) {
var script = document.getElementById(id);
var source = "";
var child = script.firstChild;
while (child) {
if (child.nodeType == child.TEXT_NODE) {
source += child.textContent;
}
child = child.nextSibling;
}
return source;
}
// setup an OpenGL context
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
// build the vertex shader
var vertexShaderSource = loadShaderSource(gl, "shader-vertex");
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderSource);
gl.compileShader(vertexShader);
// build the fragment shader
var fragmentShaderSource = loadShaderSource(gl, "shader-fragment");
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderSource);
gl.compileShader(fragmentShader);
// build a shader program from the vertex and fragment shader
var shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
gl.useProgram(shaderProgram);
// define vertex positions
var vertexPositions = new Float32Array([
1.0, 1.0, 0, // a
-1.0, 1.0, 0, // b b----a
1.0, -1.0, 0, // c | |
-1.0, 1.0, 0, // b | |
-1.0, -1.0, 0, // d d----c
1.0, -1.0, 0 // c
]);
// send the vertex positions to the GPU
var vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexPositions, gl.STATIC_DRAW);
// define vertex texcoords
var vertexTexcoords = new Float32Array([
1.0, 0.0, // a
0.0, 0.0, // b
1.0, 1.0, // c
0.0, 0.0, // b
0.0, 1.0, // d
1.0, 1.0 // c
]);
// send the vertex texcoords to the GPU
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexTexcoords, gl.STATIC_DRAW);
// wire up the shader program to the vertex position data
var positionAttribute = gl.getAttribLocation(shaderProgram, "position");
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.enableVertexAttribArray(positionAttribute);
gl.vertexAttribPointer(positionAttribute, 3, gl.FLOAT, false, 0, 0);
// wire up the shader program to the vertex texcoord data
var texcoordAttribute = gl.getAttribLocation(shaderProgram, "texcoord");
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
gl.enableVertexAttribArray(texcoordAttribute);
gl.vertexAttribPointer(texcoordAttribute, 2, gl.FLOAT, false, 0, 0);
// generate and send texture data to the GPU
var textureData = new Uint8Array([128, 128, 0, 255]);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, textureData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
// wire up the shader program to the texture data
var imageUniform = gl.getUniformLocation(shaderProgram, "image")
gl.uniform1i(imageUniform, 0);
// tell the GPU to draw
gl.drawArrays(gl.TRIANGLES, 0, 6);
<canvas id='canvas' width='300' height='200' style='border: solid 1px black'></canvas>
<script id="shader-vertex" type="x-shader/x-vertex">
attribute vec3 position;
attribute vec2 texcoord;
varying highp vec2 uv;
void main() {
gl_Position = vec4(position, 1);
uv = texcoord;
}
</script>
<script id="shader-fragment" type="x-shader/x-fragment">
varying highp vec2 uv;
uniform sampler2D image;
void main() {
gl_FragColor = texture2D(image, uv);
}
</script>
I realize that is a TON of info to grok so I recommend reading through some guides. A good place to start might be MDN: Getting started with WebGL.
