Black screen when rendering to a texture - javascript

I'm trying to do the following:
Draw the scene in a first pass to a framebuffer I've created.
Take the texture that was attached to the created framebuffer and draw that onto a plane so that it can be displayed on screen.
Do some post-processing.
Just using the default framebuffer the scene looks like this:
Currently I'm unable to get parts 1 & 2 working. All I get is a black screen. However, the plane is placed in the scene correctly (confirmed by looking at a wireframe using gl.LINE_STRIP). I'm unsure if this is due to a mistake I've made with the code, or a lack of understanding of how framebuffers work (webgl is new to me).
Here's the relevant code excerpts:
// ======== FRAMEBUFFER PHASE ======== //
const framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
// Colour attachment: an empty canvas-sized texture the first pass renders
// into and the second pass samples from.
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl.RGB,
canvas.clientWidth,
canvas.clientHeight,
0,
gl.RGB,
gl.UNSIGNED_BYTE,
null // don't fill it with pixel data just yet
);
// NOTE(review): TEXTURE_WRAP_S/T are left at their default, REPEAT. In WebGL1
// a non-power-of-two texture with REPEAT wrapping is texture-incomplete and
// samples as black -- the likely cause of the black screen (see the answer
// below). Use power-of-two dimensions or set CLAMP_TO_EDGE on both axes.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
texture,
0
);
// ======== END FRAMEBUFFER PHASE ======== //
// =========== RENDERBUFFER PHASE ============== //
// Combined depth+stencil storage so the first pass can depth-test;
// its size must match the colour attachment.
const renderBuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, renderBuffer);
gl.renderbufferStorage(
gl.RENDERBUFFER,
gl.DEPTH_STENCIL,
canvas.clientWidth,
canvas.clientHeight
);
gl.framebufferRenderbuffer(
gl.FRAMEBUFFER,
gl.DEPTH_STENCIL_ATTACHMENT,
gl.RENDERBUFFER,
renderBuffer
);
// =========== END RENDERBUFFER PHASE ============== //
// =========== CHECK FRAMEBUFFER STATUS ============== //
const framebufferState = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (framebufferState !== gl.FRAMEBUFFER_COMPLETE) {
throw new Error(
`Framebuffer status is not complete: ${framebufferState}`
);
}
// =========== END CHECK FRAMEBUFFER STATUS ============== //
// =========== FIRST PASS RENDERING ============ //
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.clearColor(0.1, 0.1, 0.1, 1.0);
gl.clear(gl.DEPTH_BUFFER_BIT | gl.COLOR_BUFFER_BIT);
gl.enable(gl.DEPTH_TEST);
// NOTE(review): if the framebuffer size ever differs from the canvas size,
// a gl.viewport(0, 0, <fb width>, <fb height>) call is needed here.
// this sets up the green quad and draws it to the screen
const objectModel = setupObjectModel({
position: [100.0, -10.0, 0.0],
colour: [0.734345265462, 0.89624528765, 0.9868589658, 1.0],
gl,
canvas,
});
objectModel.draw({
shaderProgram: mainShaderProgram,
camera: updatedCamera,
currentTime,
deltaTime,
});
// =========== END FIRST PASS RENDERING ============ //
// =========== SECOND PASS RENDERING ============ //
// back to rendering with the default framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.clearColor(1.0, 1.0, 1.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.disable(gl.DEPTH_TEST);
gl.useProgram(frameBufferShaderProgram);
// Interleaved full-screen quad: vec2 clip-space position + vec2 texcoord,
// two triangles, stride of 4 floats per vertex.
// prettier-ignore
const verts = [
// positions // texCoords
-1.0, 1.0, 0.0, 1.0,
-1.0, -1.0, 0.0, 0.0,
1.0, -1.0, 1.0, 0.0,
-1.0, 1.0, 0.0, 1.0,
1.0, -1.0, 1.0, 0.0,
1.0, 1.0, 1.0, 1.0
];
// prettier-ignore-end
const screenQuad = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, screenQuad);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
const aPosAttributeLocation = gl.getAttribLocation(
frameBufferShaderProgram,
"aPos"
);
gl.enableVertexAttribArray(aPosAttributeLocation);
gl.vertexAttribPointer(
aPosAttributeLocation,
2,
gl.FLOAT,
false,
Float32Array.BYTES_PER_ELEMENT * 4,
0
);
const aTexCoordsAttributeLocation = gl.getAttribLocation(
frameBufferShaderProgram,
"aTexCoords"
);
gl.enableVertexAttribArray(aTexCoordsAttributeLocation);
gl.vertexAttribPointer(
aTexCoordsAttributeLocation,
2,
gl.FLOAT,
false,
Float32Array.BYTES_PER_ELEMENT * 4,
Float32Array.BYTES_PER_ELEMENT * 2
);
// NOTE(review): nothing rebinds `texture` to unit 0 before this draw; it
// relies on the texture still being bound from the setup above. An explicit
// gl.activeTexture(gl.TEXTURE0) + gl.bindTexture(gl.TEXTURE_2D, texture)
// here would make the dependency explicit.
const screenTexture = gl.getUniformLocation(
frameBufferShaderProgram,
"screenTexture"
);
gl.uniform1i(screenTexture, 0);
gl.drawArrays(gl.TRIANGLES, 0, 6);
And here is the framebuffer shader program:
// vertex shader
precision mediump float;
attribute vec2 aPos;       // clip-space position of the full-screen quad
attribute vec2 aTexCoords; // texture coordinate for sampling the scene texture
varying vec2 TexCoords;
void main() {
gl_Position = vec4(aPos.x, aPos.y, 0.0, 1.0);
TexCoords = aTexCoords;
}
// fragment shader
precision mediump float;
uniform sampler2D screenTexture; // colour attachment rendered in the first pass
varying vec2 TexCoords;
void main() {
// the texture coordinates are fine here, it's the screen texture that's the issue
gl_FragColor = texture2D(screenTexture, TexCoords.xy);
}

This is a common mistake. WebGL 1.0 is based on OpenGL ES 2.0, and the same completeness rules apply to framebuffer-attached textures as to mipmapped textures: with the default REPEAT wrap mode, a texture that you later sample from must have power-of-2 dimensions. See Texture Completeness and Non-Power-Of-Two Textures.
Create a framebuffer with a size equal to a power of 2 (e.g. 1024x1024):
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
// [...]
// Power-of-two colour attachment (1024x1024) so the texture is complete
// under WebGL1's sampling rules.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1024, 1024, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
// [...]
// The depth/stencil renderbuffer must match the colour attachment's size.
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_STENCIL, 1024, 1024);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_STENCIL_ATTACHMENT, gl.RENDERBUFFER, renderBuffer);
// [...]
// First pass: the viewport must cover the framebuffer, not the canvas.
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.viewport(0, 0, 1024, 1024);
// [...]
// Second pass: back to the canvas-sized default framebuffer.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.clientWidth, canvas.clientHeight);
// [...]

Related

WebGL: Rendering two objects with separate textures

I am currently working on a simple project in WebGL that requires rendering multiple 2D objects with a simple image textured on. The actual project generates a random number of objects, usually between 5 and 9, sets vertex data for them around the canvas to separate them, and is supposed to render, however it will only render one at a time (usually the last but I can change around gl.activeTexture to show other objects in the array). I tried to use a question on here about texture arrays in the shader, but to no avail, so I ended up creating a very simple test program that just tries to load two objects and textures, one on the left of the canvas and the other on the right.
From here I tried to do everything completely separate, even giving each object their own shaders, programs, buffers and everything, and subsequently binding everything in the draw call for each before calling gl.drawElements for each. This still doesn't show me the correct result, only the second texture appears, however it did lead me to discover what I believe to be happening. By commenting out the bindings and draw call for the second one, the first texture shows up, however it appears at the location of the second texture, not where it's vertices should be placing it. So, I assume what is happening in this program (and my project code) is that it is in fact drawing both, but for some reason applying the vertices of the last drawn one to all of them, thus stacking them and only showing the top (or last drawn one).
I have also tried a mishmash of tweaks to the below code, using only one program, using the same indices, texture coordinates, there are some commented out lines from trying to make calls in different orders as well. Anything commented out doesn't mean I necessarily think it is wrong or right, just from various things I've aimlessly tried at this point.
I have worked with OpenGL a little and had little to no trouble drawing multiple objects with their own textures, and I know that WebGL works differently than OpenGL in some ways including textures, but I do not see where I am creating the issue. I'm sure it is something very simple, and any guidance would be greatly appreciated.
I apologize for the long block of code, it's pretty much just straight typing everything out that I believe to be needed without trying to take any shortcuts. The initShaders call is from the WebGL js files I'm using from my textbook and isn't something I've written, and the loadImage call just simply loads an <img> from the html code. There are no issues with the images being loaded correctly as far as I can tell. I only included the first vertex and fragment shader because the other two are the same save for the id.
<script id="vertex-shader1" type="x-shader/x-vertex">
attribute vec4 vPosition;
attribute vec2 vTexCoord;
varying vec2 fTexCoord;
void main() {
fTexCoord = vTexCoord;
gl_Position = vPosition;
}
</script>
<script id="fragment-shader1" type="x-shader/x-fragment">
precision mediump float;
varying vec2 fTexCoord;
uniform sampler2D texture;
void main() {
gl_FragColor = texture2D(texture, fTexCoord);
}
</script>
"use-strict"
var gl;
var images = [];
var program1;
var program2;
var texture1;
var texture2;
var vBuff1;
var vBuff2;
var iBuff1;
var iBuff2;
var tBuff1;
var tBuff2;
var vPos1;
var vPos2;
var fTexCoord1;
var fTexCoord2;
var sampler1;
var sampler2;
var vertices1 = [
vec4(-0.8, 0.1, 0.0, 1.0),
vec4(-0.8, 0.3, 0.0, 1.0),
vec4(-0.6, 0.3, 0.0, 1.0),
vec4(-0.6, 0.1, 0.0, 1.0)
];
var vertices2 = [
vec4(0.1, 0.1, 0.0, 1.0),
vec4(0.1, 0.3, 0.0, 1.0),
vec4(0.3, 0.3, 0.0, 1.0),
vec4(0.3, 0.1, 0.0, 1.0)
];
var indices1 = [
0, 1, 2,
0, 2, 3
];
var indices2 = [
0, 1, 2,
0, 2, 3
];
var tcs1 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
var tcs2 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
window.onload = function init() {
var canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) { alert("WebGL isn't available"); }
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
loadImages();
program1 = initShaders(gl, "vertex-shader1", "fragment-shader1");
gl.useProgram(program1);
vBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices1), gl.STATIC_DRAW);
vPos1 = gl.getAttribLocation(program1, "vPosition");
gl.vertexAttribPointer(vPos1, 4, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(vPos1);
iBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices1), gl.STATIC_DRAW);
tBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs1), gl.STATIC_DRAW);
fTexCoord1 = gl.getAttribLocation(program1, "vTexCoord");
gl.vertexAttribPointer(fTexCoord1, 2, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(fTexCoord1);
sampler1 = gl.getUniformLocation(program1, "texture");
texture1 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[0]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
///////////////////////////////////////////////////////////////////////////////////////
/*
program2 = initShaders(gl, "vertex-shader2", "fragment-shader2");
gl.useProgram(program2);
*/
vBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices2), gl.STATIC_DRAW);
vPos2 = gl.getAttribLocation(program1, "vPosition");
gl.vertexAttribPointer(vPos2, 4, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(vPos2);
iBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices2), gl.STATIC_DRAW);
tBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs2), gl.STATIC_DRAW);
fTexCoord2 = gl.getAttribLocation(program1, "vTexCoord");
gl.vertexAttribPointer(fTexCoord2, 2, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(fTexCoord2);
sampler2 = gl.getUniformLocation(program1, "texture");
texture2 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[1]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
render();
};
function render() {
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(program1);
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.enableVertexAttribArray(vPos1);
gl.enableVertexAttribArray(fTexCoord1);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.uniform1i(sampler1, 0);
// gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
// gl.enableVertexAttribArray(vPos1);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.drawElements(gl.TRIANGLES, indices1.length, gl.UNSIGNED_BYTE, 0);
//gl.bindTexture(gl.TEXTURE_2D, null);
// gl.useProgram(program2);
gl.bindBuffer(gl.ARRAY_BUFFER,vBuff2);
gl.enableVertexAttribArray(vPos2);
gl.enableVertexAttribArray(fTexCoord2);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.uniform1i(sampler2, 0);
// gl.bindBuffer(gl.ARRAY_BUFFER, vBuff2);
// gl.enableVertexAttribArray(vPos2);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.drawElements(gl.TRIANGLES, indices2.length, gl.UNSIGNED_BYTE, 0);
requestAnimFrame(render);
}
First off, AFAIK your code can't work. It calls a function loadImages and then immediately uses the images. Images load asynchronously in the browser, so you need to either have a callback for when the images load or else use async functions.
Here is your code working. First I made a loadImage function that returns a Promise. Then I made an async function called loadImages that uses it to load all the images and wait for them to finish loading. Then I made another async function called main which first awaits loadImages and then calls init.
The second issue was that in WebGL1 attributes are global state. That means you need to set them at render time, not init time, so the calls to gl.enableVertexAttribArray and gl.vertexAttribPointer need to happen at render time with the appropriate values for the particular thing you are rendering. gl.vertexAttribPointer copies the current ARRAY_BUFFER binding to that attribute.
you might find these tutorials helpful and in particular this one about attributes and this state diagram that might help you visualize what is happening inside WebGL
// Shims so the textbook-style helpers used in the question run in this snippet.
"use strict"; // was "use-strict": an inert string literal, not the strict-mode directive
const vec2 = (...args) => [...args];
const vec4 = (...args) => [...args];
// Pack an array of vec2/vec4 arrays into a tight Float32Array for bufferData.
const flatten = (a) => new Float32Array(a.flat());
const WebGLUtils = {
setupWebGL: (canvas) => { return canvas.getContext('webgl'); },
};
// Delegate shader compilation/linking to twgl (loaded via the script tag in the snippet).
const initShaders = (gl, vs, fs) => twgl.createProgram(gl, [vs, fs]);
const requestAnimFrame = requestAnimationFrame;
var gl;
var images = [];
var program1;
var program2;
var texture1;
var texture2;
var vBuff1;
var vBuff2;
var iBuff1;
var iBuff2;
var tBuff1;
var tBuff2;
var vPos1;
var vPos2;
var fTexCoord1;
var fTexCoord2;
var sampler1;
var sampler2;
var vertices1 = [
vec4(-0.8, 0.1, 0.0, 1.0),
vec4(-0.8, 0.3, 0.0, 1.0),
vec4(-0.6, 0.3, 0.0, 1.0),
vec4(-0.6, 0.1, 0.0, 1.0)
];
var vertices2 = [
vec4(0.1, 0.1, 0.0, 1.0),
vec4(0.1, 0.3, 0.0, 1.0),
vec4(0.3, 0.3, 0.0, 1.0),
vec4(0.3, 0.1, 0.0, 1.0)
];
var indices1 = [
0, 1, 2,
0, 2, 3
];
var indices2 = [
0, 1, 2,
0, 2, 3
];
var tcs1 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
var tcs2 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
function init() {
var canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) { alert("WebGL isn't available"); }
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
program1 = initShaders(gl, "vertex-shader1", "fragment-shader1");
gl.useProgram(program1);
vBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices1), gl.STATIC_DRAW);
vPos1 = gl.getAttribLocation(program1, "vPosition");
iBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices1), gl.STATIC_DRAW);
tBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs1), gl.STATIC_DRAW);
fTexCoord1 = gl.getAttribLocation(program1, "vTexCoord");
sampler1 = gl.getUniformLocation(program1, "texture");
texture1 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[0]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
///////////////////////////////////////////////////////////////////////////////////////
/*
program2 = initShaders(gl, "vertex-shader2", "fragment-shader2");
gl.useProgram(program2);
*/
vBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices2), gl.STATIC_DRAW);
vPos2 = gl.getAttribLocation(program1, "vPosition");
iBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices2), gl.STATIC_DRAW);
tBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs2), gl.STATIC_DRAW);
fTexCoord2 = gl.getAttribLocation(program1, "vTexCoord");
gl.vertexAttribPointer(fTexCoord2, 2, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(fTexCoord2);
sampler2 = gl.getUniformLocation(program1, "texture");
texture2 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[1]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
render();
};
// Draw both textured quads every frame. Because WebGL1 attribute state is
// global, each object's buffer binding + enable + pointer calls are issued
// here, immediately before its own draw call.
function render() {
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(program1);
// --- object 1: positions, texcoords, texture, then indexed draw ---
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.enableVertexAttribArray(vPos1);
gl.vertexAttribPointer(vPos1, 4, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff1);
gl.enableVertexAttribArray(fTexCoord1);
gl.vertexAttribPointer(fTexCoord1, 2, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.uniform1i(sampler1, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.drawElements(gl.TRIANGLES, indices1.length, gl.UNSIGNED_BYTE, 0);
// --- object 2: same sequence with its own buffers and texture ---
gl.bindBuffer(gl.ARRAY_BUFFER,vBuff2);
gl.enableVertexAttribArray(vPos2);
gl.vertexAttribPointer(vPos2, 4, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.uniform1i(sampler2, 0);
gl.bindBuffer(gl.ARRAY_BUFFER,tBuff2);
gl.enableVertexAttribArray(fTexCoord2);
gl.vertexAttribPointer(fTexCoord2, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.drawElements(gl.TRIANGLES, indices2.length, gl.UNSIGNED_BYTE, 0);
requestAnimFrame(render);
}
// Wrap an <img> download in a Promise that resolves with the loaded element
// (or rejects with the error event on failure).
function loadImage(url) {
return new Promise((resolve, reject) => {
const element = new Image();
element.onload = () => { resolve(element); };
element.onerror = reject;
element.crossOrigin = 'anonymous'; // needed for cross-origin texture uploads
element.src = url;                 // assigning src starts the request
});
}
// Download every URL in parallel and store the resulting <img> elements in
// the module-global `images` array consumed by init().
async function loadImages(imgs) {
const pending = imgs.map((url) => loadImage(url));
images = await Promise.all(pending);
}
// Entry point: wait for both textures to finish downloading, then build the
// GL scene -- the images must be ready before init() uploads them with
// texImage2D.
async function main() {
await loadImages([
'https://webglfundamentals.org/webgl/resources/f-texture.png',
'https://webglfundamentals.org/webgl/lessons/resources/noodles-01.jpg',
]);
init();
}
// NOTE(review): this promise is floating; a trailing .catch(console.error)
// would surface image-load failures instead of an unhandled rejection.
main();
<script id="vertex-shader1" type="x-shader/x-vertex">
attribute vec4 vPosition;
attribute vec2 vTexCoord;
varying vec2 fTexCoord;
void main() {
fTexCoord = vTexCoord;
gl_Position = vPosition;
}
</script>
<script id="fragment-shader1" type="x-shader/x-fragment">
precision mediump float;
varying vec2 fTexCoord;
uniform sampler2D texture;
void main() {
gl_FragColor = texture2D(texture, fTexCoord);
}
</script>
<canvas id="gl-canvas"></canvas>
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>

WebGL: can't apply texture to object

I'm attempting to apply a texture to an object in webGL, but I get the following error:
WebGL warning: drawElements: Vertex attrib array 0 is enabled but has no buffer bound.
which I am unable to find any information on online. Otherwise the scene shows, but the object that needs the texture is appearing as a flat square.
The error points to my drawElements
gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexTextureCoordBuffer);
gl.vertexAttribPointer(shaderProgram.textureCoordAttribute, cubeVertexTextureCoordBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(shaderProgram.samplerUniform, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, cubeNormalBuffer);
gl.vertexAttribPointer(shaderProgram.vertexNormalAttribute, cubeNormalBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(shaderProgram.vertexNormalAttribute);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, cubeVertexIndexBuffer);
setMatrixUniforms();
gl.drawElements(gl.TRIANGLES, cubeVertexIndexBuffer.numItems, gl.UNSIGNED_SHORT, 0);
and my buffer for the texture
cubeVertexTextureCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cubeVertexTextureCoordBuffer);
const textureCoords = [
// Front
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Back
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Top
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Bottom
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Right
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Left
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoords), gl.STATIC_DRAW);
cubeVertexTextureCoordBuffer.itemSize = 2;
cubeVertexTextureCoordBuffer.numItems = 24;
and the texture code
// Texture handle shared with drawScene; filled asynchronously below.
let texture;
// Create the cube texture: upload a 1x1 blue placeholder synchronously so the
// scene can render immediately, then swap in the real image once it loads
// (generating mips only when its dimensions permit in WebGL1).
function initTexture(){
texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// 1x1 opaque blue placeholder pixel.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0,0,255,255]));
const image = new Image();
image.onload = function () {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
if(isPowerOf2(image.width) && isPowerOf2(image.height)){
gl.generateMipmap(gl.TEXTURE_2D);
}
else{
// Non-power-of-two in WebGL1: clamp and drop mips so the texture is renderable.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
drawScene();
};
// image.src = "crate.gif";
image.src = "WoodFine23_COL_3K.jpg";
}
// True when `value` is a positive power of two (1, 2, 4, 8, ...).
// The guard fixes the original's edge case: (0 & -1) == 0 made it report
// 0 as a power of two, which it is not.
function isPowerOf2(value) {
return value > 0 && (value & (value - 1)) === 0;
}
Otherwise my entire code can be found here https://pastebin.com/HaQBFEuL
Also, I have followed the following tutorials
http://learningwebgl.com/blog/?p=1359,
https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Using_textures_in_WebGL,
and https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Lighting_in_WebGL
Any help would be greatly appreciated.
In the function initShaders the generic vertex attribute arrays for the indices shaderProgram.vertexPositionAttribut, shaderProgram.vertexColorAttribute, shaderProgram.vertexNormalAttribute,
shaderProgram.textureCoordAttribute are enabled and they are never disabled in your code.
But in the function drawScene there are only defined the arrays of generic vertex attribute data for the indices shaderProgram.vertexPositionAttribut, shaderProgram.vertexNormalAttribute and shaderProgram.textureCoordAttribute.
shaderProgram.vertexColorAttribute is missing because it is under comment.
The error message
WebGL warning: drawElements: Vertex attrib array 0 is enabled but has no buffer bound.
means that there is a vertex attribute index which is enabled, but for which no data are defined.
In your case this is shaderProgram.vertexColorAttribute, which is enabled but has no data defined for it.
Skip enabling the vertex attribute with index shaderProgram.vertexColorAttribute and your code should run properly.
Of course the vertex attributes can be enabled in the function initShaders and defined in the function drawScene. But the vertex attribute shaderProgram.vertexNormalAttribute is enabled in the function drawScene too. You should not call gl.enableVertexAttribArray twice for the same attribute index without any need.

WebGL Framebuffer Multisampling

I know webgl can antialias or multisample stuff you render to the screen to avoid hard edges, but when I used a framebuffer it didnt do it anymore and there were a bunch of jagged edges on the screen.
How can I make the framebuffer use multisampling?
This took me a day to figure out, so I thought I should post an example for others to follow. I borrowed the cube animation code below from webgl2fundamentals.org. All I have added to it is the code that does antialiasing on the 3d texture. Make sure the context is initialized with canvas.getContext("webgl2", {antialias: false}); This method won't work with antialiasing on.
To antialias a generated texture you need to initialize a Renderbuffer object and two Framebuffer objects, one for storing the drawing, and the other to process the antialiased graphics into afterwards.
// Create and bind the framebuffer
const FRAMEBUFFER =
{
RENDERBUFFER: 0,
COLORBUFFER: 1
};
const fb =
[
gl.createFramebuffer(),
gl.createFramebuffer()
];
const colorRenderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER,
colorRenderbuffer);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER,
gl.getParameter(gl.MAX_SAMPLES),
gl.RGBA8,
targetTextureWidth,
targetTextureHeight);
gl.bindFramebuffer(gl.FRAMEBUFFER,
fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.RENDERBUFFER,
colorRenderbuffer);
gl.bindFramebuffer(gl.FRAMEBUFFER,
fb[FRAMEBUFFER.COLORBUFFER]);
gl.framebufferTexture2D(gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
targetTexture, 0);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
Just before drawing what will become your texture, set the Framebuffer as the first of the two Framebuffer objects.
// render to our targetTexture by binding the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER,
fb[FRAMEBUFFER.RENDERBUFFER]);
Then do the texture drawing, and then do the antialiasing, which will require the second buffer.
// ... drawing code ...
//
// "blit" the cube into the color buffer, which adds antialiasing
gl.bindFramebuffer(gl.READ_FRAMEBUFFER,
fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER,
fb[FRAMEBUFFER.COLORBUFFER]);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
0, 0, targetTextureWidth, targetTextureHeight,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
// render the top layer to the framebuffer as well
gl.bindFramebuffer(gl.FRAMEBUFFER,
fb[FRAMEBUFFER.RENDERBUFFER]);
Once you have finished drawing the top layer into the buffer, use the same antialiasing method from before, this time setting DRAW_FRAMEBUFFER to null; this tells it to draw to the actual canvas.
// this time render to the default buffer, which is just canvas
gl.bindFramebuffer(gl.READ_FRAMEBUFFER,
fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
0, 0, canvas.width, canvas.height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
Here is the finished product:
"use strict";
var vertexShaderSource = `#version 300 es
// an attribute is an input (in) to a vertex shader.
// It will receive data from a buffer
in vec4 a_position;
in vec2 a_texcoord;
// A matrix to transform the positions by
uniform mat4 u_matrix;
// a varying to pass the texture coordinates to the fragment shader
out vec2 v_texcoord;
// all shaders have a main function
void main() {
// Multiply the position by the matrix.
gl_Position = u_matrix * a_position;
// Pass the texcoord to the fragment shader.
v_texcoord = a_texcoord;
}
`;
var fragmentShaderSource = `#version 300 es
precision mediump float;
// Passed in from the vertex shader.
in vec2 v_texcoord;
// The texture.
uniform sampler2D u_texture;
// we need to declare an output for the fragment shader
out vec4 outColor;
void main() {
outColor = texture(u_texture, v_texcoord);
}
`;
function main() {
// Get A WebGL context
/** #type {HTMLCanvasElement} */
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl2", {
antialias: false
});
if (!gl) {
return;
}
// Use our boilerplate utils to compile the shaders and link into a program
var program = webglUtils.createProgramFromSources(gl, [vertexShaderSource, fragmentShaderSource]);
// look up where the vertex data needs to go.
var positionAttributeLocation = gl.getAttribLocation(program, "a_position");
var texcoordAttributeLocation = gl.getAttribLocation(program, "a_texcoord");
// look up uniform locations
var matrixLocation = gl.getUniformLocation(program, "u_matrix");
var textureLocation = gl.getUniformLocation(program, "u_texture");
// Create a buffer
var positionBuffer = gl.createBuffer();
// Create a vertex array object (attribute state)
var vao = gl.createVertexArray();
// and make it the one we're currently working with
gl.bindVertexArray(vao);
// Turn on the attribute
gl.enableVertexAttribArray(positionAttributeLocation);
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Set Geometry.
setGeometry(gl);
// Tell the attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 3; // 3 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionAttributeLocation, size, type, normalize, stride, offset);
// create the texcoord buffer, make it the current ARRAY_BUFFER
// and copy in the texcoord values
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
setTexcoords(gl);
// Turn on the attribute
gl.enableVertexAttribArray(texcoordAttributeLocation);
// Tell the attribute how to get data out of colorBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floating point values
var normalize = true; // convert from 0-255 to 0.0-1.0
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next color
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
texcoordAttributeLocation, size, type, normalize, stride, offset);
// --- Source texture: a 3x2 single-channel checker used for the first cube pass ---
// Create a texture.
var texture = gl.createTexture();
// use texture unit 0
gl.activeTexture(gl.TEXTURE0 + 0);
// bind to the TEXTURE_2D bind point of texture unit 0
gl.bindTexture(gl.TEXTURE_2D, texture);
// fill texture with 3x2 pixels
{
const level = 0;
const internalFormat = gl.R8; // one 8-bit channel per texel (WebGL2 format)
const width = 3;
const height = 2;
const border = 0;
const format = gl.RED;
const type = gl.UNSIGNED_BYTE;
const data = new Uint8Array([
128, 64, 128,
0, 192, 0,
]);
// Each row is 3 bytes, which is not a multiple of the default 4-byte
// row alignment, so relax UNPACK_ALIGNMENT before the upload.
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border,
format, type, data);
}
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// --- Render target: texture + multisampled renderbuffer + two framebuffers ---
// Create a texture to render to
const targetTextureWidth = 512;
const targetTextureHeight = 512;
const targetTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
{
// define size and format of level 0
const level = 0;
const internalFormat = gl.RGBA;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null; // allocate storage only; pixels come from rendering/blitting
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
targetTextureWidth, targetTextureHeight, border,
format, type, data);
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
// Create and bind the framebuffer
// Two framebuffers: fb[RENDERBUFFER] is the multisampled draw target,
// fb[COLORBUFFER] wraps targetTexture and receives the resolved (blitted) image.
const FRAMEBUFFER = {
RENDERBUFFER: 0,
COLORBUFFER: 1
};
const fb = [gl.createFramebuffer(), gl.createFramebuffer()];
const colorRenderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, colorRenderbuffer);
// Multisampled color storage at the device's maximum sample count (WebGL2).
gl.renderbufferStorageMultisample(gl.RENDERBUFFER, gl.getParameter(gl.MAX_SAMPLES), gl.RGBA8, targetTextureWidth, targetTextureHeight);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, colorRenderbuffer);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
// NOTE(review): neither framebuffer gets a depth attachment here —
// see the note in drawScene about why the cube still renders correctly.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
function degToRad(d) {
  // Convert an angle given in degrees to radians.
  return (Math.PI / 180) * d;
}
// Camera/animation state shared by drawCube and drawScene below.
var fieldOfViewRadians = degToRad(60);
var modelXRotationRadians = degToRad(0);
var modelYRotationRadians = degToRad(0);
// Get the starting time.
var then = 0;
// Kick off the render loop; drawScene re-schedules itself every frame.
requestAnimationFrame(drawScene);
function drawCube(aspect) {
  // Draw the 36-vertex cube using whatever texture is currently bound
  // to texture unit 0. `aspect` is the width/height ratio of the
  // active render target.
  gl.useProgram(program);
  gl.bindVertexArray(vao);

  // Projection for the shared field of view (near = 1, far = 2000).
  const projection = m4.perspective(fieldOfViewRadians, aspect, 1, 2000);

  // Camera at z = 2 looking at the origin, +Y up.
  const eye = [0, 0, 2];
  const lookTarget = [0, 0, 0];
  const worldUp = [0, 1, 0];
  const camera = m4.lookAt(eye, lookTarget, worldUp);
  const view = m4.inverse(camera);
  const viewProjection = m4.multiply(projection, view);

  // Apply the animated model rotation around X then Y.
  let worldViewProjection = m4.xRotate(viewProjection, modelXRotationRadians);
  worldViewProjection = m4.yRotate(worldViewProjection, modelYRotationRadians);

  gl.uniformMatrix4fv(matrixLocation, false, worldViewProjection);
  // Sample u_texture from texture unit 0.
  gl.uniform1i(textureLocation, 0);

  // 6 faces x 2 triangles x 3 vertices = 36.
  gl.drawArrays(gl.TRIANGLES, 0, 36);
}
// Draw the scene.
function drawScene(time) {
// Render one animation frame in two passes:
//   1) draw the cube (3x2 source texture) into the multisampled FBO,
//      then resolve (blit) it into targetTexture;
//   2) draw a cube textured with targetTexture into the multisampled FBO
//      again, then resolve that into the default framebuffer (the canvas).
// `time` is the DOMHighResTimeStamp supplied by requestAnimationFrame.
// convert to seconds
time *= 0.001;
// Subtract the previous time from the current time
var deltaTime = time - then;
// Remember the current time for the next frame.
then = time;
// Animate the rotation
modelYRotationRadians += -0.7 * deltaTime;
modelXRotationRadians += -0.4 * deltaTime;
//webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
{
// render to our targetTexture by binding the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
// render cube with our 3x2 texture
gl.bindTexture(gl.TEXTURE_2D, texture);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);
// Clear the canvas AND the depth buffer.
// NOTE(review): this FBO has no depth attachment, so DEPTH_BUFFER_BIT has
// nothing to clear and the depth test cannot reject fragments; a single
// convex cube still draws correctly because back faces are culled.
gl.clearColor(0, 0, 1, 1); // clear to blue
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const aspect = targetTextureWidth / targetTextureHeight;
drawCube(aspect);
// "blit" the cube into the color buffer, which adds antialiasing
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
0, 0, targetTextureWidth, targetTextureHeight,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
}
{
// render the top layer to the frame buffer as well
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
// render the cube with the texture we just rendered to
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);
// Clear the canvas AND the depth buffer.
gl.clearColor(0.105, 0.105, 0.105, 1); // clear to dark gray (matches page background)
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const aspect = 1;
drawCube(aspect);
// this time render to the default buffer, which is just canvas
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
// NOTE(review): `canvas` is a free variable from the enclosing script;
// gl.canvas would avoid that dependency.
gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
0, 0, canvas.width, canvas.height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
}
// Schedule the next frame.
requestAnimationFrame(drawScene);
}
}
// Fill the buffer with the values that define a cube.
// Upload the cube's 36 vertex positions (6 faces x 2 triangles) into the
// buffer currently bound to gl.ARRAY_BUFFER. The vertex order matches the
// UV order produced by setTexcoords.
function setGeometry(gl) {
  const positions = new Float32Array([
    // z = -0.5 face
    -0.5, -0.5, -0.5,
    -0.5,  0.5, -0.5,
     0.5, -0.5, -0.5,
    -0.5,  0.5, -0.5,
     0.5,  0.5, -0.5,
     0.5, -0.5, -0.5,
    // z = +0.5 face
    -0.5, -0.5,  0.5,
     0.5, -0.5,  0.5,
    -0.5,  0.5,  0.5,
    -0.5,  0.5,  0.5,
     0.5, -0.5,  0.5,
     0.5,  0.5,  0.5,
    // y = +0.5 face
    -0.5,  0.5, -0.5,
    -0.5,  0.5,  0.5,
     0.5,  0.5, -0.5,
    -0.5,  0.5,  0.5,
     0.5,  0.5,  0.5,
     0.5,  0.5, -0.5,
    // y = -0.5 face
    -0.5, -0.5, -0.5,
     0.5, -0.5, -0.5,
    -0.5, -0.5,  0.5,
    -0.5, -0.5,  0.5,
     0.5, -0.5, -0.5,
     0.5, -0.5,  0.5,
    // x = -0.5 face
    -0.5, -0.5, -0.5,
    -0.5, -0.5,  0.5,
    -0.5,  0.5, -0.5,
    -0.5, -0.5,  0.5,
    -0.5,  0.5,  0.5,
    -0.5,  0.5, -0.5,
    // x = +0.5 face
     0.5, -0.5, -0.5,
     0.5,  0.5, -0.5,
     0.5, -0.5,  0.5,
     0.5, -0.5,  0.5,
     0.5,  0.5, -0.5,
     0.5,  0.5,  0.5,
  ]);
  gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}
// Fill the buffer with texture coordinates the cube.
// Upload the cube's 36 texture coordinates into the buffer currently bound
// to gl.ARRAY_BUFFER. Each face maps the full unit quad; faces alternate
// between two triangle orderings of that quad, matching setGeometry.
function setTexcoords(gl) {
  const quadA = [0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0];
  const quadB = [0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1];
  const texcoords = [].concat(quadA, quadB, quadA, quadB, quadA, quadB);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texcoords), gl.STATIC_DRAW);
}
main();
html
{
background-color: #1b1b1b;
}
<canvas id="canvas" width="512" height="512"></canvas>
<!--
for most samples webgl-utils only provides shader compiling/linking and
canvas resizing because why clutter the examples with code that's the same in every sample.
See http://webglfundamentals.org/webgl/lessons/webgl-boilerplate.html
and http://webglfundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
for webgl-utils, m3, m4, and webgl-lessons-ui.
-->
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
<script src="https://webgl2fundamentals.org/webgl/resources/m4.js"></script>
WebGL1 does not support multisampling for framebuffers, so in that case your options are things like rendering to a higher resolution and downsampling when rendering to the canvas, and/or running a post-processing effect to do the anti-aliasing.
WebGL2 does support multisampling for framebuffers. You can call renderbufferStorageMultisample to create a multisampled renderbuffer, and you can call blitFramebuffer to resolve it into the canvas.

create WebGL texture out of RGBA values

I tried to do this based off of this answer. This is my code:
<canvas id='canvas' width='500' height='500' style='border: solid 1px black'></canvas>
<script>
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
var texture = gl.createTexture();
var data = new Uint8Array([128, 128, 0, 1]);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.bindTexture(gl.TEXTURE_2D, texture);
</script>
But all I can see is a white box with a black outline.
What am I doing wrong?
Thank you!
The code you have there just creates a texture and loads it into GPU memory. So you're not actually drawing anything to the canvas.
To get that texture on screen you'll need quite a few more things. Here is your code with the rest added:
// helper function for loading shader sources
// Return the GLSL source stored in the <script> element with the given id.
// `gl` is unused but kept so existing call sites remain valid.
// A <script> tag's children are text nodes, so `textContent` yields the same
// concatenation the original child-walking loop built, in one step.
function loadShaderSource(gl, id) {
  const script = document.getElementById(id);
  return script.textContent;
}
// Full-screen textured-quad demo: compiles a shader pair, uploads a quad and
// its UVs, creates a 1x1 RGBA texture, and draws it to the canvas.
// setup an OpenGL context
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
// build the vertex shader
var vertexShaderSource = loadShaderSource(gl, "shader-vertex");
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderSource);
gl.compileShader(vertexShader);
// build the fragment shader
var fragmentShaderSource = loadShaderSource(gl, "shader-fragment");
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderSource);
gl.compileShader(fragmentShader);
// build a shader program from the vertex and fragment shader
// NOTE(review): no COMPILE_STATUS/LINK_STATUS checks — shader errors here
// would fail silently; real code should call getShaderParameter/getProgramParameter.
var shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
gl.useProgram(shaderProgram);
// define vertex positions
// Two triangles covering the whole clip-space square.
var vertexPositions = new Float32Array([
1.0, 1.0, 0, // a
-1.0, 1.0, 0, // b b----a
1.0, -1.0, 0, // c | |
-1.0, 1.0, 0, // b | |
-1.0, -1.0, 0, // d d----c
1.0, -1.0, 0 // c
]);
// send the vertex positions to the GPU
var vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexPositions, gl.STATIC_DRAW);
// define vertex texcoords
var vertexTexcoords = new Float32Array([
1.0, 0.0, // a
0.0, 0.0, // b
1.0, 1.0, // c
0.0, 0.0, // b
0.0, 1.0, // d
1.0, 1.0 // c
]);
// send the vertex texcoords to the GPU
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexTexcoords, gl.STATIC_DRAW);
// wire up the shader program to the vertex position data
var positionAttribute = gl.getAttribLocation(shaderProgram, "position");
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.enableVertexAttribArray(positionAttribute);
gl.vertexAttribPointer(positionAttribute, 3, gl.FLOAT, false, 0, 0);
// wire up the shader program to the vertex texcoord data
var texcoordAttribute = gl.getAttribLocation(shaderProgram, "texcoord");
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
gl.enableVertexAttribArray(texcoordAttribute);
gl.vertexAttribPointer(texcoordAttribute, 2, gl.FLOAT, false, 0, 0);
// generate and send texture data to the GPU
// Alpha is 255 here (fully opaque) — the asker's original used 1, which is
// nearly transparent when interpreted as a 0-255 byte value.
var textureData = new Uint8Array([128, 128, 0, 255]);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, textureData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
// wire up the shader program to the texture data
// NOTE(review): missing trailing semicolon on the next line (kept as-is).
var imageUniform = gl.getUniformLocation(shaderProgram, "image")
gl.uniform1i(imageUniform, 0);
// tell the GPU to draw
gl.drawArrays(gl.TRIANGLES, 0, 6);
<canvas id='canvas' width='300' height='200' style='border: solid 1px black'></canvas>
<script id="shader-vertex" type="x-shader/x-vertex">
attribute vec3 position;
attribute vec2 texcoord;
varying highp vec2 uv;
void main() {
gl_Position = vec4(position, 1);
uv = texcoord;
}
</script>
<script id="shader-fragment" type="x-shader/x-fragment">
varying highp vec2 uv;
uniform sampler2D image;
void main() {
gl_FragColor = texture2D(image, uv);
}
</script>
I realize that is a TON of info to grok so I recommend reading through some guides. A good place to start might be MDN: Getting started with WebGL.

OpenGL multiple frame buffer rendering

I'm creating a simple WebGL 3D engine and now I'm trying to add some post processing effects.
I do this way: I draw the scene using a Frame Buffer and render to a texture, than I pass the texture to my simple post-processing shader, that renders to the actual physical screen (I follow the openGL post-processing tutorial: http://en.wikibooks.org/wiki/OpenGL_Programming/Post-Processing).
creation of the frame buffer:
// texture
gl.activeTexture(gl.TEXTURE0);
frameBufferTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, frameBufferTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.viewportWidth, gl.viewportHeight, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.bindTexture(gl.TEXTURE_2D, null);
// depth buffer
frameBufferDepth = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, frameBufferDepth);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, gl.viewportWidth, gl.viewportHeight);
// framebuffer to link everything together
frameBufferObject = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBufferObject);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, frameBufferTexture, 0);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, frameBufferDepth);
// check status
var success = (gl.checkFramebufferStatus(gl.FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE);
if (success)
console.log('post processing frame object created');
else
throw "could not create post processing frame: " + gl.checkFramebufferStatus(gl.FRAMEBUFFER);
// vertices
var fbo_vertices = [
-1.0, -1.0,
1.0, -1.0,
-1.0, 1.0,
1.0, 1.0
];
frameBufferVertices = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(fbo_vertices), gl.STATIC_DRAW);
then, when I draw the scene, I call:
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBufferObject);
to target the frame buffer
and I finally render the texture to the screen with the basic shader (does nothing except take the points of the texture):
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clearDepth(1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, frameBufferTexture);
gl.uniform1i(shaderProgram.fbo_texture, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
gl.vertexAttribPointer(shaderProgram.fbo_vertex, 2, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
At this point everything works perfectly. Now my idea was to use multiple shaders (each shader with a different program) that share the same framebuffer, so that every shader takes the texture as input and writes back to it.
I tried to use multiple shaders and calls in a for cycle (for every shader):
gl.useProgram(postProcessingProgram);
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBufferObject);
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clearDepth(1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, frameBufferTexture);
gl.uniform1i(postProcessingProgram.basic_texture, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
gl.vertexAttribPointer(postProcessingProgram.fbo_vertex, 2, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
except for the last basic shader that render to the physical screen
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
the screen is totally black, but I receive no error or warning. It doesn't work even if I use the same basic shader twice.
My question is: is it correct to use the same frame buffer for multiple shaders? If I use multiple frame buffer, how can I share the same texture to do the consecutive steps?

Categories