WebGL texture not shown in rectangle - javascript

I'm having a problem texturing my rectangle: a black canvas is displayed instead of a canvas textured with the image.
First I'm creating webGL program, attaching shaders and linking webGL program as usual.
Then I'm creating texture when image is loaded like this:
// Create the texture object and an <img> element to source it from.
var texture = gl.createTexture();
var image = document.createElement("img");
image.src = "https://upload.wikimedia.org/wikipedia/commons/3/3a/Saint-Gervais-les-Bains_-_Mt-Blanc_JPG01.jpg";
// NOTE(review): onload fires asynchronously, after the rest of this script has
// already run — nothing that samples this texture should execute before it.
image.onload = function() {
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
// (CLAMP_TO_EDGE + NEAREST makes the texture complete without mipmaps,
// which also permits non-power-of-two images in WebGL 1.)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
}
After that I pass the information about the rectangle's vertices into the vertex shader:
// Look up the "pos" attribute in the linked program and enable it.
var pos = gl.getAttribLocation(program, "pos");
gl.enableVertexAttribArray(pos);
var pos_Buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, pos_Buffer);
// Full-screen quad in clip space; two floats (x, y) per vertex.
var vertices = [-1.0, -1.0, // "left-down"
-1.0, 1.0, // "left-top"
1.0, -1.0, // "right-down"
1.0, 1.0, // "right-top"
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
// 2 components per vertex, tightly packed (stride 0, offset 0).
gl.vertexAttribPointer(pos, 2, gl.FLOAT, false, 0, 0);
And in the end I draw my rectangle by passing the indices of the vertices into the drawElements function:
// Index buffer: two triangles (0,1,2) and (1,2,3) covering the quad.
var indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
var indices = [0, 1, 2, 1, 2, 3];
// Uint8Array matches the gl.UNSIGNED_BYTE index type used in drawElements.
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices), gl.STATIC_DRAW);
// draw triangles
// NOTE(review): this runs synchronously — i.e. before image.onload above has
// fired — which is exactly the bug identified in the answer below.
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_BYTE, 0);
There is a jsfiddle with my problem.
Please, does anyone have an idea how to solve it?

image.onload is an asynchronous callback, and you issue the draw call before that function is executed (the image isn't loaded yet when you draw on the canvas).
You must put gl.drawElements inside:
image.onload = function() { // image.onload STARTS
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
// anything you want, blehblahbleh ...
// draw on canvas
// The draw call now happens only after the image data has been uploaded.
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_BYTE, 0);
} // image.onload ENDS
Also, yes, there was a problem with the cross-origin resource call, but I guess you solved it with an extension (or for jsfiddle tests you might use the base64 format).
Slightly updated sample: http://jsfiddle.net/windkiller/6cLo3890/

Related

error: WebGL warning: texImage2D: Desired upload requires more data than is available: (when loading the texture with triangle mesh data and normals)

I have probably messed up when loading 2 textures. I am getting this error: "WebGL warning: texImage2D: Desired upload requires more data than is available: (0 rows plus 246 pixels needed, 0 rows plus 244 pixels available)"
and nothing in the screen. I have 2 textures one with the vertices of a mesh and another with the normals. i will post the way i try to load the textures:
const uLSr = gl.getUniformLocation(P, 'uMeshData');
const uNormData = gl.getUniformLocation(P, 'uNormData');
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
const verts = [
...
4.910892,0.000000,4.910892,
-4.910892,0.000000,-4.910892,
-4.910892,0.000000,4.910892,
4.910892,0.000000,4.910892,
4.910892,0.000000,-4.910892,
...
];
const vertsNorm = [
...
0.0000,-1.0000,0.0000,
0.4253,-0.8506,0.3090,
-0.1625,-0.8506,0.5000,
0.7236,-0.4472,0.5257,
0.4253,-0.8506,0.3090,
...
];
const meshVerts = new Float32Array(verts);
const vertsLenght = meshVerts.length / 3;
gl.uniform1i(uLvertices, vertsLenght);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB32F, vertsLenght, 1, 0, gl.RGB, gl.FLOAT, meshVerts);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const textureNorm = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, textureNorm);
const meshNorm = new Float32Array(vertsNorm);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
//vertsLength is the same cause normal and vertices have the same length
//foreachone normal there is one single vertex
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB32F, vertsLenght, 1, 0, gl.RGB, gl.FLOAT, meshNorm);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
and then in the draw function i call them like that:
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, textureNorm);
gl.uniform1i(uLSr, 0);
gl.uniform1i(uNormData, 1);
and then inside the shader i try to unpack them like that:
for (int i = 6; i < vertsCount; i += 3) {
a = texelFetch(uMeshData, ivec2(i, 0), 0);
b = texelFetchOffset(uMeshData, ivec2(i, 0), 0, ivec2(1, 0));
c = texelFetchOffset(uMeshData, ivec2(i, 0), 0, ivec2(2, 0));
aN = texelFetch(uNormData, ivec2(i, 0), 0);
bN = texelFetchOffset(uNormData, ivec2(i, 0), 0, ivec2(1, 0));
cN = texelFetchOffset(uNormData, ivec2(i, 0), 0, ivec2(2, 0));
triangleNormal = (aN.xyz + bN.xyz + cN.xyz) / 3.;
vec3 uvt;
vec3 intersect;
float z;
bool isHit = hitTriangleSecond(R_.orig, R_.dir, a.xyz, b.xyz, c.xyz, uvt, triangleNormal, intersect, z);;
if (isHit) {
if (z<mindist && z > 0.001) {
hitPos1 = intersect;
mindist = z;
weHitSomething = true;
material.type = DIEL;
material.albedo = vec3(.8, .3, .4);
normal = triangleNormal;
hitPos = hitPos1;
}
}
}
If I comment out the line where I calculate the normal of the surface and replace it with the normal I got from the texture, I get a white page. So I assume I have done something wrong when loading the textures. Or is it in the hitTriangleSecond function where I calculate the face normal of the triangle?
and below i post the link to the jsfiddle with a minimal example(its long cause i put the vertices and normals of a plane and a sphere of triangles):
link of jsfiddle example
// NOTE(review): quoted from the question above — this still sizes the
// *normals* texture by vertsLenght (the vertex-texture count); the corrected
// version follows below.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB32F, vertsLenght, 1, 0, gl.RGB, gl.FLOAT, meshNorm);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
I found the mistake.
It was the vertsLength variable; I made a new variable for the second texture, because reusing vertsLength didn't seem to work for some reason.
// Size the normals texture by the normal array's own length (3 floats per texel).
const normLength = meshNorm.length /3;
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB32F, normLength, 1, 0, gl.RGB, gl.FLOAT, meshNorm);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

Rendering Pipeline works in WebGL 1 but not in WebGL 2

Update
This already answered by #gman.
A call to gl.getExtension('EXT_color_buffer_float') was need when starting things up.
The issue is that a call to gl.checkFramebufferStatus(gl.FRAMEBUFFER) does not return gl.FRAMEBUFFER_COMPLETE under WebGL2. I make this call just prior to calling gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4).
Here's the common sequence of things for either version. The differences are in createTexture() and readTexture(). Those are specific to the versions of WebGL.
General Flow:
gl.useProgram(program);
// Resolve output dimensions and bind the output texture to the framebuffer.
var texShape = getTextureShape(programInfo.outputTensor);
var outputTexture = this.textureManager.getOrCreateTexture(programInfo.outputTensor);
this.gpuContext.attachFramebuffer(outputTexture, texShape.width, texShape.height);
// Upload input data and wire up attributes, uniforms and samplers.
var inputTextures = this.createTextures(programInfo.textureData);
this.bindAttributes(buildArtifact.attribLocations);
this.bindUniforms(buildArtifact.uniformLocations, programInfo.uniformData);
this.bindTextures(buildArtifact.uniformLocations, inputTextures);
// This is the check that fails under WebGL 2 (FRAMEBUFFER_COMPLETE not
// returned) until EXT_color_buffer_float is enabled, per the Update above.
if (!this.gpuContext.isFramebufferReady()) {
throw new Error("Framebuffer is not ready");
}
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
this.copyToOutput(outputTexture, programInfo.outputTensor);
attachFramebuffer: common
// Binds `texture` as COLOR_ATTACHMENT0 of the shared framebuffer and sets the
// viewport to the texture's size. Common to both WebGL versions.
GpuContext.prototype.attachFramebuffer = function (texture, width, height) {
var gl = this.gl;
// Use the last texture unit so this binding doesn't clobber input samplers.
gl.activeTexture(gl.TEXTURE0 + (this.maxTextureImageUnits - 1));
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
texture,
0);
gl.viewport(0, 0, width, height);
};
createTexture: WebGL 1.0:
// WebGL 1 path: creates a width×height RGBA/FLOAT texture. Scalar input data
// is spread into the R channel of each RGBA texel (G/B/A left at zero).
GpuContext.prototype.createTexture = function (width, height, data) {
var gl = this.gl;
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// NEAREST + CLAMP_TO_EDGE: no mipmaps required, any texture size allowed.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// TODO: remove this override
var type = gl.FLOAT;
var buffer = null;
if (data) {
// One source value per texel, written into the R component (stride 4).
buffer = new Float32Array(data.length * 4);
data.forEach(function (value, index) { return buffer[index * 4] = value; });
}
// Pixel format and data for the texture
gl.texImage2D(gl.TEXTURE_2D, // Target, matches bind above.
0,
gl.RGBA,
width,
height,
0,
gl.RGBA,
type,
buffer);
gl.bindTexture(gl.TEXTURE_2D, null);
return texture;
};
readTexture: WebGL 1.0:
// WebGL 1 path: reads the texture back through the framebuffer as RGBA floats
// and keeps only every 4th value (the R channel written by createTexture).
GpuContext.prototype.readTexture = function (texture, width, height) {
var gl = this.gl;
var buffer = new Float32Array(width * height * 4);
var format = gl.RGBA;
var type = gl.FLOAT;
// bind texture to framebuffer
gl.framebufferTexture2D(gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
texture,
0);
if (!this.isFramebufferReady()) {
throw new Error("Framebuffer is not ready after attaching texture");
}
gl.readPixels(0, // x-coord of lower left corner
0, // y-coord of lower left corner
width, // width of the block
height, // height of the block
format, // Format of pixel data.
type, // Data type of the pixel data, must match makeTexture
buffer); // Load pixel data into buffer
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
// Drop the unused G/B/A components, returning one float per texel.
return buffer.filter(function (value, index) { return index % 4 === 0; });
};
WebGL 2.0 Overrides:
// WebGL 2 path: stores one float per texel using the sized R32F internal
// format. NOTE(review): rendering to R32F color attachments requires
// gl.getExtension('EXT_color_buffer_float') — the fix noted in the Update.
WebGL2GpuContext.prototype.createTexture = function (width, height, data) {
var gl = this.gl;
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// Single-channel float: no R-channel spreading needed, unlike WebGL 1.
var internalFormat = WebGL2RenderingContext.R32F;
var format = WebGL2RenderingContext.RED;
var type = gl.FLOAT;
gl.texImage2D(gl.TEXTURE_2D,
0,
internalFormat,
width,
height,
0,
format,
type,
data);
gl.bindTexture(gl.TEXTURE_2D, null);
return texture;
};
// WebGL 2 path: reads back one float per texel directly (RED/FLOAT), so no
// post-filtering of the buffer is needed, unlike the WebGL 1 override.
WebGL2GpuContext.prototype.readTexture = function (texture, width, height) {
var gl = this.gl;
var buffer = new Float32Array(width * height);
var format = WebGL2RenderingContext.RED;
var type = gl.FLOAT;
gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER,
gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D,
texture,
0);
if (!this.isFramebufferReady()) {
throw new Error("Framebuffer is not ready after attaching texture");
}
gl.readPixels(0,
0,
width,
height,
format,
type,
buffer);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
return buffer;
};

WebGL - drawElements: The texture is a non-power-of-two texture or not mipmap complete

I am drawing a <video> onto a webgl canvas. I have it working in chrome and firefox, but IE11 seems to be throwing an error. The error states:
drawElements: The texture is a non-power-of-two texture or not mipmap complete
and refers to this specific line:
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
I have a requestAnimationFrame loop running that is updating my canvas. I've read this error can be caused by the image not being ready or loaded. Here is my loop:
// Per-frame loop: re-uploads the current video frame into `tex` and redraws
// the quad. Stops scheduling itself once the video is paused or has ended.
(function loop()
{
if (!$this.paused && !$this.ended)
{
gl.clear(gl.COLOR_BUFFER_BIT);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, tex);
if (!$this.paused && !$this.ended)
{
try
{
// update the video frame
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, player);
}
catch(e)
{
log(e);
}
}
// NOTE(review): if the texImage2D above threw (e.g. video not ready in
// IE11), this still draws with an incomplete texture — the reported error.
gl.bindBuffer(gl.ARRAY_BUFFER, vx);
gl.vertexAttribPointer(vx_ptr, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, ix);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
window.requestAnimationFrame(loop);
}
})();
What seems to be causing this? I have been searching with no luck. Before my loop runs, I setup the webgl canvas with this code:
// One-time setup: compiles the pass-through shaders, creates the quad's
// vertex/index buffers, and configures the video texture (unit 0).
$scope.prepWebGL = function()
{
// prepare visible WebGL
vs = gl.createShader(gl.VERTEX_SHADER);
// Maps [0,1] quad coords to clip space (y flipped) and reuses them as UVs.
gl.shaderSource(vs, "attribute vec2 vx;varying vec2 tx;void main(){gl_Position=vec4(vx.x*2.0-1.0,1.0-vx.y*2.0,0,1);tx=vx;}");
gl.compileShader(vs);
ps = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(ps, "precision mediump float;uniform sampler2D sm;varying vec2 tx;void main(){gl_FragColor=texture2D(sm,tx);}");
gl.compileShader(ps);
shader = gl.createProgram();
gl.attachShader(shader, vs);
gl.attachShader(shader, ps);
gl.linkProgram(shader);
gl.useProgram(shader);
vx_ptr = gl.getAttribLocation(shader, "vx");
gl.enableVertexAttribArray(vx_ptr);
// Sampler "sm" reads from texture unit 0.
gl.uniform1i(gl.getUniformLocation(shader, "sm"), 0);
vx = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vx);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0,0, 1,0, 1,1, 0,1]), gl.STATIC_DRAW);
ix = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, ix);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array([0,1,2, 0,2,3]), gl.STATIC_DRAW);
tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
// CLAMP_TO_EDGE + LINEAR: mipmap-free sampling, valid for NPOT video frames.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
};
If the problem is actually that the video is not loaded, you could add a flag to your video, to indicate that the video is loaded
// Track load state on the video element itself.
// NOTE(review): <video> elements generally signal readiness via events like
// 'loadeddata'/'canplay' rather than 'load' — confirm this handler fires.
player.loaded = false;
player.onload = function() {
this.loaded = true;
}
and then check if the video is loaded in your loop
// Skip the per-frame texture upload until the video reports it has data.
if (!$this.paused && !$this.ended && player.loaded)
{
try
{
// update the video frame
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, player);
}
catch(e)
{
log(e);
}
}
The error means that the texture you are currently using is not a power of 2, and WebGL has limited non-power-of-two texture support. In particular, you can't generate mipmaps with non-power-of-two textures, nor can you set them to repeat.
What is strange is that you are using gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); which supposedly does not use mipmaps, and you are using gl.CLAMP_TO_EDGE as well. Perhaps it is a bug in IE?
You may have some scaling issues, but you can use video dimensions like 512×512 or 1024×512, which will solve your compatibility problem. You can also embed your video in a video of the correct size with a video converter or video-editing tool, so as not to lose the aspect ratio of your source video.

OpenGL multiple frame buffer rendering

I'm creating a simple WebGL 3D engine and now I'm trying to add some post processing effects.
I do this way: I draw the scene using a Frame Buffer and render to a texture, than I pass the texture to my simple post-processing shader, that renders to the actual physical screen (I follow the openGL post-processing tutorial: http://en.wikibooks.org/wiki/OpenGL_Programming/Post-Processing).
creation of the frame buffer:
// texture
// Color target the scene will be rendered into (screen-sized RGBA, no data).
gl.activeTexture(gl.TEXTURE0);
frameBufferTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, frameBufferTexture);
// LINEAR + CLAMP_TO_EDGE: complete without mipmaps, any viewport size works.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.viewportWidth, gl.viewportHeight, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.bindTexture(gl.TEXTURE_2D, null);
// depth buffer
frameBufferDepth = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, frameBufferDepth);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, gl.viewportWidth, gl.viewportHeight);
// framebuffer to link everything together
frameBufferObject = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBufferObject);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, frameBufferTexture, 0);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, frameBufferDepth);
// check status
var success = (gl.checkFramebufferStatus(gl.FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE);
if (success)
console.log('post processing frame object created');
else
throw "could not create post processing frame: " + gl.checkFramebufferStatus(gl.FRAMEBUFFER);
// vertices
// Full-screen triangle strip quad in clip space.
var fbo_vertices = [
-1.0, -1.0,
1.0, -1.0,
-1.0, 1.0,
1.0, 1.0
];
frameBufferVertices = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(fbo_vertices), gl.STATIC_DRAW);
then, when I draw the scene, I call:
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBufferObject);
to target the frame buffer
and I finally render the texture to the screen with the basic shader (does nothing except take the points of the texture):
// Final pass: unbind the FBO (render to the default framebuffer / screen)
// and draw the quad sampling the scene texture on unit 0.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clearDepth(1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, frameBufferTexture);
gl.uniform1i(shaderProgram.fbo_texture, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
gl.vertexAttribPointer(shaderProgram.fbo_vertex, 2, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
At this point all works perfectly, now my idea was to use multiple shaders (every shader with a different program), that share the same Frame Buffer, so every shader takes in input the texture and write to it.
I tried to use multiple shaders and calls in a for cycle (for every shader):
gl.useProgram(postProcessingProgram);
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBufferObject);
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clearDepth(1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.activeTexture(gl.TEXTURE0);
// NOTE(review): frameBufferTexture is sampled here while it is still attached
// to the currently-bound framebuffer — a rendering feedback loop, which WebGL
// forbids (results are undefined/black). The usual fix is to "ping-pong"
// between two framebuffers/textures, reading from one while writing the other.
gl.bindTexture(gl.TEXTURE_2D, frameBufferTexture);
gl.uniform1i(postProcessingProgram.basic_texture, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
gl.vertexAttribPointer(postProcessingProgram.fbo_vertex, 2, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
except for the last basic shader that render to the physical screen
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
the screen is totally black, but I receive no error or warning. It doesn't work even if I use the same basic shader twice.
My question is: is it correct to use the same frame buffer for multiple shaders? If I use multiple frame buffer, how can I share the same texture to do the consecutive steps?

Webgl weird rendering with simple texture

I'm having trouble rendering textures on my custom 3D shape.
With the following parameters:
// Upload the image flipped vertically and build mipmaps for REPEAT wrapping.
// NOTE(review): in WebGL 1, REPEAT and generateMipmap both require
// power-of-two image dimensions — verify texture.image's size.
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.image);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT);
gl.generateMipmap(gl.TEXTURE_2D);
gl.bindTexture(gl.TEXTURE_2D, null);
It gives me this result:
Changing the following parameters:
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT);
It gives me this :
And with these parameters:
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
I tried changing the texture coordinates, but without success. The ones used for each face were:
0.0, 0.0,
0.0, 10.0,
10.0, 10.0,
10.0, 0.0
Any idea why one triangle (actually two, including the one on the parallel face) behaves weirdly?
The issue is that you're using faces with varying numbers of points — but still a fixed number (4) of texture coordinates per face. As some faces have 5 points, at some point the texture coordinates get "shifted".
Instead, you should add them just like the vertices:
for (var j=0; j < face.length; j++){
vertices...
//generate texture coords, they could also be stored like the points
textureCoords.push(points[face[j]][0] / 4);
textureCoords.push((points[face[j]][1]+points[face[j]][2]) / 4);
http://jsfiddle.net/bNTkK/9/

Categories