Rendering Pipeline works in WebGL 1 but not in WebGL 2 - javascript

Update
This was already answered by #gman.
A call to gl.getExtension('EXT_color_buffer_float') was needed when starting things up.
The issue is that a call to gl.checkFramebufferStatus(gl.FRAMEBUFFER) does not return gl.FRAMEBUFFER_COMPLETE under WebGL2. I make this call just prior to calling gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4).
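For reference, a minimal sketch of that startup fix (assuming a WebGL2 context obtained from some canvas; the error handling here is mine, not from the original code):
var gl = canvas.getContext('webgl2');
// Without this extension, float textures (R32F, RGBA32F, ...) are not
// color-renderable in WebGL2, and checkFramebufferStatus() will not
// return FRAMEBUFFER_COMPLETE for a framebuffer that uses one.
if (!gl.getExtension('EXT_color_buffer_float')) {
    throw new Error('EXT_color_buffer_float is not supported on this device');
}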
Here's the common sequence for either version. The differences are in createTexture() and readTexture(), which are specific to each WebGL version.
General Flow:
gl.useProgram(program);
var texShape = getTextureShape(programInfo.outputTensor);
var outputTexture = this.textureManager.getOrCreateTexture(programInfo.outputTensor);
this.gpuContext.attachFramebuffer(outputTexture, texShape.width, texShape.height);
var inputTextures = this.createTextures(programInfo.textureData);
this.bindAttributes(buildArtifact.attribLocations);
this.bindUniforms(buildArtifact.uniformLocations, programInfo.uniformData);
this.bindTextures(buildArtifact.uniformLocations, inputTextures);
if (!this.gpuContext.isFramebufferReady()) {
    throw new Error("Framebuffer is not ready");
}
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
this.copyToOutput(outputTexture, programInfo.outputTensor);
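isFramebufferReady() isn't shown above; presumably it is a thin wrapper around checkFramebufferStatus, something like the following hypothetical sketch:
GpuContext.prototype.isFramebufferReady = function () {
    // Hypothetical sketch; the real method may differ.
    var gl = this.gl;
    return gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE;
};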
attachFramebuffer: common
GpuContext.prototype.attachFramebuffer = function (texture, width, height) {
    var gl = this.gl;
    gl.activeTexture(gl.TEXTURE0 + (this.maxTextureImageUnits - 1));
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
    gl.framebufferTexture2D(gl.FRAMEBUFFER,
                            gl.COLOR_ATTACHMENT0,
                            gl.TEXTURE_2D,
                            texture,
                            0);
    gl.viewport(0, 0, width, height);
};
createTexture: WebGL 1.0:
GpuContext.prototype.createTexture = function (width, height, data) {
    var gl = this.gl;
    var texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    // TODO: remove this override
    var type = gl.FLOAT;
    var buffer = null;
    if (data) {
        buffer = new Float32Array(data.length * 4);
        data.forEach(function (value, index) { return buffer[index * 4] = value; });
    }
    // Pixel format and data for the texture
    gl.texImage2D(gl.TEXTURE_2D, // Target, matches bind above.
                  0,
                  gl.RGBA,
                  width,
                  height,
                  0,
                  gl.RGBA,
                  type,
                  buffer);
    gl.bindTexture(gl.TEXTURE_2D, null);
    return texture;
};
readTexture: WebGL 1.0:
GpuContext.prototype.readTexture = function (texture, width, height) {
    var gl = this.gl;
    var buffer = new Float32Array(width * height * 4);
    var format = gl.RGBA;
    var type = gl.FLOAT;
    // bind texture to framebuffer
    gl.framebufferTexture2D(gl.FRAMEBUFFER,
                            gl.COLOR_ATTACHMENT0,
                            gl.TEXTURE_2D,
                            texture,
                            0);
    if (!this.isFramebufferReady()) {
        throw new Error("Framebuffer is not ready after attaching texture");
    }
    gl.readPixels(0,       // x-coord of lower left corner
                  0,       // y-coord of lower left corner
                  width,   // width of the block
                  height,  // height of the block
                  format,  // Format of pixel data.
                  type,    // Data type of the pixel data, must match makeTexture
                  buffer); // Load pixel data into buffer
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
    return buffer.filter(function (value, index) { return index % 4 === 0; });
};
WebGL 2.0 Overrides:
WebGL2GpuContext.prototype.createTexture = function (width, height, data) {
    var gl = this.gl;
    var texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    var internalFormat = WebGL2RenderingContext.R32F;
    var format = WebGL2RenderingContext.RED;
    var type = gl.FLOAT;
    gl.texImage2D(gl.TEXTURE_2D,
                  0,
                  internalFormat,
                  width,
                  height,
                  0,
                  format,
                  type,
                  data);
    gl.bindTexture(gl.TEXTURE_2D, null);
    return texture;
};
WebGL2GpuContext.prototype.readTexture = function (texture, width, height) {
    var gl = this.gl;
    var buffer = new Float32Array(width * height);
    var format = WebGL2RenderingContext.RED;
    var type = gl.FLOAT;
    gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
    gl.framebufferTexture2D(gl.FRAMEBUFFER,
                            gl.COLOR_ATTACHMENT0,
                            gl.TEXTURE_2D,
                            texture,
                            0);
    if (!this.isFramebufferReady()) {
        throw new Error("Framebuffer is not ready after attaching texture");
    }
    gl.readPixels(0,
                  0,
                  width,
                  height,
                  format,
                  type,
                  buffer);
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
    return buffer;
};

Related

How to read only single channel(R component) from webGL context readPixels() API?

I've converted an RGBA image to a greyscale image using WebGL.
When reading the pixels with gl.readPixels() in the gl.RGBA format, I get the values for each pixel as YYYA, because each RGBA pixel is converted to YYYA and assigned to gl_FragColor. I want only the 1-byte Y component for each pixel instead of 4 bytes.
I tried reading the pixels with the gl.RED format (instead of gl.RGBA):
gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight, gl.RED, gl.UNSIGNED_BYTE, pixels);
but I get the following error on Chrome, and only zeroes:
WebGL: INVALID_ENUM: readPixels: invalid format
Is it possible to make gl_FragColor output 1 byte per pixel (LUMINANCE) instead of RGBA, while the input texture stays RGBA?
If the rendering format cannot be changed, is it possible to read only the first byte of each 4-byte pixel when calling gl.readPixels()?
Notes:
1. I've already tried copying the gl.readPixels() output to another array, stepping over every 4 bytes, but I want to avoid this copy because it takes more time.
2. The solution also needs to be compatible with mobile browsers (iOS Safari and Android Chrome).
function webGL() {
var gTexture;
var gFramebuffer;
var srcCanvas = null;
var programs = {};
var program;
var pixels;
this.convertRGBA2Gray = function(inCanvas, inArray) {
// Y component from YCbCr
const shaderSourceRGB2GRAY = `
precision mediump float;
uniform sampler2D u_image;
uniform vec2 u_textureSize;
vec4 scale = vec4(0.299, 0.587, 0.114, 0.0);
void main() {
vec4 color = texture2D(u_image, gl_FragCoord.xy / u_textureSize);
gl_FragColor = vec4(vec3(dot(color,scale)), color.a);
}`;
if (srcCanvas === null) {
console.log('Setting up webgl');
srcCanvas = inCanvas;
_initialize(srcCanvas.width, srcCanvas.height);
program = _createProgram("rgb2grey", shaderSourceRGB2GRAY);
}
pixels = inArray;
_run(program);
}
///////////////////////////////////////
// Private functions
var _getWebGLContext = function(canvas) {
try {
return (canvas.getContext("webgl", {premultipliedAlpha: false, preserveDrawingBuffer: true}) || canvas.getContext("experimental-webgl", {premultipliedAlpha: false, preserveDrawingBuffer: true}));
}
catch(e) {
console.log("ERROR: %o", e);
}
return null;
}
var gl = _getWebGLContext(document.createElement('canvas'));
var _initialize = function(width, height) {
var canvas = gl.canvas;
canvas.width = width;
canvas.height = height;
if (this.originalImageTexture) {
return;
}
this.originalImageTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, this.originalImageTexture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, gTexture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGBA, canvas.width, canvas.height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, null);
gFramebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, gFramebuffer);
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1.0, 1.0,
1.0, 1.0,
1.0, -1.0,
-1.0, 1.0,
1.0, -1.0,
-1.0, -1.0
]), gl.STATIC_DRAW);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, gTexture, 0);
gl.bindTexture(gl.TEXTURE_2D, this.originalImageTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, srcCanvas);
}
var _createProgram = function(name, fragmentSource, vertexSource) {
shaderProgram = programs[name];
if (shaderProgram){
console.log('Reusing program');
gl.useProgram(shaderProgram);
return shaderProgram;
}
function createShader(type, source){
var shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
return shader;
}
var vertexShader, fragmentShader;
if (!vertexSource){
vertexShader = createShader(gl.VERTEX_SHADER, `attribute vec2 a_position;
void main() { gl_Position = vec4(a_position, 0.0, 1.0); }`
);
} else {
vertexShader = createShader(gl.VERTEX_SHADER, vertexSource);
}
fragmentShader = createShader(gl.FRAGMENT_SHADER, fragmentSource);
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
gl.useProgram(shaderProgram);
return shaderProgram;
}
var _render = function(gl, program){
var positionLocation = gl.getAttribLocation(program, "a_position");
var u_imageLoc = gl.getUniformLocation(program, "u_image");
var textureSizeLocation = gl.getUniformLocation(program, "u_textureSize");
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(positionLocation);
var width = gl.canvas.width,
height = gl.canvas.height;
gl.bindFramebuffer(gl.FRAMEBUFFER, gFramebuffer);
gl.uniform2f(textureSizeLocation, width, height);
gl.uniform1i(u_imageLoc, 0);
gl.viewport(0, 0, width, height);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
var _run = function(program){
let t0 = performance.now();
_render(gl, program);
gl.bindTexture(gl.TEXTURE_2D, gTexture);
let t1 = performance.now();
// gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight, gl.RED, gl.UNSIGNED_BYTE, pixels);
let t2 = performance.now();
console.log('_render dur = ' + Number((t1-t0).toFixed(3)) + ' ms');
console.log('_run dur = ' + Number((t2-t0).toFixed(3)) + ' ms');
}
};
<div>
<canvas id="source"></canvas>
</div>
<script src="webgl.js" type="text/javascript"></script>
<script>
window.addEventListener('load', function(e) {
var srcImg = new Image();
srcImg.crossOrigin = "anonymous";
srcImg.src = "https://i.picsum.photos/id/17/480/480.jpg";
srcImg.width = 480;
srcImg.height = 480;
srcImg.onload = function(){
// image has been loaded
let srcCanvas = document.getElementById("source");
srcCanvas.width = srcImg.width;
srcCanvas.height = srcImg.height;
let ctx = srcCanvas.getContext('2d');
ctx.drawImage(srcImg, 0, 0, srcImg.width, srcImg.height);
var webgl = new webGL();
let pixels = new Uint8Array(srcCanvas.width * srcCanvas.height * 4);
webgl.convertRGBA2Gray(srcCanvas, pixels);
var outData = ctx.createImageData(srcCanvas.width, srcCanvas.height);
console.log('\n');
for (let k = 0; k < 12; ++k) {
console.log(pixels[k] + ', ');
}
console.log('\n');
// Luminance plot
for (let i = 0, j = 0; i < (srcCanvas.width * srcCanvas.height * 4); i+=4, ++j ) {
outData.data[i] = outData.data[i+1] = outData.data[i+2] = pixels[j];
outData.data[i+3] = 255;
}
// RGB plot
// for ( let i = 0; i < (srcCanvas.width * srcCanvas.height * 4); ++i ) {
// outData.data[i] = pixels[i];
// }
srcCanvas.getContext('2d').putImageData(outData, 0, 0);
};
}, true);
</script>
Is it possible to make gl_FragColor output 1 byte per pixel (LUMINANCE) instead of RGBA, while the input texture stays RGBA?
Not portably. The WebGL1 spec says rendering to a texture only has to be supported for gl.RGBA / gl.UNSIGNED_BYTE. All other formats are optional.
If the rendering format cannot be changed, is it possible to read only the first byte of each 4-byte pixel when calling gl.readPixels()?
No. The spec, section 4.3.1, says only gl.RGBA / gl.UNSIGNED_BYTE is supported; all other formats are optional and up to the implementation. This is the same in WebGL2: even if you make an R8 texture (red only, 8 bits), it's up to the implementation whether you can read it as gl.RED / gl.UNSIGNED_BYTE.
See WebGL1 and WebGL2
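If you still want to attempt a single-channel read, you can at least query the one extra, implementation-defined format/type pair that readPixels supports and fall back to RGBA otherwise. A sketch (assuming a WebGL2 context, since gl.RED only exists there; width, height and the pixel buffers are placeholders):
var readFormat = gl.getParameter(gl.IMPLEMENTATION_COLOR_READ_FORMAT);
var readType = gl.getParameter(gl.IMPLEMENTATION_COLOR_READ_TYPE);
if (readFormat === gl.RED && readType === gl.UNSIGNED_BYTE) {
    // Only on implementations that advertise it is a RED read guaranteed to work.
    gl.readPixels(0, 0, width, height, gl.RED, gl.UNSIGNED_BYTE, singleChannelPixels);
} else {
    // Portable path: read RGBA and keep every 4th byte.
    gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, rgbaPixels);
}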

copyTexImage2D — GL ERROR :GL_INVALID_OPERATION : glCopyTexImage2D:

I’m trying to figure out how to use the gl.copyTexImage2D function.
I have the following code (unwieldy though):
//--------- SETUP (not important) ---------------
//I use budo package to easily run browserify
var createContext = require('webgl-context');
var createShader = require('gl-shader');
//↓ here is webgl setup usual routine, using floats and simple one-triangle vertex shader
var gl = createContext({width: 2, height: 2});
gl.getExtension('OES_texture_float');
gl.getExtension('OES_texture_float_linear');
var shader = createShader(gl, `
precision mediump float;
attribute vec2 position;
varying vec2 uv;
void main (void) {
gl_Position = vec4(position, 0, 1);
uv = vec2(position.x * 0.5 + 0.5, position.y * 0.5 + 0.5);
}
`, `
precision mediump float;
uniform sampler2D image;
varying vec2 uv;
void main (void) {
gl_FragColor = texture2D(image, uv);
}
`);
//fullscreen triangle
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, -1, 3, 3, -1]), gl.STATIC_DRAW);
shader.attributes.position.pointer();
//textures
var outTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, outTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 2, 2, 0, gl.RGBA, gl.FLOAT, new Float32Array([1,1,1,1, 0,0,0,1, 0,0,0,1, 0,0,0,1]));
var sourceTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, sourceTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 2, 2, 0, gl.RGBA, gl.FLOAT, new Float32Array([0,0,0,1, 1,1,1,1, 0,0,0,1, 0,0,0,1]));
//--------------- MAIN PART (important) ---------------
//then I setup custom framebuffer ↓
var framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outTexture, 0);
gl.bindTexture(gl.TEXTURE_2D, sourceTexture);
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.drawArrays(gl.TRIANGLES, 0, 3);
//here ↓ I am expecting to copy framebuffer’s output, which is `outTexture`, to `sourceTexture`
gl.bindTexture(gl.TEXTURE_2D, sourceTexture);
gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, w, h, 0);
//then I try to render the shader again, with its own output as input
gl.drawArrays(gl.TRIANGLES, 0, 3);
//when I try to read pixels here ↓ I get the error below
gl.readPixels(0, 0, w, h, gl.RGBA, gl.FLOAT, pixels);
The error: [.CommandBufferContext]GL ERROR :GL_INVALID_OPERATION : glCopyTexImage2D:
I cannot figure out what I'm doing wrong.
The error is from gl.copyTexImage2D, not from gl.readPixels. The reason you don't see it until calling gl.readPixels is that WebGL is a command-driven API: commands are not executed until they have to be, for various reasons. gl.flush will force the commands to be executed at some point. gl.readPixels also forces the commands to be executed, since their results are needed in order to read the pixels.
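One way to confirm which call actually generated the error is to query gl.getError() right after the suspect call while debugging; it typically forces a synchronization, so don't leave it in production code. A sketch:
gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, w, h, 0);
var err = gl.getError(); // stalls the pipeline; debug only
if (err !== gl.NO_ERROR) {
    console.log('copyTexImage2D generated GL error 0x' + err.toString(16));
}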
As for the error, you need to provide more code. The code as shown works through gl.copyTexImage2D, which means the error you're getting has to do with code you're not showing. Either you created your textures wrong, or w and h are funky values, or something else.
Trying it out myself below, it works but points out another error: you can't read floats with gl.readPixels in WebGL. Switching to UNSIGNED_BYTE works.
var gl = document.getElementById("c").getContext("webgl");
var w = 300;
var h = 150;
var programInfo = twgl.createProgramInfo(gl, ["vs", "fs"]);
gl.useProgram(programInfo.program);
var arrays = {
position: [-1, -1, 0, 1, -1, 0, -1, 1, 0, -1, 1, 0, 1, -1, 0, 1, 1, 0],
};
var bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// make a renderable npot texture
function createRenderableTexture(gl, w, h) {
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, w, h, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
return tex;
}
var outTexture = createRenderableTexture(gl, w, h);
var framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outTexture, 0);
// render something to it
gl.clearColor(0,1,0,1); // green
gl.clear(gl.COLOR_BUFFER_BIT);
// copy the framebuffer to the texture
var sourceTexture = createRenderableTexture(gl, w, h)
gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, w, h, 0);
// draw to canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
// clear to red
gl.clearColor(1,0,0,1);
gl.clear(gl.COLOR_BUFFER_BIT);
// Since we cleared to red and the texture is filled with green
// the result should be green
gl.drawArrays(gl.TRIANGLES, 0, 6);
var pixels = new Uint8Array(w * h * 4);
gl.readPixels(0, 0, w, h, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
log("pixel 0: "
+ pixels[0] + ","
+ pixels[1] + ","
+ pixels[2] + ","
+ pixels[3]);
function log(msg) {
var div = document.createElement("pre");
div.appendChild(document.createTextNode(msg));
document.body.appendChild(div);
}
<script src="https://twgljs.org/dist/twgl.min.js"></script>
<script id="vs" type="notjs">
attribute vec4 position;
varying vec2 v_uv;
void main() {
gl_Position = position;
v_uv = position.xy * 0.5 + 0.5;
}
</script>
<script id="fs" type="notjs">
precision mediump float;
varying vec2 v_uv;
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, v_uv);
}
</script>
<canvas id="c"></canvas>
On the other hand, FLOAT textures should not work. They worked for me, but as far as I can tell that's a bug on my system, not yours.
Specifically, FLOAT textures are not allowed by default in WebGL. You can turn them on by enabling the OES_texture_float extension, but that extension specifically says:
Should CopyTexImage2D, CopyTexSubImage{2D|3D} be supported for textures with half-float and float channels?
RESOLUTION: No.
I filed a bug for Chrome
You can try to work around that issue by implementing copyTexImage2D yourself. Basically, attach the texture you want to copy from as a source texture to some shader and a quad. Attach the texture you want to copy to as a color attachment of a framebuffer object. Since it's a FLOAT texture, you need to call checkFramebufferStatus and check that it returns FRAMEBUFFER_COMPLETE, to make sure your GPU/driver actually supports floating-point textures as attachments. Then draw your quad with a simple shader, effectively copying your texture from src to dst.
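A rough sketch of that workaround (copyFb, copyProgram, srcTexture, dstTexture, w and h are placeholder names; the pass-through shader and fullscreen triangle are assumed to be the ones already set up above):
var copyFb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, copyFb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, dstTexture, 0);
// Required for FLOAT attachments: the driver may not support them at all.
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
    throw new Error('floating point render targets are not supported');
}
gl.useProgram(copyProgram);        // simple pass-through shader
gl.bindTexture(gl.TEXTURE_2D, srcTexture);
gl.viewport(0, 0, w, h);
gl.drawArrays(gl.TRIANGLES, 0, 3); // fullscreen triangle copies src into dst
gl.bindFramebuffer(gl.FRAMEBUFFER, null);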

webGL texture not showed in rectangle

I'm having a problem texturing my rectangle: a black canvas is displayed instead of the canvas textured with the image.
First I create the WebGL program, attach the shaders, and link the WebGL program as usual.
Then I create the texture when the image is loaded, like this:
var texture = gl.createTexture();
var image = document.createElement("img");
image.src = "https://upload.wikimedia.org/wikipedia/commons/3/3a/Saint-Gervais-les-Bains_-_Mt-Blanc_JPG01.jpg";
image.onload = function() {
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
}
After that I pass the information about the rectangle vertices into the vertex shader:
var pos = gl.getAttribLocation(program, "pos");
gl.enableVertexAttribArray(pos);
var pos_Buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, pos_Buffer);
var vertices = [-1.0, -1.0, // "left-down"
-1.0, 1.0, // "left-top"
1.0, -1.0, // "right-down"
1.0, 1.0, // "right-top"
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
gl.vertexAttribPointer(pos, 2, gl.FLOAT, false, 0, 0);
And in the end I draw my rectangle by passing the vertex indices to the drawElements function:
var indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
var indices = [0, 1, 2, 1, 2, 3];
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices), gl.STATIC_DRAW);
// draw triangles
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_BYTE, 0);
There is a jsfiddle with my problem.
Do you have any idea how to solve it?
image.onload is an asynchronous callback, and you issue the draw call before it runs (the image isn't loaded yet when you draw to the canvas).
You must put gl.drawElements inside it:
image.onload = function() { // image.onload STARTS
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
// anything you want, blehblahbleh ...
// draw on canvas
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_BYTE, 0);
} // image.onload ENDS
Also, yes, there was a problem with the cross-origin resource request, but I guess you solved that with an extension (or for jsfiddle tests you could use a base64 data URL).
Slightly updated sample: http://jsfiddle.net/windkiller/6cLo3890/
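Another common pattern (not part of the answer above, just a sketch) is to upload a 1x1 placeholder pixel so the first draw is valid immediately, then re-upload and redraw once the image has loaded; draw() here is a hypothetical helper that issues the gl.drawElements call:
gl.bindTexture(gl.TEXTURE_2D, texture);
// 1x1 opaque blue placeholder so drawing before onload shows something sane
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
              new Uint8Array([0, 0, 255, 255]));
draw();
image.onload = function() {
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
    draw(); // redraw with the real image
};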

Using array of images as texture in WebGL

How do you use an array of textures (each containing a different image) in WebGL without initializing each texture separately? I want to display a cube and a pyramid (organized as an array), each with a different texture image. Here is part of the code (to initialize the textures):
function handleTexture(texture) {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.Img);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
gl.generateMipmap(gl.TEXTURE_2D);
gl.bindTexture(gl.TEXTURE_2D, null);
}
function initTextures() {
for (i=0; i<2; i++) {
tex[i] = gl.createTexture();
tex[i].Img = new Image();
tex[i].Img.onload = function() { handleTexture(tex[i]); }
tex[i].Img.src = texImgs[i]; // The name of the image file
}
}
The cube and pyramid are displayed in black (no texture), and I get this error on the page:
Uncaught TypeError: Cannot read property 'Img' of undefined // gl.texImage2D()
tex.(anonymous function).Img.onload // tex[i].Img.onload = ...
I don't get this error if I initialize the textures separately, not with an array but as tex1 and tex2. Any suggestion on how to do this with an array?
When you do:
tex[i] = gl.createTexture();
inside that loop, every onload callback closes over the same loop variable i, which has already run past the last index by the time the images finish loading, so tex[i] is undefined inside the callback (hence the "Cannot read property 'Img' of undefined" error). A slightly more correct version, which captures the index per iteration and keeps the image and texture as separate properties, would be:
var tex = [], texImgs = ['pyra.jpg', 'cube.jpg'];
function handleTexture(myTexture) {
myTexture.texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, myTexture.texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, myTexture.image);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
gl.generateMipmap(gl.TEXTURE_2D);
gl.bindTexture(gl.TEXTURE_2D, null);
}
function initTextures() {
for (var i=0; i<2; i++) {
(function(index) {
tex[index] = new Object();
tex[index].texture = null;
tex[index].image = new Image();
tex[index].image.onload = function() { handleTexture(tex[index]); }
tex[index].image.src = texImgs[index]; // The name of the image file
})(i);
}
}
Note that each entry is now an object with separate image and texture properties.
Hope this helps.
EDIT:
The closure was messing up the index into tex; the per-iteration function above captures it correctly.
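If you can rely on ES2015, a block-scoped let in the loop avoids the closure problem without the IIFE; a sketch, assuming the same handleTexture and texImgs as above:
function initTextures() {
    for (let i = 0; i < 2; i++) { // let gives each iteration its own i
        tex[i] = { texture: null, image: new Image() };
        tex[i].image.onload = function () { handleTexture(tex[i]); };
        tex[i].image.src = texImgs[i];
    }
}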

WebGL display framebuffer?

I used the WEBKIT_WEBGL_depth_texture extension
and initialized the buffers below.
But how am I able to draw this framebuffer? I'm totally stuck right now. -.-
function InitDepthtextures (){
var size = 256;
// Create a color texture
var colorTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, colorTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
// Create the depth texture
depthTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, depthTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, size, size, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null);
framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, colorTexture, 0);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, depthTexture, 0);
//set to default
gl.bindTexture(gl.TEXTURE_2D, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
You don't draw framebuffers; you draw textures. So first you attach a texture to a framebuffer. Then, with that framebuffer bound, you draw something, and the result is drawn into the attachments of the framebuffer. Next you unbind the framebuffer and draw something using the textures you attached to the framebuffer, to see their contents.
Example:
const gl = document.querySelector('#c').getContext('webgl');
const vshader = `
attribute vec4 a_position;
varying vec2 v_texcoord;
void main() {
gl_Position = a_position;
v_texcoord = a_position.xy * 0.5 + 0.5;
}
`;
const fshader = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_sampler;
void main() {
gl_FragColor = texture2D(u_sampler, v_texcoord);
}
`;
// compiles shaders, links program
const program = twgl.createProgram(gl, [vshader, fshader]);
gl.useProgram(program);
const positionLocation = gl.getAttribLocation(program, "a_position");
// a single triangle from top right to bottom left to bottom right
const verts = [
1, 1,
-1, 1,
-1, -1,
];
const vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
{
const numElements = 2;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.vertexAttribPointer(
positionLocation, numElements, type, normalize, stride, offset);
gl.enableVertexAttribArray(positionLocation);
}
// create an empty texture
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
{
const level = 0;
const internalFormat = gl.RGBA;
const width = 1;
const height = 1;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border,
format, type, data);
}
// Create a framebuffer and attach the texture.
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
{
const level = 0
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, level);
}
// Render to the texture (using clear because it's simple)
gl.clearColor(0, 1, 0, 1); // green;
gl.clear(gl.COLOR_BUFFER_BIT);
// Now draw with the texture to the canvas
// NOTE: We clear the canvas to red so we'll know
// so anywhere that shows up green is coming from the canvas texture
// from above.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.clearColor(1, 0, 0, 1); // red
gl.clear(gl.COLOR_BUFFER_BIT);
{
const offset = 0;
const count = 3;
gl.drawArrays(gl.TRIANGLES, offset, count);
}
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas id="c" width="400" height="400"></canvas>
You can read more about this topic here
