I am currently working on a simple WebGL project that requires rendering multiple 2D objects, each with a simple image textured onto it. The actual project generates a random number of objects, usually between 5 and 9, sets vertex data for them around the canvas to keep them separated, and is supposed to render them all; however, only one renders at a time (usually the last, though I can change gl.activeTexture around to show other objects in the array). I tried following a question on here about texture arrays in the shader, but to no avail, so I ended up writing a very simple test program that just tries to load two objects and two textures, one on the left of the canvas and the other on the right.
From there I tried doing everything completely separately, even giving each object its own shaders, program, buffers and everything, and then binding everything for each object before calling gl.drawElements for it. This still doesn't show the correct result: only the second texture appears. It did, however, lead me to what I believe is happening. If I comment out the bindings and the draw call for the second object, the first texture shows up, but it appears at the location of the second texture, not where its vertices should place it. So I assume that this program (and my project code) is in fact drawing both objects, but for some reason applying the vertices of the last one drawn to all of them, stacking them so that only the top (last drawn) one is visible.
I have also tried a mishmash of tweaks to the code below: using only one program, using the same indices and texture coordinates, and there are some commented-out lines from trying calls in different orders as well. Anything commented out doesn't necessarily mean I think it is right or wrong; it's just left over from various things I've aimlessly tried at this point.
I have worked with OpenGL a little and had little to no trouble drawing multiple objects with their own textures, and I know that WebGL works differently from OpenGL in some ways, including textures, but I do not see where I am creating the issue. I'm sure it is something very simple, and any guidance would be greatly appreciated.
I apologize for the long block of code; it's pretty much everything typed out that I believe is needed, without taking any shortcuts. The initShaders call comes from the WebGL JS files I'm using from my textbook and isn't something I've written, and the loadImages call simply loads <img> elements from the HTML. As far as I can tell there are no issues with the images being loaded. I only included the first vertex and fragment shader because the other two are the same except for the id.
<script id="vertex-shader1" type="x-shader/x-vertex">
attribute vec4 vPosition;
attribute vec2 vTexCoord;
varying vec2 fTexCoord;
void main() {
fTexCoord = vTexCoord;
gl_Position = vPosition;
}
</script>
<script id="fragment-shader1" type="x-shader/x-fragment">
precision mediump float;
varying vec2 fTexCoord;
uniform sampler2D texture;
void main() {
gl_FragColor = texture2D(texture, fTexCoord);
}
</script>
"use-strict"
var gl;
var images = [];
var program1;
var program2;
var texture1;
var texture2;
var vBuff1;
var vBuff2;
var iBuff1;
var iBuff2;
var tBuff1;
var tBuff2;
var vPos1;
var vPos2;
var fTexCoord1;
var fTexCoord2;
var sampler1;
var sampler2;
var vertices1 = [
vec4(-0.8, 0.1, 0.0, 1.0),
vec4(-0.8, 0.3, 0.0, 1.0),
vec4(-0.6, 0.3, 0.0, 1.0),
vec4(-0.6, 0.1, 0.0, 1.0)
];
var vertices2 = [
vec4(0.1, 0.1, 0.0, 1.0),
vec4(0.1, 0.3, 0.0, 1.0),
vec4(0.3, 0.3, 0.0, 1.0),
vec4(0.3, 0.1, 0.0, 1.0)
];
var indices1 = [
0, 1, 2,
0, 2, 3
];
var indices2 = [
0, 1, 2,
0, 2, 3
];
var tcs1 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
var tcs2 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
window.onload = function init() {
var canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) { alert("WebGL isn't available"); }
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
loadImages();
program1 = initShaders(gl, "vertex-shader1", "fragment-shader1");
gl.useProgram(program1);
vBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices1), gl.STATIC_DRAW);
vPos1 = gl.getAttribLocation(program1, "vPosition");
gl.vertexAttribPointer(vPos1, 4, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(vPos1);
iBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices1), gl.STATIC_DRAW);
tBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs1), gl.STATIC_DRAW);
fTexCoord1 = gl.getAttribLocation(program1, "vTexCoord");
gl.vertexAttribPointer(fTexCoord1, 2, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(fTexCoord1);
sampler1 = gl.getUniformLocation(program1, "texture");
texture1 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[0]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
///////////////////////////////////////////////////////////////////////////////////////
/*
program2 = initShaders(gl, "vertex-shader2", "fragment-shader2");
gl.useProgram(program2);
*/
vBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices2), gl.STATIC_DRAW);
vPos2 = gl.getAttribLocation(program1, "vPosition");
gl.vertexAttribPointer(vPos2, 4, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(vPos2);
iBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices2), gl.STATIC_DRAW);
tBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs2), gl.STATIC_DRAW);
fTexCoord2 = gl.getAttribLocation(program1, "vTexCoord");
gl.vertexAttribPointer(fTexCoord2, 2, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(fTexCoord2);
sampler2 = gl.getUniformLocation(program1, "texture");
texture2 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[1]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
render();
};
function render() {
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(program1);
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.enableVertexAttribArray(vPos1);
gl.enableVertexAttribArray(fTexCoord1);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.uniform1i(sampler1, 0);
// gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
// gl.enableVertexAttribArray(vPos1);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.drawElements(gl.TRIANGLES, indices1.length, gl.UNSIGNED_BYTE, 0);
//gl.bindTexture(gl.TEXTURE_2D, null);
// gl.useProgram(program2);
gl.bindBuffer(gl.ARRAY_BUFFER,vBuff2);
gl.enableVertexAttribArray(vPos2);
gl.enableVertexAttribArray(fTexCoord2);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.uniform1i(sampler2, 0);
// gl.bindBuffer(gl.ARRAY_BUFFER, vBuff2);
// gl.enableVertexAttribArray(vPos2);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.drawElements(gl.TRIANGLES, indices2.length, gl.UNSIGNED_BYTE, 0);
requestAnimFrame(render);
}
First off, AFAIK your code can't work. It calls a function loadImages and then immediately uses the images. Images load asynchronously in the browser, so you need to either have a callback for when the images have loaded or else use async functions.
Here is your code working. First I made a loadImage that returns a Promise. Then I made an async function called loadImages that uses it to load all the images and wait for them to load. Then I made another async function called main that first awaits loadImages and then calls init.
The second issue is that in WebGL1 attributes are global state. That means you need to set them at render time, not init time, so the calls to gl.enableVertexAttribArray and gl.vertexAttribPointer need to happen at render time with the appropriate values for the particular thing you are rendering. gl.vertexAttribPointer copies the current ARRAY_BUFFER binding to that attribute.
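To make the pattern concrete, here is a minimal sketch (a hypothetical drawObject helper, not code from the answer below) of doing the attribute and texture setup per object at draw time:
// Sketch only: assumes each obj carries its own buffers, texture, and locations.
function drawObject(obj) {
  gl.bindBuffer(gl.ARRAY_BUFFER, obj.positionBuffer);
  gl.enableVertexAttribArray(obj.positionLoc);
  gl.vertexAttribPointer(obj.positionLoc, 4, gl.FLOAT, false, 0, 0);
  gl.bindBuffer(gl.ARRAY_BUFFER, obj.texCoordBuffer);
  gl.enableVertexAttribArray(obj.texCoordLoc);
  gl.vertexAttribPointer(obj.texCoordLoc, 2, gl.FLOAT, false, 0, 0);
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(gl.TEXTURE_2D, obj.texture);
  gl.uniform1i(obj.samplerLoc, 0);
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, obj.indexBuffer);
  gl.drawElements(gl.TRIANGLES, obj.numIndices, gl.UNSIGNED_BYTE, 0);
}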
You might find these tutorials helpful, in particular this one about attributes and this state diagram that might help you visualize what is happening inside WebGL.
"use-strict";
const vec2 = (...args) => [...args];
const vec4 = (...args) => [...args];
const flatten = a => new Float32Array(a.flat());
const WebGLUtils = {
setupWebGL: (canvas) => { return canvas.getContext('webgl'); },
};
const initShaders = (gl, vs, fs) => twgl.createProgram(gl, [vs, fs]);
const requestAnimFrame = requestAnimationFrame;
var gl;
var images = [];
var program1;
var program2;
var texture1;
var texture2;
var vBuff1;
var vBuff2;
var iBuff1;
var iBuff2;
var tBuff1;
var tBuff2;
var vPos1;
var vPos2;
var fTexCoord1;
var fTexCoord2;
var sampler1;
var sampler2;
var vertices1 = [
vec4(-0.8, 0.1, 0.0, 1.0),
vec4(-0.8, 0.3, 0.0, 1.0),
vec4(-0.6, 0.3, 0.0, 1.0),
vec4(-0.6, 0.1, 0.0, 1.0)
];
var vertices2 = [
vec4(0.1, 0.1, 0.0, 1.0),
vec4(0.1, 0.3, 0.0, 1.0),
vec4(0.3, 0.3, 0.0, 1.0),
vec4(0.3, 0.1, 0.0, 1.0)
];
var indices1 = [
0, 1, 2,
0, 2, 3
];
var indices2 = [
0, 1, 2,
0, 2, 3
];
var tcs1 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
var tcs2 = [
vec2(0, 0),
vec2(0, 1),
vec2(1, 1),
vec2(1, 0)
];
function init() {
var canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) { alert("WebGL isn't available"); }
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
program1 = initShaders(gl, "vertex-shader1", "fragment-shader1");
gl.useProgram(program1);
vBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices1), gl.STATIC_DRAW);
vPos1 = gl.getAttribLocation(program1, "vPosition");
iBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices1), gl.STATIC_DRAW);
tBuff1 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff1);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs1), gl.STATIC_DRAW);
fTexCoord1 = gl.getAttribLocation(program1, "vTexCoord");
sampler1 = gl.getUniformLocation(program1, "texture");
texture1 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[0]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
///////////////////////////////////////////////////////////////////////////////////////
/*
program2 = initShaders(gl, "vertex-shader2", "fragment-shader2");
gl.useProgram(program2);
*/
vBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(vertices2), gl.STATIC_DRAW);
vPos2 = gl.getAttribLocation(program1, "vPosition");
iBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices2), gl.STATIC_DRAW);
tBuff2 = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff2);
gl.bufferData(gl.ARRAY_BUFFER, flatten(tcs2), gl.STATIC_DRAW);
fTexCoord2 = gl.getAttribLocation(program1, "vTexCoord");
gl.vertexAttribPointer(fTexCoord2, 2, gl.FLOAT, false, 0, 0);
//gl.enableVertexAttribArray(fTexCoord2);
sampler2 = gl.getUniformLocation(program1, "texture");
texture2 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, images[1]);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
render();
};
function render() {
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(program1);
gl.bindBuffer(gl.ARRAY_BUFFER, vBuff1);
gl.enableVertexAttribArray(vPos1);
gl.vertexAttribPointer(vPos1, 4, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, tBuff1);
gl.enableVertexAttribArray(fTexCoord1);
gl.vertexAttribPointer(fTexCoord1, 2, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.uniform1i(sampler1, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff1);
gl.drawElements(gl.TRIANGLES, indices1.length, gl.UNSIGNED_BYTE, 0);
gl.bindBuffer(gl.ARRAY_BUFFER,vBuff2);
gl.enableVertexAttribArray(vPos2);
gl.vertexAttribPointer(vPos2, 4, gl.FLOAT, false, 0, 0);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.uniform1i(sampler2, 0);
gl.bindBuffer(gl.ARRAY_BUFFER,tBuff2);
gl.enableVertexAttribArray(fTexCoord2);
gl.vertexAttribPointer(fTexCoord2, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuff2);
gl.drawElements(gl.TRIANGLES, indices2.length, gl.UNSIGNED_BYTE, 0);
requestAnimFrame(render);
}
function loadImage(url) {
return new Promise((resolve, reject) => {
const img = new Image();
img.onload = () => resolve(img);
img.onerror = reject;
img.crossOrigin = 'anonymous';
img.src = url;
});
}
async function loadImages(imgs) {
images = await Promise.all(imgs.map(loadImage));
}
async function main() {
await loadImages([
'https://webglfundamentals.org/webgl/resources/f-texture.png',
'https://webglfundamentals.org/webgl/lessons/resources/noodles-01.jpg',
]);
init();
}
main();
<script id="vertex-shader1" type="x-shader/x-vertex">
attribute vec4 vPosition;
attribute vec2 vTexCoord;
varying vec2 fTexCoord;
void main() {
fTexCoord = vTexCoord;
gl_Position = vPosition;
}
</script>
<script id="fragment-shader1" type="x-shader/x-fragment">
precision mediump float;
varying vec2 fTexCoord;
uniform sampler2D texture;
void main() {
gl_FragColor = texture2D(texture, fTexCoord);
}
</script>
<canvas id="gl-canvas"></canvas>
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
I'm trying to copy the content (the displayed image) of my first WebGL context into a texture belonging to another WebGL context.
I'm using the texImage2D function with a canvas element as the source. I get no errors, but it renders nothing but black.
I don't know what I'm missing, so any kind of help is greatly appreciated.
I'm looking for a WebGL1 solution, and I'm using Chrome.
var canvas1;
var texture1;
var image;
var shaderProgram;
var vertex_buffer;
var texture_buffer;
var aVertLocation;
var aTexLocation;
var vertices = [];
var texCoords = [];
var gl;
var gl2;
var canvas2;
var texture2;
var shaderProgram2;
var vertex_buffer2;
var texture_buffer2;
var index_Buffer2;
var aVertLocation2;
var aTexLocation2;
var vertices2 = [];
var texCoords2 = [];
indices = [0, 1, 2, 0, 2, 3];
vertices = [-1, -1, 1, -1, 1, 1, -1, 1];
texCoords = [0, 0, 1, 0, 1, 1, 0, 1];
function initApp()
{
initWebGL();
image = new Image();
image.onload = function(){
render();
render2();
}
image.crossOrigin = '';
image.src = 'https://i.imgur.com/ZKMnXce.png';
}
function initWebGL()
{
canvas1 = document.getElementById('glCanvas1');
gl = canvas1.getContext('webgl');
/*====================== Shaders =======================*/
// Vertex shader source code
var vertCode =
'attribute vec2 coordinates;' +
'attribute vec2 aTexCoord;' +
'varying highp vec2 vTexCoord;' +
'void main(void) {' +
'gl_Position = vec4(coordinates,1.0,1.0);' +
'vTexCoord = aTexCoord;' +
'}';
var vertShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertShader, vertCode);
gl.compileShader(vertShader);
//fragment shader source code
var fragCode =
'uniform sampler2D texture;' +
'varying highp vec2 vTexCoord;' +
'void main(void) {' +
' gl_FragColor = texture2D(texture, vTexCoord);' +
'}';
var fragShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragShader, fragCode);
gl.compileShader(fragShader);
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertShader);
gl.attachShader(shaderProgram, fragShader);
gl.deleteShader( vertShader );
gl.deleteShader( fragShader );
gl.linkProgram(shaderProgram);
gl.useProgram(shaderProgram);
aVertLocation = gl.getAttribLocation(shaderProgram, "coordinates");
aTexLocation = gl.getAttribLocation(shaderProgram, "aTexCoord");
vertex_buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertex_buffer);
gl.enableVertexAttribArray(aVertLocation);
gl.vertexAttribPointer(aVertLocation, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
texture_buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texture_buffer);
gl.enableVertexAttribArray(aTexLocation);
gl.vertexAttribPointer(aTexLocation, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
index_buffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, index_buffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
texture1 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.uniform1i( gl.getUniformLocation( shaderProgram, 'texture' ), 0 );
gl.bindTexture(gl.TEXTURE_2D, null);
//==========================================================//
canvas2 = document.getElementById('glCanvas2');
gl2 = canvas2.getContext('webgl');
var vertShader2 = gl2.createShader(gl2.VERTEX_SHADER);
var fragShader2 = gl2.createShader(gl2.FRAGMENT_SHADER);
gl2.shaderSource(vertShader2, vertCode);
gl2.shaderSource(fragShader2, fragCode);
gl2.compileShader(vertShader2);
gl2.compileShader(fragShader2);
shaderProgram2 = gl2.createProgram();
gl2.attachShader(shaderProgram2, vertShader2);
gl2.attachShader(shaderProgram2, fragShader2);
gl2.deleteShader( vertShader2 );
gl2.deleteShader( fragShader2 );
gl2.linkProgram(shaderProgram2);
gl2.useProgram(shaderProgram2);
aVertLocation2 = gl2.getAttribLocation(shaderProgram2, "coordinates");
aTexLocation2 = gl2.getAttribLocation(shaderProgram2, "aTexCoord");
vertex_buffer2 = gl2.createBuffer();
gl2.bindBuffer(gl2.ARRAY_BUFFER, vertex_buffer2);
gl2.enableVertexAttribArray(aVertLocation2);
gl2.vertexAttribPointer(aVertLocation2, 2, gl2.BYTE, false, 0, 0);
gl2.bufferData(gl2.ARRAY_BUFFER, new Float32Array(vertices), gl2.STATIC_DRAW);
gl2.bindBuffer(gl2.ARRAY_BUFFER, null);
texture_buffer2 = gl2.createBuffer();
gl2.bindBuffer(gl2.ARRAY_BUFFER, texture_buffer2);
gl2.enableVertexAttribArray(aTexLocation2);
gl2.vertexAttribPointer(aTexLocation, 2, gl2.BYTE, false, 0, 0);
gl2.bufferData(gl2.ARRAY_BUFFER, new Float32Array(texCoords), gl2.STATIC_DRAW);
gl2.bindBuffer(gl2.ARRAY_BUFFER, null);
index_buffer2 = gl2.createBuffer();
gl2.bindBuffer(gl2.ELEMENT_ARRAY_BUFFER, index_buffer2);
gl2.bufferData(gl2.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl2.STATIC_DRAW);
gl2.bindBuffer(gl2.ELEMENT_ARRAY_BUFFER, null);
texture2 = gl2.createTexture();
gl2.bindTexture(gl2.TEXTURE_2D, texture2);
gl2.uniform1i( gl2.getUniformLocation( shaderProgram2, 'texture' ), 0 );
gl2.bindTexture(gl2.TEXTURE_2D, null);
}
function updateTexture()
{
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, image);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.generateMipmap(gl.TEXTURE_2D);
gl.bindTexture(gl.TEXTURE_2D, null);
}
function render()
{
if ( !shaderProgram ) return;
updateTexture();
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.enableVertexAttribArray(aVertLocation);
gl.enableVertexAttribArray(aTexLocation);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, index_buffer)
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
gl.disableVertexAttribArray(aVertLocation);
gl.disableVertexAttribArray(aTexLocation);
}
function updateTexture2()
{
gl2.bindTexture(gl2.TEXTURE_2D, texture2);
gl2.texImage2D(gl2.TEXTURE_2D, 0, gl2.RGBA, gl2.RGBA, gl2.UNSIGNED_BYTE, canvas1);
gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_MAG_FILTER, gl2.LINEAR);
gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_MIN_FILTER, gl2.LINEAR);
gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_WRAP_S, gl2.CLAMP_TO_EDGE);
gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_WRAP_T, gl2.CLAMP_TO_EDGE);
gl2.generateMipmap(gl2.TEXTURE_2D);
gl2.bindTexture(gl2.TEXTURE_2D, null);
}
function render2()
{
if ( !shaderProgram2 ) return;
updateTexture2();
gl2.clearColor(0.0, 0.0, 0.0, 1.0);
gl2.clear( gl2.COLOR_BUFFER_BIT | gl2.DEPTH_BUFFER_BIT );
gl2.bindTexture(gl2.TEXTURE_2D, texture2);
gl2.enableVertexAttribArray(aVertLocation2);
gl2.enableVertexAttribArray(aTexLocation2);
gl2.bindBuffer(gl2.ELEMENT_ARRAY_BUFFER, index_buffer2);
gl2.drawElements(gl2.TRIANGLES, 6, gl2.UNSIGNED_SHORT,0);
gl2.disableVertexAttribArray(aVertLocation2);
gl2.disableVertexAttribArray(aTexLocation2);
}
document.addEventListener('DOMContentLoaded', initApp);
<canvas id="glCanvas1" width="128" height="128" ></canvas>
<canvas id="glCanvas2" width="128" height="128" ></canvas>
Thanks in advance :)
The copy is working just fine. What's not working is your code 😜
Here's what I did to find the bug
First I moved the code into a snippet so I could actually run it. Please use snippets in the future.
Next I used an image from imgur. Because that image is on another domain I needed to set crossOrigin. Fortunately imgur supports CORS, allowing WebGL to use the image. If it were me I wouldn't have used an image at all, because that part is not important; a single colored pixel would show the issue just as well and remove the need for an image.
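(For reference, a 1x1 single-color texture can be created with no image at all; this is just an illustration, not part of the code in question:)
// Illustration only: a 1x1 red texture, no image loading required.
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
              new Uint8Array([255, 0, 0, 255]));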
Now that the code was running and the bug reproduced, the first thing to do was to change this line in updateTexture2
gl2.texImage2D(gl2.TEXTURE_2D, 0, gl2.RGBA, gl2.RGBA, gl2.UNSIGNED_BYTE, canvas1);
to just use the same image
gl2.texImage2D(gl2.TEXTURE_2D, 0, gl2.RGBA, gl2.RGBA, gl2.UNSIGNED_BYTE, image);
The second canvas was still black, showing the issue had nothing to do with copying a canvas.
So, next I edited the fragment shader to this
gl_FragColor = vec4(1,0,0,1);
The second canvas was still black. This showed the issue had nothing to do with textures at all; the code simply wasn't drawing anything visible to the second canvas.
So, looking at the stuff related to the vertex shader, the bug turned out to be these 2 lines
gl2.vertexAttribPointer(aVertLocation2, 2, gl2.BYTE, false, 0, 0);
...
gl2.vertexAttribPointer(aTexLocation, 2, gl2.BYTE, false, 0, 0);
They needed to be gl.FLOAT, not gl.BYTE, since the buffers are filled with Float32Array data.
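For clarity, the corrected lines look like this (note the second one also passes aTexLocation where aTexLocation2 was presumably intended; it happens to work here because both programs are built from the same shader source and end up with the same locations, but it's worth fixing too):
gl2.vertexAttribPointer(aVertLocation2, 2, gl2.FLOAT, false, 0, 0);
...
gl2.vertexAttribPointer(aTexLocation2, 2, gl2.FLOAT, false, 0, 0);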
Some other random comments.
I used multi-line template literals for the shaders.
There's no reason to call gl.generateMipmap if your filtering is set not to use mips.
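For example, the two consistent options look roughly like this (illustration only):
// Option 1: filtering that doesn't use mips, so no generateMipmap needed.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
// Option 2: mip-based filtering, which does require generateMipmap
// (and, in WebGL1, a power-of-two texture).
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
gl.generateMipmap(gl.TEXTURE_2D);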
This code has no real effect:
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.uniform1i( gl.getUniformLocation( shaderProgram, 'texture' ), 0 );
gl.bindTexture(gl.TEXTURE_2D, null);
There's no reason to bind the texture here. gl.uniform1i just sets an integer value on the uniform in shaderProgram; it does not record anything about the texture itself. So just
gl.uniform1i( gl.getUniformLocation( shaderProgram, 'texture' ), 0 );
without the bindTexture calls would be fine. On top of that, uniforms default to 0, so you don't really need the gl.uniform1i call at all. On the other hand, maybe you had it there so you could set it to something other than 0 later.
Finally, because WebGL cannot share resources across canvases (at least as of July 2017), depending on what you're making you might want to consider using a single canvas. See the last solution in this answer.
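A rough sketch of that single-canvas idea (assuming one context, with hypothetical drawScene1/drawScene2 functions standing in for whatever each view renders):
// Sketch only: two views side by side on one canvas, one WebGL context.
gl.enable(gl.SCISSOR_TEST);
function drawView(x, y, width, height, drawFn) {
  gl.viewport(x, y, width, height);
  gl.scissor(x, y, width, height);   // clear only this region
  gl.clearColor(0, 0, 0, 1);
  gl.clear(gl.COLOR_BUFFER_BIT);
  drawFn();
}
drawView(0, 0, 128, 128, drawScene1);    // left view
drawView(128, 0, 128, 128, drawScene2);  // right view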
var canvas1;
var texture1;
var image;
var shaderProgram;
var vertex_buffer;
var texture_buffer;
var aVertLocation;
var aTexLocation;
var vertices = [];
var texCoords = [];
var gl;
var gl2;
var canvas2;
var texture2;
var shaderProgram2;
var vertex_buffer2;
var texture_buffer2;
var index_Buffer2;
var aVertLocation2;
var aTexLocation2;
var vertices2 = [];
var texCoords2 = [];
indices = [0, 1, 2, 0, 2, 3];
vertices = [-1, -1, 1, -1, 1, 1, -1, 1];
texCoords = [0, 0, 1, 0, 1, 1, 0, 1];
function initApp()
{
initWebGL();
image = new Image();
image.onload = function(){
render();
render2();
}
image.crossOrigin = '';
image.src = 'https://i.imgur.com/ZKMnXce.png';
}
function initWebGL()
{
canvas1 = document.getElementById('glCanvas1');
gl = canvas1.getContext('webgl');
/*====================== Shaders =======================*/
// Vertex shader source code
var vertCode = `
attribute vec2 coordinates;
attribute vec2 aTexCoord;
varying highp vec2 vTexCoord;
void main(void) {
gl_Position = vec4(coordinates,1.0,1.0);
vTexCoord = aTexCoord;
}
`;
var vertShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertShader, vertCode);
gl.compileShader(vertShader);
//fragment shader source code
var fragCode = `
precision mediump float;
uniform sampler2D texture;
varying highp vec2 vTexCoord;
void main(void) {
gl_FragColor = texture2D(texture, vTexCoord);
}
`;
var fragShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragShader, fragCode);
gl.compileShader(fragShader);
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertShader);
gl.attachShader(shaderProgram, fragShader);
gl.deleteShader( vertShader );
gl.deleteShader( fragShader );
gl.linkProgram(shaderProgram);
gl.useProgram(shaderProgram);
aVertLocation = gl.getAttribLocation(shaderProgram, "coordinates");
aTexLocation = gl.getAttribLocation(shaderProgram, "aTexCoord");
vertex_buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertex_buffer);
gl.enableVertexAttribArray(aVertLocation);
gl.vertexAttribPointer(aVertLocation, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
texture_buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texture_buffer);
gl.enableVertexAttribArray(aTexLocation);
gl.vertexAttribPointer(aTexLocation, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
index_buffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, index_buffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
texture1 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.uniform1i( gl.getUniformLocation( shaderProgram, 'texture' ), 0 );
gl.bindTexture(gl.TEXTURE_2D, null);
//==========================================================//
canvas2 = document.getElementById('glCanvas2');
gl2 = canvas2.getContext('webgl');
var vertShader2 = gl2.createShader(gl2.VERTEX_SHADER);
var fragShader2 = gl2.createShader(gl2.FRAGMENT_SHADER);
gl2.shaderSource(vertShader2, vertCode);
gl2.shaderSource(fragShader2, fragCode);
gl2.compileShader(vertShader2);
gl2.compileShader(fragShader2);
shaderProgram2 = gl2.createProgram();
gl2.attachShader(shaderProgram2, vertShader2);
gl2.attachShader(shaderProgram2, fragShader2);
gl2.deleteShader( vertShader2 );
gl2.deleteShader( fragShader2 );
gl2.linkProgram(shaderProgram2);
gl2.useProgram(shaderProgram2);
aVertLocation2 = gl2.getAttribLocation(shaderProgram2, "coordinates");
aTexLocation2 = gl2.getAttribLocation(shaderProgram2, "aTexCoord");
vertex_buffer2 = gl2.createBuffer();
gl2.bindBuffer(gl2.ARRAY_BUFFER, vertex_buffer2);
gl2.enableVertexAttribArray(aVertLocation2);
gl2.vertexAttribPointer(aVertLocation2, 2, gl2.FLOAT, false, 0, 0);
gl2.bufferData(gl2.ARRAY_BUFFER, new Float32Array(vertices), gl2.STATIC_DRAW);
gl2.bindBuffer(gl2.ARRAY_BUFFER, null);
texture_buffer2 = gl2.createBuffer();
gl2.bindBuffer(gl2.ARRAY_BUFFER, texture_buffer2);
gl2.enableVertexAttribArray(aTexLocation2);
gl2.vertexAttribPointer(aTexLocation, 2, gl2.FLOAT, false, 0, 0);
gl2.bufferData(gl2.ARRAY_BUFFER, new Float32Array(texCoords), gl2.STATIC_DRAW);
gl2.bindBuffer(gl2.ARRAY_BUFFER, null);
index_buffer2 = gl2.createBuffer();
gl2.bindBuffer(gl2.ELEMENT_ARRAY_BUFFER, index_buffer2);
gl2.bufferData(gl2.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl2.STATIC_DRAW);
gl2.bindBuffer(gl2.ELEMENT_ARRAY_BUFFER, null);
texture2 = gl2.createTexture();
gl2.bindTexture(gl2.TEXTURE_2D, texture2);
gl2.uniform1i( gl2.getUniformLocation( shaderProgram2, 'texture' ), 0 );
gl2.bindTexture(gl2.TEXTURE_2D, null);
}
function updateTexture()
{
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, image);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.generateMipmap(gl.TEXTURE_2D);
gl.bindTexture(gl.TEXTURE_2D, null);
}
function render()
{
if ( !shaderProgram ) return;
updateTexture();
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.enableVertexAttribArray(aVertLocation);
gl.enableVertexAttribArray(aTexLocation);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, index_buffer)
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
gl.disableVertexAttribArray(aVertLocation);
gl.disableVertexAttribArray(aTexLocation);
}
function updateTexture2()
{
gl2.bindTexture(gl2.TEXTURE_2D, texture2);
gl2.texImage2D(gl2.TEXTURE_2D, 0, gl2.RGBA, gl2.RGBA, gl2.UNSIGNED_BYTE, canvas1);
gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_MAG_FILTER, gl2.LINEAR);
gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_MIN_FILTER, gl2.LINEAR);
gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_WRAP_S, gl2.CLAMP_TO_EDGE);
gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_WRAP_T, gl2.CLAMP_TO_EDGE);
gl2.generateMipmap(gl2.TEXTURE_2D);
gl2.bindTexture(gl2.TEXTURE_2D, null);
}
function render2()
{
if ( !shaderProgram2 ) return;
updateTexture2();
gl2.clearColor(0.0, 0.0, 0.0, 1.0);
gl2.clear( gl2.COLOR_BUFFER_BIT | gl2.DEPTH_BUFFER_BIT );
gl2.bindTexture(gl2.TEXTURE_2D, texture2);
gl2.enableVertexAttribArray(aVertLocation2);
gl2.enableVertexAttribArray(aTexLocation2);
gl2.bindBuffer(gl2.ELEMENT_ARRAY_BUFFER, index_buffer2);
gl2.drawElements(gl2.TRIANGLES, 6, gl2.UNSIGNED_SHORT,0);
gl2.disableVertexAttribArray(aVertLocation2);
gl2.disableVertexAttribArray(aTexLocation2);
}
document.addEventListener('DOMContentLoaded', initApp);
<canvas id="glCanvas1" width="128" height="128" ></canvas>
<canvas id="glCanvas2" width="128" height="128" ></canvas>
I'm having a problem texturing my rectangle: a black canvas is displayed instead of the canvas textured with my image.
First I create the WebGL program, attach the shaders and link the WebGL program as usual.
Then I create the texture when the image has loaded, like this:
var texture = gl.createTexture();
var image = document.createElement("img");
image.src = "https://upload.wikimedia.org/wikipedia/commons/3/3a/Saint-Gervais-les-Bains_-_Mt-Blanc_JPG01.jpg";
image.onload = function() {
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
}
After that I pass the information about the rectangle vertices into the vertex shader:
var pos = gl.getAttribLocation(program, "pos");
gl.enableVertexAttribArray(pos);
var pos_Buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, pos_Buffer);
var vertices = [-1.0, -1.0, // "left-down"
-1.0, 1.0, // "left-top"
1.0, -1.0, // "right-down"
1.0, 1.0, // "right-top"
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
gl.vertexAttribPointer(pos, 2, gl.FLOAT, false, 0, 0);
And in the end I draw my rectangle by passing the indices of the vertices into the drawElements function:
var indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
var indices = [0, 1, 2, 1, 2, 3];
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(indices), gl.STATIC_DRAW);
// draw triangles
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_BYTE, 0);
There is a jsfiddle with my problem.
Please, do you have any idea how to solve it?
image.onload is an asynchronous callback, and you issue the draw call before it is executed (the image isn't loaded yet when you draw on the canvas).
You must put gl.drawElements inside it:
image.onload = function() { // image.onload STARTS
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
// anything you want, blehblahbleh ...
// draw on canvas
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_BYTE, 0);
} // image.onload ENDS
Also, yes, there was a problem with the cross-origin resource request, but I guess you solved that with an extension (or for jsfiddle tests you might use a base64 data URL).
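(If the image host supports CORS, another option is to set crossOrigin before assigning src; shown here only as an illustration with a hypothetical URL:)
var image = new Image();
image.crossOrigin = "anonymous";  // must be set before src so the request is made with CORS
image.src = "https://example.com/cors-enabled-image.jpg";  // hypothetical URL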
Slightly updated sample: http://jsfiddle.net/windkiller/6cLo3890/
I am drawing a <video> onto a WebGL canvas. I have it working in Chrome and Firefox, but IE11 seems to be throwing an error. The error states:
drawElements: The texture is a non-power-of-two texture or not mipmap complete
and refers to this specific line:
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
I have a requestAnimationFrame loop running that is updating my canvas. I've read this error can be caused by the image not being ready or loaded. Here is my loop:
(function loop()
{
if (!$this.paused && !$this.ended)
{
gl.clear(gl.COLOR_BUFFER_BIT);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, tex);
if (!$this.paused && !$this.ended)
{
try
{
// update the video frame
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, player);
}
catch(e)
{
log(e);
}
}
gl.bindBuffer(gl.ARRAY_BUFFER, vx);
gl.vertexAttribPointer(vx_ptr, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, ix);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
window.requestAnimationFrame(loop);
}
})();
What seems to be causing this? I have been searching with no luck. Before my loop runs, I set up the WebGL canvas with this code:
$scope.prepWebGL = function()
{
// prepare visible WebGL
vs = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vs, "attribute vec2 vx;varying vec2 tx;void main(){gl_Position=vec4(vx.x*2.0-1.0,1.0-vx.y*2.0,0,1);tx=vx;}");
gl.compileShader(vs);
ps = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(ps, "precision mediump float;uniform sampler2D sm;varying vec2 tx;void main(){gl_FragColor=texture2D(sm,tx);}");
gl.compileShader(ps);
shader = gl.createProgram();
gl.attachShader(shader, vs);
gl.attachShader(shader, ps);
gl.linkProgram(shader);
gl.useProgram(shader);
vx_ptr = gl.getAttribLocation(shader, "vx");
gl.enableVertexAttribArray(vx_ptr);
gl.uniform1i(gl.getUniformLocation(shader, "sm"), 0);
vx = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vx);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0,0, 1,0, 1,1, 0,1]), gl.STATIC_DRAW);
ix = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, ix);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array([0,1,2, 0,2,3]), gl.STATIC_DRAW);
tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
};
If the problem is actually that the video is not loaded, you could add a flag to your video to indicate that it is ready. Note that media elements fire loadeddata rather than load, so listen for that:
player.loaded = false;
player.addEventListener('loadeddata', function() {
  this.loaded = true;
});
and then check if the video is loaded in your loop
if (!$this.paused && !$this.ended && player.loaded)
{
try
{
// update the video frame
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, player);
}
catch(e)
{
log(e);
}
}
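An alternative that avoids the extra flag (a sketch using the standard HTMLMediaElement.readyState property) is to check whether the video already has frame data before uploading it:
// HAVE_CURRENT_DATA (2) means at least the current frame is available.
if (player.readyState >= player.HAVE_CURRENT_DATA)
{
    // update the video frame
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, player);
}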
The error means that the texture you are currently using is not a power of 2, and WebGL has limited support for non-power-of-two textures. In particular, you can't generate mipmaps for non-power-of-two textures, nor can you set them to repeat.
What is strange is that you are using gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);, which supposedly does not use mipmaps, and you are using gl.CLAMP_TO_EDGE as well. Perhaps it is a bug in IE?
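For reference, a common defensive pattern (just a sketch, assuming player is the <video> element from the question) is to check the dimensions and only use mipmap-related features when both are powers of two:
function isPowerOf2(value) {
  return (value & (value - 1)) === 0;
}
if (isPowerOf2(player.videoWidth) && isPowerOf2(player.videoHeight)) {
  gl.generateMipmap(gl.TEXTURE_2D);
} else {
  // NPOT texture in WebGL1: clamp, and avoid mip-based filtering.
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}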
You may also run into a scaling problem, but you could use video dimensions like 512*512 or 1024*512; that would solve your compatibility problem. You could also embed your video inside a video of the correct size with a video converter or video-editing tool, so as not to lose the aspect ratio of your source video.
I'm creating a simple WebGL 3D engine and now I'm trying to add some post-processing effects.
I do it this way: I draw the scene into a framebuffer, rendering to a texture, then pass that texture to my simple post-processing shader, which renders to the actual screen (I follow the OpenGL post-processing tutorial: http://en.wikibooks.org/wiki/OpenGL_Programming/Post-Processing).
Creation of the framebuffer:
// texture
gl.activeTexture(gl.TEXTURE0);
frameBufferTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, frameBufferTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.viewportWidth, gl.viewportHeight, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.bindTexture(gl.TEXTURE_2D, null);
// depth buffer
frameBufferDepth = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, frameBufferDepth);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, gl.viewportWidth, gl.viewportHeight);
// framebuffer to link everything together
frameBufferObject = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBufferObject);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, frameBufferTexture, 0);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, frameBufferDepth);
// check status
var success = (gl.checkFramebufferStatus(gl.FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE);
if (success)
console.log('post processing frame object created');
else
throw "could not create post processing frame: " + gl.checkFramebufferStatus(gl.FRAMEBUFFER);
// vertices
var fbo_vertices = [
-1.0, -1.0,
1.0, -1.0,
-1.0, 1.0,
1.0, 1.0
];
frameBufferVertices = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(fbo_vertices), gl.STATIC_DRAW);
then, when I draw the scene, I call:
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBufferObject);
to target the frame buffer
and I finally render the texture to the screen with the basic shader (which does nothing except sample the texture):
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clearDepth(1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, frameBufferTexture);
gl.uniform1i(shaderProgram.fbo_texture, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
gl.vertexAttribPointer(shaderProgram.fbo_vertex, 2, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
At this point everything works perfectly. Now my idea was to use multiple shaders (each with its own program) that share the same framebuffer, so that every shader takes the texture as input and writes back to it.
I tried to use multiple shaders, making these calls in a for loop (once for every shader):
gl.useProgram(postProcessingProgram);
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBufferObject);
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clearDepth(1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, frameBufferTexture);
gl.uniform1i(postProcessingProgram.basic_texture, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
gl.vertexAttribPointer(postProcessingProgram.fbo_vertex, 2, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
except for the last basic shader, which renders to the physical screen:
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
The screen is totally black, but I get no error or warning. It doesn't work even if I use the same basic shader twice.
My question is: is it correct to use the same framebuffer for multiple shaders? If I use multiple framebuffers, how can I share the same texture between the consecutive steps?
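For what it's worth, the usual arrangement is to "ping-pong" between two framebuffer/texture pairs, because a pass cannot safely read from and write to the same texture at once (that is a feedback loop). Below is a minimal sketch under some assumptions: makeFbo is a hypothetical helper that creates a framebuffer with a texture attached as COLOR_ATTACHMENT0, postProcessingPrograms stands for the array of post-processing programs, and the remaining names come from the question's code.
// Sketch only: each pass reads the previous pass's texture and writes the other FBO;
// the final pass writes to the canvas (framebuffer null).
var fbos = [makeFbo(gl), makeFbo(gl)];      // makeFbo is a hypothetical helper
var sourceTexture = frameBufferTexture;     // the scene was rendered into this texture
postProcessingPrograms.forEach(function(program, i) {
  var isLast = (i === postProcessingPrograms.length - 1);
  var target = fbos[i % 2];
  gl.bindFramebuffer(gl.FRAMEBUFFER, isLast ? null : target.framebuffer);
  gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
  gl.useProgram(program);
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(gl.TEXTURE_2D, sourceTexture);
  gl.uniform1i(program.basic_texture, 0);
  gl.bindBuffer(gl.ARRAY_BUFFER, frameBufferVertices);
  gl.enableVertexAttribArray(program.fbo_vertex);
  gl.vertexAttribPointer(program.fbo_vertex, 2, gl.FLOAT, false, 0, 0);
  gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
  sourceTexture = target.texture;           // the next pass reads what this pass wrote
});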