I am using WebGL. What I want to do is first render to a texture, then use that texture to render to the screen, and I'm running into a problem with an attribute in the first (render-to-texture) step.
I will try to explain in a few words what I am trying to do. First I use a fragment shader to render, into a texture, something that depends on an attribute; then I use that texture to render to the screen, and I repeat those steps for the next frame. But when I try to render with the framebuffer bound, that attribute (pos) has invalid values; in fact, no values at all.
I wrote a little demo that shows my problem:
var canvas = document.getElementById("c");
var gl = getWebGLContext(canvas);
var program_init, program_pers, tex, fb;
function init() {
var verts = [
1, 1,
-1, 1,
-1, -1,
1, 1,
-1, -1,
1, -1,
];
var vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(0);
tex = gl.createTexture();
fb = gl.createFramebuffer();
program_init = createProgramFromScripts(gl, ["vshader-init", "fshader-init"], ["pos"]);
program_pers = createProgramFromScripts(gl, ["vshader-pers", "fshader-pers"], ["a_position"]);
}
function renderToTexture(gl, time) {
gl.useProgram(program_init);
var timeLocation = gl.getUniformLocation(program_init, 'time');
gl.uniform1f(timeLocation, time);
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
gl.drawArrays(gl.TRIANGLES, 0, 6);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
};
function renderToScreen(gl) {
gl.useProgram(program_pers);
gl.drawArrays(gl.TRIANGLES, 0, 6);
};
init();
requestAnimationFrame(function(){
renderToTexture(gl, arguments[0]);
renderToScreen(gl);
requestAnimationFrame( arguments.callee );
});
canvas { border: 1px solid black; }
body { background-color: darkslategrey }
<script src="http://greggman.github.com/webgl-fundamentals/webgl/resources/webgl-utils.js"></script>
<script id="vshader-init" type="vs/shader">
attribute vec4 pos;
varying vec2 uv;
void main() {
gl_Position = pos;
uv = pos.xy * .5 + .5;
}
</script>
<script id="fshader-init" type="fs/shader">
precision mediump float;
varying vec2 uv;
uniform float time;
void main() {
float t = floor(time / 1000.);
vec3 color;
color.x = sin(t*uv.x);
color.y = tan(t*uv.y);
color.z = cos(t);
gl_FragColor = vec4(color, 1.);
}
</script>
<script id="vshader-pers" type="vs/shader">
attribute vec4 a_position;
varying vec2 v_texcoord;
void main() {
gl_Position = a_position;
v_texcoord = a_position.xy * .5 + .5;
}
</script>
<script id="fshader-pers" type="fs/sahder">
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_sampler;
void main() {
gl_FragColor = texture2D(u_sampler, v_texcoord);
}
</script>
<body>
<canvas id="c" width="400" height="400"></canvas>
</body>
If I comment out lines 38 and 39 of the JavaScript code (the binding of the framebuffer) and line 54 (the render-to-screen call, which runs a different program), we can see that it renders correctly and the attribute "pos" gets the right values.
var canvas = document.getElementById("c");
var gl = getWebGLContext(canvas);
var program_init, program_pers, tex, fb;
function init() {
var verts = [
1, 1,
-1, 1,
-1, -1,
1, 1,
-1, -1,
1, -1,
];
var vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(0);
tex = gl.createTexture();
fb = gl.createFramebuffer();
program_init = createProgramFromScripts(gl, ["vshader-init", "fshader-init"], ["pos"]);
program_pers = createProgramFromScripts(gl, ["vshader-pers", "fshader-pers"], ["a_position"]);
}
function renderToTexture(gl, time) {
gl.useProgram(program_init);
var timeLocation = gl.getUniformLocation(program_init, 'time');
gl.uniform1f(timeLocation, time);
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
//gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
//gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
gl.drawArrays(gl.TRIANGLES, 0, 6);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
};
function renderToScreen(gl) {
gl.useProgram(program_pers);
gl.drawArrays(gl.TRIANGLES, 0, 6);
};
init();
requestAnimationFrame(function(){
renderToTexture(gl, arguments[0]);
//renderToScreen(gl);
requestAnimationFrame( arguments.callee );
});
canvas { border: 1px solid black; }
body { background-color: darkslategrey }
<script src="http://greggman.github.com/webgl-fundamentals/webgl/resources/webgl-utils.js"></script>
<script id="vshader-init" type="vs/shader">
attribute vec4 pos;
varying vec2 uv;
void main() {
gl_Position = pos;
uv = pos.xy * .5 + .5;
}
</script>
<script id="fshader-init" type="fs/shader">
precision mediump float;
varying vec2 uv;
uniform float time;
void main() {
float t = floor(time / 1000.);
vec3 color;
color.x = sin(t*uv.x);
color.y = tan(t*uv.y);
color.z = cos(t);
gl_FragColor = vec4(color, 1.);
}
</script>
<script id="vshader-pers" type="vs/shader">
attribute vec4 a_position;
varying vec2 v_texcoord;
void main() {
gl_Position = a_position;
v_texcoord = a_position.xy * .5 + .5;
}
</script>
<script id="fshader-pers" type="fs/shader">
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_sampler;
void main() {
gl_FragColor = texture2D(u_sampler, v_texcoord);
}
</script>
<body>
<canvas id="c" width="400" height="400"></canvas>
</body>
I do not know much about how this is supposed to work and I am a bit in the dark. I am sure I'm missing something important, but I can't find what it is anywhere. Any help would be appreciated.
Your framebuffer texture is 1x1 pixels. What do you expect to see? You're rendering a single pixel.
Also note that unless your framebuffer is the same size as the canvas, you'll want to call gl.viewport and set it to the size of the thing you're rendering after each call to gl.bindFramebuffer.
You probably also don't want to create the texture every frame. Here's your code with those things changed; a short recap of the bind/viewport pattern follows the snippet.
var canvas = document.getElementById("c");
var gl = canvas.getContext("webgl");
var program_init, program_pers, tex, fb;
var fbWidth = 400;
var fbHeight = 300;
function init() {
var verts = [
1, 1,
-1, 1,
-1, -1,
1, 1,
-1, -1,
1, -1,
];
var vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(0);
tex = gl.createTexture();
fb = gl.createFramebuffer();
program_init = webglUtils.createProgramFromScripts(gl, ["vshader-init", "fshader-init"], ["pos"]);
program_pers = webglUtils.createProgramFromScripts(gl, ["vshader-pers", "fshader-pers"], ["a_position"]);
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, fbWidth, fbHeight, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
// make non power-of-2 texture renderable
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
}
function renderToTexture(gl, time) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, fbWidth, fbHeight);
gl.useProgram(program_init);
var timeLocation = gl.getUniformLocation(program_init, 'time');
gl.uniform1f(timeLocation, time);
gl.drawArrays(gl.TRIANGLES, 0, 6);
};
function renderToScreen(gl) {
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
gl.useProgram(program_pers);
gl.drawArrays(gl.TRIANGLES, 0, 6);
};
init();
requestAnimationFrame(function(){
renderToTexture(gl, arguments[0]);
renderToScreen(gl);
requestAnimationFrame( arguments.callee );
});
canvas { border: 1px solid black; }
body { background-color: darkslategrey }
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<script id="vshader-init" type="vs/shader">
attribute vec4 pos;
varying vec2 uv;
void main() {
gl_Position = pos;
uv = pos.xy * .5 + .5;
}
</script>
<script id="fshader-init" type="fs/shader">
precision mediump float;
varying vec2 uv;
uniform float time;
void main() {
float t = floor(time / 1000.);
vec3 color;
color.x = sin(t*uv.x);
color.y = tan(t*uv.y);
color.z = cos(t);
gl_FragColor = vec4(color, 1.);
}
</script>
<script id="vshader-pers" type="vs/shader">
attribute vec4 a_position;
varying vec2 v_texcoord;
void main() {
gl_Position = a_position;
v_texcoord = a_position.xy * .5 + .5;
}
</script>
<script id="fshader-pers" type="fs/sahder">
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_sampler;
void main() {
gl_FragColor = texture2D(u_sampler, v_texcoord);
}
</script>
<body>
<canvas id="c" width="400" height="400"></canvas>
</body>
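The key habit is pairing every gl.bindFramebuffer call with a matching gl.viewport call. Stripped down to just those calls, each frame boils down to roughly this (same names as in the snippet above):
// offscreen pass: viewport matches the framebuffer texture
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, fbWidth, fbHeight);
gl.useProgram(program_init);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// onscreen pass: viewport matches the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
gl.useProgram(program_pers);
gl.drawArrays(gl.TRIANGLES, 0, 6);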
Related
I'm busy transferring some code from OpenGL to WebGL2 (to do dual depth peeling), but I'm getting a warning in my console that I cannot make sense of, and the output is just black.
I've gone through the process of drawing some of the buffers individually and discovered the warning only appears inside the loop for (var p = 1; p < numPasses; p++) when I do the geometry passes.
The shaders I based mine on also made extensive use of gl_NormalMatrix, gl_ModelViewMatrix, and gl_Vertex, which I think could also be contributing to the black output. I assume that simply replacing gl_ModelViewMatrix * gl_Vertex with uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0); will yield the same result.
(function() {
var script = document.createElement("script");
script.onload = function() {
main();
};
script.src = "https://mdn.github.io/webgl-examples/tutorial/gl-matrix.js";
document.head.appendChild(script);
})();
var initShader, peelShader, blendShader, finalShader;
var accumTex0, accumTex1;
var backBlenderFBO, peelingSingleFBO;
var depthTex = [], frontBlenderTex = [], backTempTex = [], backBlenderTex = [];
var quadVAO;
var drawBuffers;
function main() {
const canvas = document.querySelector("#glcanvas");
const gl = canvas.getContext("webgl2", { alpha: false });
if (!gl) {
alert("Unable to initialize WebGL. Your browser or machine may not support it.");
return;
}
var ext = gl.getExtension("EXT_color_buffer_float");
if (!ext) { alert("Unable to initialize WebGL. Your browser or machine may not support it."); return; }
quadVAO = newMesh(gl);
drawBuffers = [gl.COLOR_ATTACHMENT0, gl.COLOR_ATTACHMENT1, gl.COLOR_ATTACHMENT2, gl.COLOR_ATTACHMENT3, gl.COLOR_ATTACHMENT4, gl.COLOR_ATTACHMENT5, gl.COLOR_ATTACHMENT6];
// Dual Peeling Render Targets
backBlenderTex = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
backBlenderFBO = newFramebuffer(gl, [backBlenderTex]);
depthTex[0] = newTexture(gl, gl.TEXTURE_2D, gl.RG32F, 640, 480, gl.RG, gl.FLOAT, null);
frontBlenderTex[0] = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
backTempTex[0] = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
depthTex[1] = newTexture(gl, gl.TEXTURE_2D, gl.RG32F, 640, 480, gl.RG, gl.FLOAT, null);
frontBlenderTex[1] = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
backTempTex[1] = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
peelingSingleFBO = newFramebuffer(gl, [depthTex[0], frontBlenderTex[0], backTempTex[0], depthTex[1], frontBlenderTex[1], backTempTex[1], backBlenderTex]);
bindFramebuffer(gl, null);
initShader = newShader(gl, vsInitSource, fsInitSource);
peelShader = newShader(gl, vsPeelSource, fsPeelSource);
blendShader = newShader(gl, vsBlendSource, fsBlendSource);
finalShader = newShader(gl, vsFinalSource, fsFinalSource);
gl.disable(gl.CULL_FACE);
draw(gl);
}
// See below link to make sense of this function
// https://stackoverflow.com/questions/37381980/get-some-trounble-when-using-drawbuffers-in-webgl2
function getDrawBuffers(gl, ...idx) {
var buffers = [gl.NONE, gl.NONE, gl.NONE, gl.NONE, gl.NONE, gl.NONE, gl.NONE];
for (var i = 0; i < idx.length; i++) {
if (i == idx[i]) buffers[i] = drawBuffers[i];
}
return buffers;
}
function draw(gl) {
// setup MVP
const proj = mat4.create();
const cameraSize = 0.2;
mat4.ortho(proj, 0.0, 1.0, 0.0, 1.0, 0.0001, 10.0);
const view = mat4.create();
mat4.lookAt(view, [0, 0, 2], [0, 0, 0], [0, 1, 0]);
gl.disable(gl.DEPTH_TEST);
gl.enable(gl.BLEND);
bindFramebuffer(gl, peelingSingleFBO);
gl.drawBuffers(getDrawBuffers(gl, 1, 2));
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
const maxDepth = 1;
gl.drawBuffers(getDrawBuffers(gl, 0));
gl.clearColor(-maxDepth, -maxDepth, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.blendEquation(gl.MAX);
// init
// bindFramebuffer(gl, null); // to test with
// gl.drawBuffers([gl.BACK]); // to test with
gl.useProgram(initShader);
drawMesh(gl, initShader, proj, view, { x: 0.0, y: 0.0, z: 0.0 }, { r: 1.0, g: 0.0, b: 0.0, a: 1.0 });
gl.useProgram(null);
// return; // to test with
// peeling & blending
gl.drawBuffers(getDrawBuffers(gl, 6));
var backgroundColor = [1, 1, 1];
gl.clearColor(backgroundColor[0], backgroundColor[1], backgroundColor[2], 0);
gl.clear(gl.COLOR_BUFFER_BIT);
// return; // to test with
// for each pass
var numPasses = 4;
var currID = 0;
for (var p = 1; p < numPasses; p++) {
currID = p % 2;
var prevID = 1 - currID;
var bufID = currID * 3;
gl.drawBuffers(getDrawBuffers(gl, bufID + 1, bufID + 2));
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawBuffers(getDrawBuffers(gl, bufID));
gl.clearColor(-maxDepth, -maxDepth, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
// all three blending render targets
gl.drawBuffers(getDrawBuffers(gl, bufID, bufID + 1, bufID + 2));
gl.blendEquation(gl.MAX);
gl.useProgram(peelShader);
bindTexture(gl, gl.TEXTURE0, gl.TEXTURE_2D, depthTex[prevID]); // DepthBlenderTex
bindTexture(gl, gl.TEXTURE1, gl.TEXTURE_2D, frontBlenderTex[prevID]); // FrontBlenderTex
gl.uniform1f(gl.getUniformLocation(peelShader, "uAlpha"), 0.6);
drawMesh(gl, peelShader, proj, view, { x: 0.0, y: 0.0, z: 0.0 }, { r: 1.0, g: 0.0, b: 0.0, a: 1.0 });
gl.useProgram(null);
// alpha blend the back color
gl.drawBuffers(getDrawBuffers(gl, 6));
gl.blendEquation(gl.FUNC_ADD);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(blendShader);
bindTexture(gl, gl.TEXTURE0, gl.TEXTURE_2D, backTempTex[currID]); // TempTex
drawFullscreenQuad(gl);
gl.useProgram(null);
}
gl.disable(gl.BLEND);
// final pass
bindFramebuffer(gl, null);
gl.drawBuffers([gl.BACK]);
gl.useProgram(finalShader);
bindTexture(gl, gl.TEXTURE0, gl.TEXTURE_2D, depthTex[currID]); // DepthBlenderTex
bindTexture(gl, gl.TEXTURE1, gl.TEXTURE_2D, frontBlenderTex[currID]); // FrontBlenderTex
bindTexture(gl, gl.TEXTURE2, gl.TEXTURE_2D, backBlenderTex); // BackBlenderTex
drawFullscreenQuad(gl);
gl.useProgram(null);
}
function newShader(gl, vsSource, fsSource) {
const vertexShader = loadSource(gl, gl.VERTEX_SHADER, vsSource);
const fragmentShader = loadSource(gl, gl.FRAGMENT_SHADER, fsSource);
const shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert("Unable to initialize the shader program: " + gl.getProgramInfoLog(shaderProgram));
return null;
}
return shaderProgram;
}
function loadSource(gl, type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
}
return shader;
}
function newMesh(gl) {
var vertices = [1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, 1.0];
var indicies = [0, 1, 3, 1, 2, 3];
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
const vb = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vb);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
const eb = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, eb);
gl.bufferData(
gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array(indicies),
gl.STATIC_DRAW
);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 2 * 4, 0);
gl.enableVertexAttribArray(null);
gl.bindVertexArray(null);
return vao;
}
function drawFullscreenQuad(gl) {
gl.bindVertexArray(quadVAO);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
gl.bindVertexArray(null);
}
function drawMesh(gl, prog, proj, view, pos, col) {
gl.uniformMatrix4fv(gl.getUniformLocation(prog, "uProjMatrix"), false, proj);
gl.uniformMatrix4fv(gl.getUniformLocation(prog, "uViewMatrix"), false, view);
gl.bindVertexArray(quadVAO);
const model = mat4.create();
var trans = vec3.create();
vec3.set(trans, pos.x, pos.y, pos.z);
mat4.translate(model, model, trans);
gl.uniform4fv(gl.getUniformLocation(prog, "uColor"), [col.r, col.g, col.b, col.a]);
gl.uniformMatrix4fv(gl.getUniformLocation(prog, "uModelMatrix"), false, model);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
gl.bindVertexArray(null);
}
function newTexture(gl, target, internalFormat, height, width, format, type, pixels) {
var tid = gl.createTexture();
gl.bindTexture(target, tid);
gl.texParameteri(target, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(target, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(target, 0, internalFormat, width, height, 0, format, type, pixels);
return tid;
}
function bindTexture(gl, idx, target, id) {
gl.activeTexture(idx);
gl.bindTexture(target, id);
// wait should I be doing glUniforml1(id, ...) here?
}
function newFramebuffer(gl, colorAttachments) {
var fib = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fib);
for (var i = 0; i < colorAttachments.length; i++) {
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
drawBuffers[i],
gl.TEXTURE_2D,
colorAttachments[i],
0
);
}
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
alert(gl.checkFramebufferStatus(gl.FRAMEBUFFER).toString(16));
}
return fib;
}
function bindFramebuffer(gl, fib) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fib);
}
const vsInitSource = `#version 300 es
layout(location=0) in vec3 inVertexPosition;
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;
void main(void) {
gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0);
}`;
const fsInitSource = `#version 300 es
precision mediump float;
layout(location=0) out vec2 outColor;
void main(void) {
// This seems very important because it is based on the near/far values
// What is the correct value I can expect here?
outColor.xy = vec2(-gl_FragCoord.z, gl_FragCoord.z);
}`;
const vsPeelSource = `#version 300 es
layout(location=0) in vec3 inVertexPosition;
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;
// I believe this is what gives the model the green and white stripes
// Not required?
// vec3 ShadeVertex() {
// float diffuse = abs(normalize(gl_NormalMatrix * gl_Normal).z);
// return vec3(gl_Vertex.xy, diffuse);
// }
void main(void) {
gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0);
//gl_TexCoord[0].xyz = ShadeVertex();
}`;
const fsPeelSource = `#version 300 es
precision mediump float;
uniform float uAlpha;
#define COLOR_FREQ 30.0
#define ALPHA_FREQ 30.0
vec4 ShadeFragment() {
vec4 color;
color.rgb = vec3(.4,.85,.0);
color.a = uAlpha;
return color;
}
uniform sampler2D DepthBlenderTex;
uniform sampler2D FrontBlenderTex;
#define MAX_DEPTH 1.0
layout(location=0) out vec4 outFragData0;
layout(location=1) out vec4 outFragData1;
layout(location=2) out vec4 outFragData2;
void main(void) {
// window-space depth interpolated linearly in screen space
float fragDepth = gl_FragCoord.z;
vec2 depthBlender = texture(DepthBlenderTex, gl_FragCoord.xy).xy;
vec4 forwardTemp = texture(FrontBlenderTex, gl_FragCoord.xy);
// Depths and 1.0-alphaMult always increase
// so we can use pass-through by default with MAX blending
outFragData0.xy = depthBlender;
// Front colors always increase (DST += SRC*ALPHA_MULT)
// so we can use pass-through by default with MAX blending
outFragData1 = forwardTemp;
// Because over blending makes color increase or decrease,
// we cannot pass-through by default.
// Each pass, only one fragment writes a color greater than 0
outFragData2 = vec4(0.0);
float nearestDepth = -depthBlender.x;
float farthestDepth = depthBlender.y;
float alphaMultiplier = 1.0 - forwardTemp.w;
if (fragDepth < nearestDepth || fragDepth > farthestDepth) {
// Skip this depth in the peeling algorithm
outFragData0.xy = vec2(-MAX_DEPTH);
return;
}
if (fragDepth > nearestDepth && fragDepth < farthestDepth) {
// This fragment needs to be peeled again
outFragData0.xy = vec2(-fragDepth, fragDepth);
return;
}
// If we made it here, this fragment is on the peeled layer from last pass
// therefore, we need to shade it, and make sure it is not peeled any farther
vec4 color = ShadeFragment();
outFragData0.xy = vec2(-MAX_DEPTH);
if (fragDepth == nearestDepth) {
outFragData1.xyz += color.rgb * color.a * alphaMultiplier;
outFragData1.w = 1.0 - alphaMultiplier * (1.0 - color.a);
} else {
outFragData2 += color;
}
}`;
const vsBlendSource = `#version 300 es
layout(location=0) in vec3 inVertexPosition;
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;
void main(void) {
gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0);
}`;
const fsBlendSource = `#version 300 es
precision mediump float;
uniform sampler2D TempTex;
layout(location=0) out vec4 outColor;
void main(void) {
outColor = texture(TempTex, gl_FragCoord.xy);
// for occlusion query
if (outColor.a == 0.0) discard;
}`;
const vsFinalSource = `#version 300 es
layout(location=0) in vec3 inVertexPosition;
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;
void main(void) {
gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0);
}`;
const fsFinalSource = `#version 300 es
precision mediump float;
uniform sampler2D DepthBlenderTex;
uniform sampler2D FrontBlenderTex;
uniform sampler2D BackBlenderTex;
layout(location=0) out vec4 outColor;
void main(void)
{
vec4 frontColor = texture(FrontBlenderTex, gl_FragCoord.xy);
vec3 backColor = texture(BackBlenderTex, gl_FragCoord.xy).rgb;
float alphaMultiplier = 1.0 - frontColor.w;
// front + back
outColor.rgb = frontColor.rgb + backColor * alphaMultiplier;
// front blender
// outColor.rgb = frontColor.rgb + vec3(alphaMultiplier);
// back blender
// outColor.rgb = backColor;
}`;
<canvas id="glcanvas" width="640" height="480"></canvas>
Using the same texture for reading and writing in a framebuffer is not allowed in WebGL (see point 6.27 at https://www.khronos.org/registry/webgl/specs/latest/1.0/#6.26).
But you can create two framebuffers with separate textures and swap them every frame. That way you read from the texture rendered last frame and write to the second texture, and when you are done you simply swap them.
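A minimal sketch of that ping-pong setup might look like this (assuming gl, a program, and a full-screen quad are already set up; makeTarget is just an illustrative helper, not part of any library):
// Illustrative helper: a texture plus a framebuffer that renders into it
function makeTarget(gl, width, height) {
  var tex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, tex);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  var fb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
  gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  return { tex: tex, fb: fb };
}
var read = makeTarget(gl, 640, 480);   // holds last frame's result
var write = makeTarget(gl, 640, 480);  // rendered into this frame
function frame() {
  // render into "write" while sampling "read" -- never the same texture
  gl.bindFramebuffer(gl.FRAMEBUFFER, write.fb);
  gl.viewport(0, 0, 640, 480);
  gl.bindTexture(gl.TEXTURE_2D, read.tex);
  gl.drawArrays(gl.TRIANGLES, 0, 6);
  // draw the result to the canvas, then swap roles for the next frame
  gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
  gl.bindTexture(gl.TEXTURE_2D, write.tex);
  gl.drawArrays(gl.TRIANGLES, 0, 6);
  var tmp = read; read = write; write = tmp;
  requestAnimationFrame(frame);
}
requestAnimationFrame(frame);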
I have a canvas with WebGL. I initialize WebGL, create a fragment and a vertex shader, two triangles that cover the whole canvas, and a texture.
const vertexShaderSource = `
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
// convert the rectangle from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
// pass the texCoord to the fragment shader
// The GPU will interpolate this value between points.
v_texCoord = a_texCoord;
}
`;
const fragmentShaderSource = `
precision mediump float;
// our texture
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
}
`;
I create the texture from an ImageData object whose elements are randomly chosen (each pixel a different color). When I apply the texture, it seems to cover the whole area with the upper-left pixel's color stretched over the whole canvas.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, IMAGEDATA);
This is the code I have so far:
const width = 128;
const height = 128;
var createShader = function (gl, shaderSource, shaderType) {
const shader = gl.createShader(shaderType);
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
const compiled = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if (!compiled) {
console.log(`Error compiling shader: ${gl.getShaderInfoLog(shader)}`);
return;
}
return shader;
};
var createProgram = function(gl, shaders, opt_attribs, opt_locations) {
const program = gl.createProgram();
for (var ii = 0; ii < shaders.length; ++ii) {
gl.attachShader(program, shaders[ii]);
}
if (opt_attribs) {
for (var ii = 0; ii < opt_attribs.length; ++ii) {
gl.bindAttribLocation(program, opt_locations ? opt_locations[ii] : ii, opt_attribs[ii]);
}
}
gl.linkProgram(program);
// Check the link status
var linked = gl.getProgramParameter(program, gl.LINK_STATUS);
if (!linked) {
console.log(`Error linking shader parameter: ${gl.getProgramInfoLog(program)}`);
gl.deleteProgram(program);
return;
}
return program;
}
// create canvas
const canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
// imagedata to update the texture
const imageData = {
imageData: new ImageData(width, height),
dataBuf: null,
dataBuf8: null,
data: null
};
imageData.dataBuf = new ArrayBuffer(imageData.imageData.data.length);
imageData.dataBuf8 = new Uint8ClampedArray(imageData.dataBuf);
imageData.data = new Uint32Array(imageData.dataBuf);
const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
document.getElementById('container').appendChild(canvas);
// shaders
const vertexShaderSource = `
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
// convert the rectangle from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
// pass the texCoord to the fragment shader
// The GPU will interpolate this value between points.
v_texCoord = a_texCoord;
}
`;
const fragmentShaderSource = `
precision mediump float;
// our texture
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
}
`;
const vertexShader = createShader(gl, vertexShaderSource, gl.VERTEX_SHADER);
const fragmentShader = createShader(gl, fragmentShaderSource, gl.FRAGMENT_SHADER);
const program = createProgram(gl, [vertexShader, fragmentShader]);
gl.useProgram(program);
// triangle and triangle buffers
var positionLocation = gl.getAttribLocation(program, "a_position");
var resolutionLocation = gl.getUniformLocation(program, "u_resolution");
gl.uniform2f(resolutionLocation, canvas.width, canvas.height);
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0, 0,
canvas.width, 0,
0, canvas.height,
0, canvas.height,
canvas.width, canvas.height,
canvas.width, 0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
var texCoordLocation = gl.getUniformLocation(program, "a_texCoord");
var texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0, 0,
canvas.width, 0,
0, canvas.height,
0, canvas.height,
canvas.width, canvas.height,
canvas.width, 0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
// texture
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
for (let i = 0; i < imageData.data.length; i++) {
const r = Math.round(Math.random() * 100);
const g = Math.round(Math.random() * 100);
const b = Math.round(Math.random() * 100);
imageData.data[i] = (255 << 24) | // alpha
(b << 16) | // blue
(g << 8) | // green
r; //
}
imageData.imageData.data.set(imageData.dataBuf8);
// bind texture and draw
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, imageData.imageData);
gl.drawArrays(gl.TRIANGLES, 0, 6);
<div id="container"></div>
How can I correctly place the texture?
Just two errors: the wrong location type for a_texCoord, and wrong texture coords (texture coords should be in the range 0 to 1).
See the code below for the corrections.
const width = 128;
const height = 128;
var createShader = function(gl, shaderSource, shaderType) {
const shader = gl.createShader(shaderType);
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
const compiled = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if (!compiled) {
console.log(`Error compiling shader: ${gl.getShaderInfoLog(shader)}`);
return;
}
return shader;
};
var createProgram = function(gl, shaders, opt_attribs, opt_locations) {
const program = gl.createProgram();
for (var ii = 0; ii < shaders.length; ++ii) {
gl.attachShader(program, shaders[ii]);
}
if (opt_attribs) {
for (var ii = 0; ii < opt_attribs.length; ++ii) {
gl.bindAttribLocation(program, opt_locations ? opt_locations[ii] : ii, opt_attribs[ii]);
}
}
gl.linkProgram(program);
// Check the link status
var linked = gl.getProgramParameter(program, gl.LINK_STATUS);
if (!linked) {
console.log(`Error linking shader parameter: ${gl.getProgramInfoLog(program)}`);
gl.deleteProgram(program);
return;
}
return program;
}
// create canvas
const canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
// imagedata to update the texture
const imageData = {
imageData: new ImageData(width, height),
dataBuf: null,
dataBuf8: null,
data: null
};
imageData.dataBuf = new ArrayBuffer(imageData.imageData.data.length);
imageData.dataBuf8 = new Uint8ClampedArray(imageData.dataBuf);
imageData.data = new Uint32Array(imageData.dataBuf);
const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
document.getElementById('container').appendChild(canvas);
// shaders
const vertexShaderSource = `
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
// convert the rectangle from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
// pass the texCoord to the fragment shader
// The GPU will interpolate this value between points.
v_texCoord = a_texCoord;
}
`;
const fragmentShaderSource = `
precision mediump float;
// our texture
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
}
`;
const vertexShader = createShader(gl, vertexShaderSource, gl.VERTEX_SHADER);
const fragmentShader = createShader(gl, fragmentShaderSource, gl.FRAGMENT_SHADER);
const program = createProgram(gl, [vertexShader, fragmentShader]);
gl.useProgram(program);
// triangle and triangle buffers
var positionLocation = gl.getAttribLocation(program, "a_position");
var resolutionLocation = gl.getUniformLocation(program, "u_resolution");
gl.uniform2f(resolutionLocation, canvas.width, canvas.height);
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0, 0,
canvas.width, 0,
0, canvas.height,
0, canvas.height,
canvas.width, canvas.height,
canvas.width, 0
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
// You had the wrong location type: a_texCoord is an attribute, so it needs getAttribLocation, not getUniformLocation
var texCoordLocation = gl.getAttribLocation(program, "a_texCoord");
var texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
// You had the wrong texture coords. Texture coords are in the range 0 to 1
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0, 0,
1, 0,
0, 1,
0, 1,
1, 1,
1, 0
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
// texture
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// changed this as I thought you were indexing an 8-bit array, then saw you
// correctly index a 32-bit array. I did not change the code back, but yours was
// correct as well.
for (let i = 0; i < imageData.dataBuf8.length; i += 4) {
imageData.dataBuf8[i] = Math.random() * 255; // red
imageData.dataBuf8[i + 1] = Math.random() * 255; // green
imageData.dataBuf8[i + 2] = Math.random() * 255; // blue
imageData.dataBuf8[i + 3] = Math.random() * 255; // alpha
}
imageData.imageData.data.set(imageData.dataBuf8);
// bind texture and draw
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, imageData.imageData);
gl.drawArrays(gl.TRIANGLES, 0, 6);
canvas {
border: 2px solid black;
}
<div id="container"></div>
I'm attempting to add video as a texture in WebGL. The project is simple and only involves one object. The code I have works fine in Firefox but claims there is no video when loaded in Chrome. The error seems to be that the video is not starting in Chrome for some reason, even though I have a function to start the video:
function startVideo() {
videoElement.play();
intervalID = setInterval(draw, 15);
}
as well as
videoElement.preload = "auto";
tied to my video object in JS.
Additionally, I also have
<video id="video" src="Firefox.ogv" autoplay muted>
Your browser doesn't appear to support the <code><video></code> element.
</video>
just to be sure, but it is not loading the texture. The error is reported in this function, at the texImage2D call:
function updateTexture() {
gl.bindTexture(gl.TEXTURE_2D, modelTexture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, videoElement);
}
Subsequently, I also get the error:
RENDER WARNING: texture bound to texture unit 0 is not renderable. It maybe non-power-of-2 and have incompatible texture filtering.
I am using this tutorial combined with my own implementation:
https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Animating_textures_in_WebGL
My JS and HTML are located here:
https://github.com/TacoB0t/CSC43/tree/prog/prog5
Everything mostly takes place in the init() and draw() functions of the JS.
HTML:
<html>
<head>
<script type="text/javascript" src="webgl-utils.js"></script>
<script type="text/javascript" src="webgl-debug.js"></script>
<script type="text/javascript" src="cuon-utils.js"></script>
<script type="text/javascript" src="cuon-matrix.js"></script>
<script type="text/javascript" src="prog5.js"></script>
<script type="text/javascript" src="chest.js"></script>
<script type="text/javascript" src="cube.js"></script>
</head>
<body onload="init()">
<script id="vertexShader" type="x-shader/x-vertex">
precision mediump float;
uniform mat4 modelMatrix;
uniform mat4 viewMatrix;
uniform mat4 projectionMatrix;
uniform vec4 lightPosition;
attribute vec4 vertexPosition;
attribute vec3 vertexNormal;
attribute vec2 vertexTexCoord;
varying vec3 fragmentNormal;
varying vec3 fragmentLight;
varying vec3 fragmentView;
varying vec4 fragmentPosition;
varying vec2 fragmentTexCoord;
void main() {
mat4 modelViewMatrix = viewMatrix * modelMatrix;
vec4 p = modelViewMatrix * vertexPosition;
vec4 q = viewMatrix * lightPosition;
fragmentPosition = vertexPosition;
fragmentNormal = normalize(mat3(modelViewMatrix) * vertexNormal);
fragmentLight = normalize(vec3(q - p));
fragmentView = normalize(vec3(-p));
fragmentTexCoord = vertexTexCoord;
gl_Position = projectionMatrix * modelViewMatrix * vertexPosition;
}
</script>
<script id="lightingFragmentShader" type="x-shader/x-fragment">
precision mediump float;
varying vec3 fragmentNormal;
varying vec3 fragmentLight;
varying vec3 fragmentView;
varying vec4 fragmentPosition;
varying vec2 fragmentTexCoord;
uniform sampler2D modelTexture;
uniform vec3 modelColor;
uniform vec3 lightColor;
void main() {
vec3 n = normalize(fragmentNormal);
vec3 l = normalize(fragmentLight);
vec3 v = normalize(fragmentView);
vec3 h = normalize(l + v);
vec4 modelColor = texture2D(modelTexture, fragmentTexCoord);
float d = max(dot(l,n) , 0.0);
float s = pow(max(dot(h, n), 0.0), 10.0);
vec3 fragmentColor = vec3(modelColor) * lightColor * d + lightColor * s;
gl_FragColor = vec4(fragmentColor, 1.0);
}
</script>
<center>
<canvas id="webgl" width="500px" height="500px">
This content requires WebGL
</canvas>
<font face ="Arial">
<br>
Light Source Position
<br>
X-AXIS<input id="x-light" type="range" min="-5.0" max="5.0" value="0" step="0.1" oninput="refresh()">
<br>
Y-AXIS <input id="y-light" type="range" min="-5.0" max="5.0" value="0" step="0.1" oninput="refresh()">
<br>
Z-AXIS<input id="z-light" type="range" min="-5.0" max="5.0" value="0" step="0.1" oninput="refresh()">
</font>
</center>
<video id="video" src="Firefox.ogv" autoplay muted style="display: none;">
Your browser doesn't appear to support the <code><video></code> element.
</video>
</body>
</html>
JS:
var gl;
var canvas;
var dragging = false;
var texShader;
var chestModel;
var xValue = 0;
var yValue = 0;
var zValue = 0;
var modelRotationX = 0;
var modelRotationY = 0;
var lastClientX;
var lastClientY;
var videoElement;
var modelTexture;
var copyVideo;
//refresh function used to request animation frame after moving slider in HTML
function refresh(){
xValue = document.getElementById("x-light").value;
yValue = document.getElementById("y-light").value;
zValue = document.getElementById("z-light").value;
requestAnimationFrame(draw);
}
//define 'flatten' function to flatten tables to single array
function flatten(a) {
return a.reduce(function (b, v) { b.push.apply(b, v); return b }, [])
}
//create tumble interaction functions to click and drag cube
function onmousedown(event){
dragging = true;
lastClientX = event.clientX;
lastClientY = event.clientY;
}
function onmouseup(event){
dragging = false;
}
/*using clientX and clientY derived from click event, use to create modelX and Y
rotation before passing to model matrices rotation transformations*/
function onmousemove(event){
//console.log(event.clientX, event.clientY);
if (dragging){
var dX = event.clientX - lastClientX;
var dY = event.clientY - lastClientY;
modelRotationY = modelRotationY + dX;
modelRotationX = modelRotationX + dY;
if (modelRotationX > 90.0){
modelRotationX = 90.0;
}
if (modelRotationX < -90.0){
modelRotationX = -90.0;
}
requestAnimationFrame(draw);
}
lastClientX = event.clientX;
lastClientY = event.clientY;
}
function startVideo() {
videoElement.play();
intervalID = setInterval(draw, 15);
}
function videoDone() {
clearInterval(intervalID);
}
//define Shader object constructor function
function Shader(vertexId, fragmentId){
this.program = createProgram(gl, document.getElementById( vertexId).text,
document.getElementById(fragmentId).text);
this.modelMatrixLocation = gl.getUniformLocation(this.program, 'modelMatrix');
this.viewMatrixLocation = gl.getUniformLocation(this.program, 'viewMatrix');
this.projectionMatrixLocation = gl.getUniformLocation(this.program, 'projectionMatrix');
this.vertexPositionLocation = gl.getAttribLocation(this.program, 'vertexPosition');
this.lightPositionLocation = gl.getUniformLocation(this.program, 'lightPosition');
this.modelColorLocation = gl.getUniformLocation(this.program, 'modelColor');
this.lightColorLocation = gl.getUniformLocation(this.program, 'lightColor');
this.vertexNormalLocation = gl.getAttribLocation(this.program, 'vertexNormal');
this.vertexTexCoordLocation = gl.getAttribLocation(this.program, 'vertexTexCoord');
gl.enableVertexAttribArray(this.vertexPositionLocation);
gl.enableVertexAttribArray(this.vertexNormalLocation);
gl.enableVertexAttribArray(this.vertexTexCoordLocation);
}
//define use() method for Shader objects
Shader.prototype.use = function(projectionMatrix, modelMatrix, viewMatrix){
gl.useProgram(this.program);
gl.uniformMatrix4fv(this.modelMatrixLocation, false, modelMatrix.elements);
gl.uniformMatrix4fv(this.viewMatrixLocation, false, viewMatrix.elements);
gl.uniformMatrix4fv(this.projectionMatrixLocation, false, projectionMatrix.elements);
gl.uniform4f(this.lightPositionLocation, xValue, yValue, zValue, 0.0);
gl.uniform3f(this.modelColorLocation, 0.6, 0.3, 0.2);
gl.uniform3f(this.lightColorLocation, 1.0, 1.0, 1.0);
}
//define Model object constructor function
function Model(positions, triangles, normals, texCoords){
//initialize buffer objects
this.positionBuffer = gl.createBuffer();
this.triangleBuffer = gl.createBuffer();
this.normalsBuffer = gl.createBuffer();
this.texCoordBuffer = gl.createBuffer();
//copy vertex data from array in CPU onto GPU
this.positionArray = new Float32Array(flatten(positions));
gl.bindBuffer(gl.ARRAY_BUFFER, this.positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, this.positionArray, gl.STATIC_DRAW);
//copy triangle data from array in CPU onto GPU
this.triangleArray = new Uint16Array(flatten(triangles));
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.triangleBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, this.triangleArray, gl.STATIC_DRAW);
this.normalsArray = new Float32Array(flatten(normals));
gl.bindBuffer(gl.ARRAY_BUFFER, this.normalsBuffer);
gl.bufferData(gl.ARRAY_BUFFER, this.normalsArray, gl.STATIC_DRAW);
this.textCoordArray = new Float32Array(flatten(texCoords));
gl.bindBuffer(gl.ARRAY_BUFFER, this.texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, this.textCoordArray, gl.STATIC_DRAW);
}
//define draw() method for Model objects to bind array buffers
Model.prototype.draw = function(shader){
gl.bindBuffer(gl.ARRAY_BUFFER, this.positionBuffer);
gl.vertexAttribPointer(shader.vertexPositionLocation, 3, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, this.normalsBuffer);
gl.vertexAttribPointer(shader.vertexNormalLocation, 3, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, this.texCoordBuffer);
gl.vertexAttribPointer(shader.vertexTexCoordLocation, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.triangleBuffer);
gl.drawElements(gl.TRIANGLES, this.triangleArray.length, gl.UNSIGNED_SHORT, 0);
}
//initialize texture object
function loadTexture(image, texture){
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
//requestAnimationFrame(draw);
}
function init(){
//initialize GL context
canvas = document.getElementById('webgl');
gl = getWebGLContext(canvas, false);
canvas.onmousedown = onmousedown;
canvas.onmouseup = onmouseup;
canvas.onmousemove = onmousemove;
//instantiate shader objects for each defined shader
texShader = new Shader('vertexShader', 'lightingFragmentShader');
//instantiate model objects for each model
chestModel = new Model(chest.positions, chest.triangles, chest.normals, chest.texCoords);
videoElement = document.getElementById("video");
modelTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, modelTexture);
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
new Uint8Array([255, 255, 255, 255]));
videoElement.addEventListener("canplaythrough", startVideo, true);
videoElement.addEventListener("playing", function (){ copyVideo = true; }, true);
videoElement.addEventListener("ended", videoDone, true);
/* videoElement.onload = function() {
loadTexture(videoElement, modelTexture);
}*/
loadTexture(videoElement, modelTexture);
videoElement.crossOrigin = "anonymous";
videoElement.src = "Firefox.ogv";
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.enable(gl.DEPTH_TEST);
//request animation frame
requestAnimationFrame(draw);
}
function draw(){
if (copyVideo) {
updateTexture();
}
//compose matrices for transformations
var viewMatrix = new Matrix4();
var projectionMatrix = new Matrix4();
viewMatrix.translate(0.0, 0.0, -1.8);
projectionMatrix.perspective(90, 1, 1, 10);
//set color and refresh rendering for canvas
gl.clearColor(0.5, 0.5, 0.5, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
/*instantiate model matrices for each respective model
and draw models with applied shader*/
var chestModelMatrix = new Matrix4();
chestModelMatrix.rotate(modelRotationX, 1, 0, 0 );
chestModelMatrix.rotate(modelRotationY, 0, 1, 0 );
chestModelMatrix.translate(0.0, 0.0, 0.0, 0.0 );
//set uniform locations and apply shader to designated model
texShader.use(projectionMatrix, chestModelMatrix, viewMatrix);
chestModel.draw(texShader);
}
function updateTexture() {
gl.bindTexture(gl.TEXTURE_2D, modelTexture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, videoElement);
}
I'm not an HTML video expert, but this works for me. You need to wait for the playing event before you start copying the video to a texture:
var copyVideo; // if true we can call gl.texImage2D
var video = document.createElement("video");
video.src = someVideoUrl;
video.addEventListener("playing", function() {
copyVideo = true;
}, true);
video.addEventListener("ended", function() {
video.currentTime = 0;
video.play();
}, true);
video.play();
Working example:
"use strict";
var m4 = twgl.m4;
var gl = document.getElementById("c").getContext("webgl");
var programInfo = twgl.createProgramInfo(gl, ["vs", "fs"]);
var bufferInfo = twgl.primitives.createCubeBufferInfo(gl, 2);
var camera = m4.identity();
var view = m4.identity();
var viewProjection = m4.identity();
var texture = twgl.createTexture(gl, {
src: [0, 0, 255],
format: gl.RGB,
min: gl.LINEAR,
wrap: gl.CLAMP_TO_EDGE,
});
var uniforms = {
u_texture: texture,
u_worldViewProjection: m4.identity(),
};
var copyVideo;
var video = document.createElement("video");
video.src = "https://webglsamples.org/color-adjust/sample-video.mp4";
video.crossOrigin = "*";
video.volume = 0; // sample video has bad audio
video.addEventListener("playing", function() {
copyVideo = true;
}, true);
video.addEventListener("ended", function() {
video.currentTime = 0;
video.play();
}, true);
video.addEventListener("error", function() {
console.log("could not play video");
}, true);
video.play();
function render(time) {
time *= 0.001;
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
var radius = 5;
var orbitSpeed = time;
var projection = m4.perspective(30 * Math.PI / 180, gl.canvas.clientWidth / gl.canvas.clientHeight, 0.5, 100);
var eye = [Math.cos(orbitSpeed) * radius, 3, Math.sin(orbitSpeed) * radius];
var target = [0, 0, 0];
var up = [0, 1, 0];
if (copyVideo) {
gl.bindTexture(gl.TEXTURE_2D, texture)
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video);
}
m4.lookAt(eye, target, up, camera);
m4.inverse(camera, view);
m4.multiply(projection, view, viewProjection);
m4.copy(viewProjection, uniforms.u_worldViewProjection);
gl.useProgram(programInfo.program);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo)
twgl.setUniforms(programInfo, uniforms);
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
body { margin: 0; }
#c { width: 100vw; height: 100vh; display: block; }
<script id="vs" type="notjs">
uniform mat4 u_worldViewProjection;
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 v_texCoord;
void main() {
v_texCoord = texcoord;
gl_Position = u_worldViewProjection * position;
}
</script>
<script id="fs" type="notjs">
precision mediump float;
varying vec2 v_texCoord;
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, v_texCoord);
}
</script>
<script src="https://twgljs.org/dist/2.x/twgl-full.min.js"></script>
<canvas id="c"></canvas>
I've been trying to learn some webgl, and basically have two programs where the rendered pixels get stored in a texture and then used in the other program's shader.
webgl.modTexture(gl, webgl.texture[1], webgl.frameBuffer[0]);
This call attempts to read the pixels of the rendered output and put the modified data back into the texture so it can be used in the next program's render routine.
All I get is a black screen that gradually becomes more transparent, but u_image should also be visible (since it is rendered in the first program, where the pixels are read with readPixels).
If I comment out the second program's rendering:
gl.useProgram(webgl.program);
gl.clearColor(0, 0.5, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.TRIANGLES, 0, 6);
Then it will just render u_image without any fading.
So I'm not sure what the problem is. Maybe it isn't reading the pixels correctly? But then, why would the fading work if that were the case? Is there something wrong with trying to use the two textures?
Hopefully someone can look at the code and see what might be the issue.
//webgl stuff
var webgl = new webglData();
function webglData(){
this.then = 0; //used for deltatime in rendering
this.request; //requestanimationframe, used when stopping/starting
this.canvas;
this.context;
this.div;
this.program;
this.cellProgram;
this.texture = [];
this.frameBuffer = [];
this.cellVShader = `
attribute vec2 aVertexPosition;
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
varying vec2 v_NW;
varying vec2 v_N;
varying vec2 v_NE;
varying vec2 v_W;
varying vec2 v_E;
varying vec2 v_SW;
varying vec2 v_S;
varying vec2 v_SE;
vec2 getOffset( int x, int y){
vec2 v = floor(a_texCoord * u_resolution);
v.x += float(x), v.y += float(y);
v /= u_resolution;
return v;
}
void main() {
//v_texCoord = a_texCoord;
//gl_Position = vec4(aVertexPosition, 0.0, 1.0);
// convert the rectangle from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
// pass the texCoord to the fragment shader
// The GPU will interpolate this value between points.
v_texCoord = a_texCoord;
v_NW = getOffset( -1, 1);
v_N = getOffset( 0, 1);
v_NE = getOffset( 1, 1);
v_W = getOffset( -1, 0);
v_E = getOffset( 1, 0);
v_SW = getOffset( -1, -1);
v_S = getOffset( 0, -1);
v_SE = getOffset( 1, -1);
}
`;
this.cellFShader = `
#ifdef GL_ES
precision highp float;
#endif
uniform sampler2D u_canvas1;
uniform vec4 uColor;
// our texture
uniform sampler2D u_before;
uniform sampler2D u_after;
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
varying vec2 v_NW;
varying vec2 v_N;
varying vec2 v_NE;
varying vec2 v_W;
varying vec2 v_E;
varying vec2 v_SW;
varying vec2 v_S;
varying vec2 v_SE;
void main() {
// Look up a color from the texture.
gl_FragColor = texture2D(u_image, v_W);
//gl_FragColor = uColor;
}
`;
this.vertexShader = `
attribute vec2 aVertexPosition;
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
//v_texCoord = a_texCoord;
//gl_Position = vec4(aVertexPosition, 0.0, 1.0);
// convert the rectangle from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
// pass the texCoord to the fragment shader
// The GPU will interpolate this value between points.
v_texCoord = a_texCoord;
}
`;
this.fragmentShader = `
#ifdef GL_ES
precision highp float;
#endif
uniform sampler2D u_canvas1;
uniform vec4 uColor;
// our texture
uniform sampler2D u_before;
uniform sampler2D u_after;
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
// Look up a color from the texture.
gl_FragColor = texture2D(u_before, v_texCoord);
//gl_FragColor = uColor;
}
`;
this.init = function(){
this.div = innerDoc.getElementById('webglDiv');
gui.Window.get().showDevTools();
this.canvas = document.createElement('canvas');
this.context = this.canvas.getContext("experimental-webgl");
this.canvas.width = 512;
this.canvas.height = 512;
this.canvas.style.position = 'absolute';
this.canvas.style.zIndex = -1;
this.canvas.style.pointerEvents = 'none';
this.div.appendChild(this.canvas);
if(!this.context)return;
var gl = this.context;
gl.viewport(0, 0, this.canvas.width, this.canvas.height);
gl.clearColor(0, 0.5, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
//compile the shaders and create the program for webgl
var vs = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vs, this.vertexShader);
gl.compileShader(vs);
var fs = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fs, this.fragmentShader);
gl.compileShader(fs);
var cvs = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(cvs, this.cellVShader);
gl.compileShader(cvs);
var cfs = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(cfs, this.cellFShader);
gl.compileShader(cfs);
this.program = gl.createProgram();
gl.attachShader(this.program, vs);
gl.attachShader(this.program, fs);
gl.linkProgram(this.program);
//gl.useProgram(this.program);
this.cellProgram = gl.createProgram();
gl.attachShader(this.cellProgram, cvs);
gl.attachShader(this.cellProgram, cfs);
gl.linkProgram(this.cellProgram);
//gl.useProgram(this.cellProgram);
//output any errors
if (!gl.getShaderParameter(vs, gl.COMPILE_STATUS))console.log(gl.getShaderInfoLog(vs));
if (!gl.getShaderParameter(fs, gl.COMPILE_STATUS))console.log(gl.getShaderInfoLog(fs));
if (!gl.getProgramParameter(this.program, gl.LINK_STATUS))console.log(gl.getProgramInfoLog(this.program));
if (!gl.getShaderParameter(cvs, gl.COMPILE_STATUS))console.log(gl.getShaderInfoLog(cvs));
if (!gl.getShaderParameter(cfs, gl.COMPILE_STATUS))console.log(gl.getShaderInfoLog(cfs));
if (!gl.getProgramParameter(this.cellProgram, gl.LINK_STATUS))console.log(gl.getProgramInfoLog(this.cellProgram));
this.setupStuff(gl, this.program);
this.setupStuff(gl, this.cellProgram);
this.texture.push( this.setupTexture(tool.canvas, 0, "u_image") );
this.texture.push( this.createBlankTexture(gl, tool.canvas.width*tool.canvas.height*4, gl.RGBA, tool.canvas.width, tool.canvas.height, "u_before") );
this.texture.push( this.createBlankTexture(gl, tool.canvas.width*tool.canvas.height*4, gl.RGBA, tool.canvas.width, tool.canvas.height, "u_after") );
this.frameBuffer.push( gl.createFramebuffer() );
this.request = requestAnimationFrame(this.render);
}
this.render = function(now){
if(!webgl.context || config.tab!='scene'){cancelAnimationFrame(webgl.request); return;}
var gl = webgl.context;
// Convert the time to seconds
now *= 0.001;
// Subtract the previous time from the current time
var deltaTime = now - webgl.then;
// Remember the current time for the next frame.
webgl.then = now;
gl.useProgram(webgl.cellProgram);
gl.drawArrays(gl.TRIANGLES, 0, 6);
webgl.modTexture(gl, webgl.texture[1], webgl.frameBuffer[0]);
gl.useProgram(webgl.program);
gl.clearColor(0, 0.5, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.TRIANGLES, 0, 6);
this.request = requestAnimationFrame(webgl.render);
}
this.setupStuff = function(gl, program){
gl.useProgram(program);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var texCoordLocation = gl.getAttribLocation(program, "a_texCoord");
// provide texture coordinates for the rectangle.
//this will be what the texture gets displayed on?
var texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
// lookup uniforms
var resolutionLocation = gl.getUniformLocation(program, "u_resolution");
// set the resolution
gl.uniform2f(resolutionLocation, this.canvas.width, this.canvas.height);
// Create a buffer for the position of the rectangle corners.
// store the data for the texture coordinates that were defined above, into the a_position?
var buffer = gl.createBuffer();
//this.frameBuffer.push(buffer);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
// Set a rectangle the same size as the image.
//I guess this adjusts the buffer data that was just created?
this.setRectangle(gl, 0, 0, this.canvas.width, this.canvas.height);
//var tex2 = setupTexture(canvas2, 1, program, "u_canvas2");
// Draw the rectangle.
//gl.drawArrays(gl.TRIANGLES, 0, 6);
//gl.drawArrays(gl.TRIANGLES, 0, numItems);
}
this.refreshTexture = function(){
if(!this.context)return;
var gl = this.context;
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, this.texture[0]);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, tool.canvas);
//gl.drawArrays(gl.TRIANGLES, 0, 6);
}
this.modTexture = function(gl, sTexture, framebuffer){
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, sTexture, 0);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE)
{
var sTextureSize = sTexture.width * sTexture.height * 4; // r, g, b, a
var pixels = new Uint8Array( sTextureSize );
gl.readPixels( 0, 0, sTexture.width, sTexture.height, gl.RGBA, gl.UNSIGNED_BYTE, pixels );
for( var i=0 ; i<sTextureSize ; i+=4 )
{
if( pixels[i+3] > 0 )
{
pixels[i+3] = Math.min( 255, pixels[i+3]*0.995 ); // fade the alpha slightly each pass
}
}
// upload changes
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, sTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA,
sTexture.width, sTexture.height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, pixels);
}
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
this.setupFrameBuffer = function(canvas, textureUnit, program, uniformName) {
if(!this.context)return;
var gl = this.context;
var rttFramebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, rttFramebuffer);
rttFramebuffer.width = this.canvas.width;
rttFramebuffer.height = this.canvas.height;
var rttTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, rttTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, rttFramebuffer.width, rttFramebuffer.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.generateMipmap(gl.TEXTURE_2D); // level 0 must exist before mipmaps can be generated
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, rttTexture, 0);
gl.bindTexture(gl.TEXTURE_2D, null);
gl.bindRenderbuffer(gl.RENDERBUFFER, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
return rttFramebuffer;
}
this.setupTexture = function(canvas, textureUnit, uniformName) {
var gl = this.context;
var tex = gl.createTexture();
this.updateTextureFromCanvas(tex, canvas, textureUnit);
tex.width = canvas.width;
tex.height= canvas.height;
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.useProgram(this.program);
var location = gl.getUniformLocation(this.program, uniformName);
gl.uniform1i(location, textureUnit);
gl.useProgram(this.cellProgram);
location = gl.getUniformLocation(this.cellProgram, uniformName);
gl.uniform1i(location, textureUnit);
return tex;
}
this.updateTextureFromCanvas = function(tex, canvas, textureUnit) {
if(!this.context)return;
var gl = this.context;
gl.activeTexture(gl.TEXTURE0 + textureUnit);
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
}
this.createBlankTexture = function(gl, dataArray, type, width, height, uniformName) {
var dataTypedArray = new Uint8Array(dataArray); // dataArray is a byte count here, so this allocates a zero-filled array of that length
for( var i=0 ; i<dataArray ; i+=4 )
{
dataTypedArray[i+3] = 255;
}
var texture = gl.createTexture();
texture.width = width;
texture.height= height;
gl.activeTexture(gl.TEXTURE0 + this.texture.length);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, type, width, height, 0, type, gl.UNSIGNED_BYTE, dataTypedArray);
// Other texture setup here, like filter modes and mipmap generation
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.useProgram(this.program);
var location = gl.getUniformLocation(this.program, uniformName);
gl.uniform1i(location, this.texture.length);
gl.useProgram(this.cellProgram);
location = gl.getUniformLocation(this.cellProgram, uniformName);
gl.uniform1i(location, this.texture.length);
return texture;
}
this.setRectangle = function(gl, x, y, width, height) {
var x1 = x;
var x2 = x + width;
var y1 = y;
var y2 = y + height;
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
x1, y1,
x2, y1,
x1, y2,
x1, y2,
x2, y1,
x2, y2]), gl.STATIC_DRAW);
}
}
Okay, I managed to get things working.
First, I had to learn exactly what needs to be called at each stage (see: Do I have to create separate buffers per WebGL program?).
Second, I had to change the framebuffer part so that it attaches a different texture than the one the data is being put into; you can't render into the same texture you are reading from. This was the relevant code:
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, sTexture, 0);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, sTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA,
sTexture.width, sTexture.height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, pixels);
so sTexture in the framebufferTexture2D call needs to be replaced with a different texture, like so:
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, someOtherTexture, 0);
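In other words, the pattern is ping-pong rendering between two textures: attach one to the framebuffer while the shader samples the other, then swap. Here is a minimal sketch (fbo, readTex and writeTex are illustrative names, not from the code above), assuming a program with its sampler on texture unit 0 is in use and the 6-vertex quad buffer is already set up:
var fbo = gl.createFramebuffer();
function step(gl, readTex, writeTex) {
  // render into writeTex while the shader reads from readTex
  gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, writeTex, 0);
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(gl.TEXTURE_2D, readTex);
  gl.drawArrays(gl.TRIANGLES, 0, 6);
  gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
// each frame: step(gl, texA, texB); draw texB to the screen; then swap texA and texB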
I wrote code that tries to draw just the background image in a WebGL 3D scene. Here are the relevant parts:
<script id="shader-fs" type="x-shader/x-fragment">
varying highp vec2 vTexCoord;
uniform sampler2D uSampler;
void main(void) {
gl_FragColor = texture2D(uSampler, vec2(vTexCoord.s, vTexCoord.t));
}
</script>
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying highp vec2 vTexCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTexCoord = aTextureCoord;
}
</script>
<script>
var VBack = [
-24.0, 0.0, 0.0,
24.0, 0.0, 0.0,
24.0, 48.0, 0.0,
-24.0, 48.0, 0.0
];
var vTBack = [
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0
];
var VBuffer;
var vTBuffer;
function initMatrix() {
orthoMatrix = makeOrtho(-24.0, 24.0, 0.0, 48.0, 20.0, -20.0); // From glUtils.js
mvMatrix = Matrix.I(4);
...
}
function handleBkTex(tex) {
GL.bindTexture(GL.TEXTURE_2D, tex);
GL.pixelStorei(GL.UNPACK_FLIP_Y_WEBGL, true);
GL.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, GL.RGBA, GL.UNSIGNED_BYTE, tex.Img);
GL.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.NEAREST);
GL.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.NEAREST);
GL.bindTexture(GL.TEXTURE_2D, null);
}
function initBkgnd() {
backTex = GL.createTexture();
backTex.Img = new Image();
backTex.Img.onload = function() {
handleBkTex(backTex);
}
backTex.Img.src = "Bkgnd.jpg";
}
function drawBkgnd() {
GL.bindBuffer(GL.ARRAY_BUFFER, VBuffer);
GL.vertexAttribPointer(vertexPositionAttribute, 3, GL.FLOAT, false, 0, 0);
GL.bindBuffer(GL.ARRAY_BUFFER, vTBuffer);
GL.vertexAttribPointer(texCoordAttribute, 2, GL.FLOAT, false, 0, 0);
GL.activeTexture(GL.TEXTURE0);
GL.bindTexture(GL.TEXTURE_2D, backTex);
GL.uniform1i(GL.getUniformLocation(shaderProgram, "uSampler"), 0);
GL.drawArrays(GL.TRIANGLE_STRIP, 0, 4);
}
function start() {
var canvas = document.getElementById("glcanvas");
initWebGL(canvas);
if (GL) {
initShaders();
initBuffers();
initMatrix();
initBkgnd();
drawBkgnd();
}
}
The canvas size is 512 x 512, the same as the image size, but I don't get the correct image on the canvas. How do I do this correctly?
Do you want the answer to be how to set up a 3D matrix to draw a 2D image, or how to do this efficiently?
Here's the efficient answer. There's no reason to use 3D to draw 2D. Change your vertices to
var VBack = [
-1, -1,
1, -1,
1, 1,
-1, 1,
];
Change your call to vertexAttribPointer to
GL.vertexAttribPointer(vertexPositionAttribute, 2, GL.FLOAT, false, 0, 0);
Change your vertex shader to
attribute vec4 aVertexPosition;
attribute vec2 aTextureCoord;
varying highp vec2 vTexCoord;
void main(void) {
gl_Position = aVertexPosition;
vTexCoord = aTextureCoord;
}
And it should work.
See this set of articles for more about 2D in WebGL.
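Putting those changes together, here is a minimal sketch of the whole 2D draw. Note that I use two explicit triangles (GL.TRIANGLES with 6 vertices) instead of the TRIANGLE_STRIP from the original code, and I reuse the names from the question (VBuffer, vTBuffer, vertexPositionAttribute, texCoordAttribute, backTex, shaderProgram) with shaderProgram assumed to be already in use; treat it as an illustration rather than a drop-in replacement:
// Full-canvas quad in clip space, as two triangles, so no matrices are needed.
var quadVerts = new Float32Array([
  -1, -1,   1, -1,   1,  1,   // first triangle
  -1, -1,   1,  1,  -1,  1    // second triangle
]);
var quadTexCoords = new Float32Array([
   0, 0,   1, 0,   1, 1,
   0, 0,   1, 1,   0, 1
]);
GL.bindBuffer(GL.ARRAY_BUFFER, VBuffer);
GL.bufferData(GL.ARRAY_BUFFER, quadVerts, GL.STATIC_DRAW);
GL.enableVertexAttribArray(vertexPositionAttribute);
GL.vertexAttribPointer(vertexPositionAttribute, 2, GL.FLOAT, false, 0, 0);
GL.bindBuffer(GL.ARRAY_BUFFER, vTBuffer);
GL.bufferData(GL.ARRAY_BUFFER, quadTexCoords, GL.STATIC_DRAW);
GL.enableVertexAttribArray(texCoordAttribute);
GL.vertexAttribPointer(texCoordAttribute, 2, GL.FLOAT, false, 0, 0);
GL.activeTexture(GL.TEXTURE0);
GL.bindTexture(GL.TEXTURE_2D, backTex);
GL.uniform1i(GL.getUniformLocation(shaderProgram, "uSampler"), 0);
GL.drawArrays(GL.TRIANGLES, 0, 6);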
I found some small errors in my code. Here they are:
The WebGL scene must be redrawn continuously, so the function drawBkgnd() must be called repeatedly, e.g. with setInterval() or requestAnimationFrame(); a sketch of such a loop follows below.
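For example, the repeated draw could look like this (whether you also need the clear each frame depends on the rest of the scene):
// Redraw every frame instead of calling drawBkgnd() once in start().
function renderLoop() {
  GL.clear(GL.COLOR_BUFFER_BIT);
  drawBkgnd();
  requestAnimationFrame(renderLoop);
}
requestAnimationFrame(renderLoop);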
To draw a rectangle correctly with drawArrays() and GL.TRIANGLE_STRIP, the vertices must be ordered:
*2 *3
*0 *1
or any equivalent order, but not:
*3 *2
*0 *1
So the order of the vertices in VBack must be changed to:
var VBack = [
-24.0, 0.0, 0.0,
24.0, 0.0, 0.0,
-24.0, 48.0, 0.0,
24.0, 48.0, 0.0
];
and the order of vertices in vTBack must also be changed accordingly.
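For completeness, a matching order for vTBack would be something like the following (not shown above, so take it as a sketch):
// Texture coordinates reordered to match the new vertex order
// (bottom-left, bottom-right, top-left, top-right).
var vTBack = [
  0.0, 0.0,
  1.0, 0.0,
  0.0, 1.0,
  1.0, 1.0
];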