Why does this not get the corresponding pixel - javascript

After drawing a triangle, I want to use the readPixels method to get the pixel information, but I can't get it with the following code.
const gl = document.querySelector("canvas").getContext("webgl2", {
preserveDrawingBuffer: true
});
render();
read(); // read in other event
function render() {
const fragment_center = `#version 300 es
precision highp float;
in vec3 HSV;
out vec4 fragColor;
vec3 hsv2rgb(vec3 c) {
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
void main() {
vec3 color = hsv2rgb(HSV);
fragColor = vec4(color, 1.0);
}
`;
const vertex_center = `#version 300 es
precision highp float;
in vec2 position;
in vec2 offset;
in float hue;
out vec3 HSV;
void main() {
float x = offset.x + position.x + 0.15;
float y = offset.y + position.y + 0.3;
HSV = vec3(hue, y, x);
gl_Position = vec4(position, 1.0, 1.0);
}
`;
const program = initProgram(gl, vertex_center, fragment_center);
const radius = 1;
const x1 = Math.cos((30 / 180) * Math.PI) * radius;
const y1 = Math.sin((30 / 180) * Math.PI) * radius;
const v_triangle = new Float32Array([0, radius, x1, -y1, -x1, -y1]);
let buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, v_triangle, gl.STATIC_DRAW);
let gl_position = gl.getAttribLocation(program, "position");
gl.vertexAttribPointer(gl_position, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl_position);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, v_triangle.length / 2);
}
function read() {
const pixel = new Uint8Array(4);
gl.readPixels(0, 0.5, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
log(pixel);
}
function log(...args) {
const elem = document.createElement("pre");
elem.textContent = [...args].join(" ");
document.body.appendChild(elem);
}
function initProgram(gl, v, f) {
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, v);
gl.compileShader(vertexShader);
let status = gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(vertexShader));
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, f);
gl.compileShader(fragmentShader);
status = gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(fragmentShader));
const program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
gl.useProgram(program);
return program;
}
<canvas></canvas>
The above code does not read the expected pixel. However, if I change the implementation to not use drawArrays and instead use something like clearColor, I can read the pixels.
const gl = document.querySelector("canvas").getContext("webgl");
render();
read(); // read in same event
function render() {
gl.clearColor(.25, .5, .75, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
}
function read() {
const pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
log(pixel);
}
function log(...args) {
const elem = document.createElement("pre");
elem.textContent = [...args].join(' ');
document.body.appendChild(elem);
}
<canvas></canvas>

The coordinates for gl.readPixels are window (framebuffer) coordinates. These coordinates are integral and the bottom left is (0, 0) (the non-integral y of 0.5 is coerced to 0), so
gl.readPixels(0, 0.5, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
actually reads the pixel in the bottom left corner of the framebuffer. That fragment is not covered when you draw the triangle and still contains the clear color. Read a fragment from a different position, e.g.:
gl.readPixels(200, 100, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
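If you want to read the pixel under the mouse rather than at a hard-coded position, a minimal sketch (assuming canvas and gl are the canvas element and its context, and the canvas is not scaled by CSS) is to flip the Y coordinate, because mouse events use a top-left origin while readPixels uses a bottom-left origin:
canvas.addEventListener("click", (e) => {
  const rect = canvas.getBoundingClientRect();
  const x = Math.floor(e.clientX - rect.left);
  const y = canvas.height - 1 - Math.floor(e.clientY - rect.top); // flip Y
  const pixel = new Uint8Array(4);
  gl.readPixels(x, y, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
  log('(' + x + ', ' + y + '): ' + pixel);
});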
const canvas = document.querySelector("canvas");
const gl = canvas.getContext("webgl2", {
preserveDrawingBuffer: true
});
render();
read(); // read in other event
function render() {
const fragment_center = `#version 300 es
precision highp float;
in vec3 HSV;
out vec4 fragColor;
vec3 hsv2rgb(vec3 c) {
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
void main() {
vec3 color = hsv2rgb(HSV);
fragColor = vec4(color, 1.0);
}
`;
const vertex_center = `#version 300 es
precision highp float;
in vec2 position;
in vec2 offset;
in float hue;
out vec3 HSV;
void main() {
float x = offset.x + position.x + 0.15;
float y = offset.y + position.y + 0.3;
HSV = vec3(hue, y, x);
gl_Position = vec4(position, 1.0, 1.0);
}
`;
const program = initProgram(gl, vertex_center, fragment_center);
const radius = 1;
const x1 = Math.cos((30 / 180) * Math.PI) * radius;
const y1 = Math.sin((30 / 180) * Math.PI) * radius;
const v_triangle = new Float32Array([0, radius, x1, -y1, -x1, -y1]);
let buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, v_triangle, gl.STATIC_DRAW);
let gl_position = gl.getAttribLocation(program, "position");
gl.vertexAttribPointer(gl_position, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(gl_position);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, v_triangle.length / 2);
}
function read() {
const pixel = new Uint8Array(4);
const x = canvas.width/2;
const y = canvas.height/2;
gl.readPixels(x, y, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
log('(' + x + ', ' + y + '): ' + pixel);
}
function log(...args) {
const elem = document.createElement("pre");
elem.textContent = [...args].join(" ");
document.body.appendChild(elem);
}
function initProgram(gl, v, f) {
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, v);
gl.compileShader(vertexShader);
let status = gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(vertexShader));
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, f);
gl.compileShader(fragmentShader);
status = gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(fragmentShader));
const program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
gl.useProgram(program);
return program;
}
<canvas></canvas>
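Note why preserveDrawingBuffer: true matters here: by default the browser may clear the drawing buffer once it has been composited, so a readPixels issued from a later event returns zeros. If you would rather not keep the buffer around, a minimal sketch is to read in the same callback that renders:
function frame() {
  render(); // draw the triangle
  const pixel = new Uint8Array(4);
  // read immediately, before the browser composites (and possibly clears) the buffer
  gl.readPixels(200, 100, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
  log(pixel);
}
requestAnimationFrame(frame);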

Related

WebGL into p5.js canvas getting distorted

I'm relatively new to shaders and am having trouble putting shader code into a p5.js canvas. The smoke.js script works fine when I append it to an HTML canvas, but when I try to put it into the canvas created in p5, it only appears in the top right corner. Any idea what's going on here? (See the upper right corner for where the shader is being placed.)
//smoke.js
//taken from https://codepen.io/PavelDoGreat/details/zdWzEL/
'use strict';
window.addEventListener('load', function () {
const canvas = document.getElementById('myCanvas');
console.log(canvas);
const { gl, ext } = getWebGLContext(canvas);
let config = {
TEXTURE_DOWNSAMPLE: 1,
DENSITY_DISSIPATION: 0.98,
VELOCITY_DISSIPATION: 0.99,
PRESSURE_DISSIPATION: 0.8,
PRESSURE_ITERATIONS: 25,
CURL: 30,
SPLAT_RADIUS: 0.005
}
let pointers = [];
let splatStack = [];
function getWebGLContext(canvas) {
const params = { alpha: false, depth: false, stencil: false, antialias: false };
let gl = canvas.getContext('webgl2', params);
const isWebGL2 = !!gl;
if (!isWebGL2)
gl = canvas.getContext('webgl', params) || canvas.getContext('experimental-webgl', params);
let halfFloat;
let supportLinearFiltering;
if (isWebGL2) {
gl.getExtension('EXT_color_buffer_float');
supportLinearFiltering = gl.getExtension('OES_texture_float_linear');
} else {
halfFloat = gl.getExtension('OES_texture_half_float');
supportLinearFiltering = gl.getExtension('OES_texture_half_float_linear');
}
gl.clearColor(0.0, 0.0, 0.0, 1.0);
const halfFloatTexType = isWebGL2 ? gl.HALF_FLOAT : halfFloat.HALF_FLOAT_OES;
let formatRGBA;
let formatRG;
let formatR;
if (isWebGL2) {
formatRGBA = getSupportedFormat(gl, gl.RGBA16F, gl.RGBA, halfFloatTexType);
formatRG = getSupportedFormat(gl, gl.RG16F, gl.RG, halfFloatTexType);
formatR = getSupportedFormat(gl, gl.R16F, gl.RED, halfFloatTexType);
}
else {
formatRGBA = getSupportedFormat(gl, gl.RGBA, gl.RGBA, halfFloatTexType);
formatRG = getSupportedFormat(gl, gl.RGBA, gl.RGBA, halfFloatTexType);
formatR = getSupportedFormat(gl, gl.RGBA, gl.RGBA, halfFloatTexType);
}
return {
gl,
ext: {
formatRGBA,
formatRG,
formatR,
halfFloatTexType,
supportLinearFiltering
}
};
}
function getSupportedFormat(gl, internalFormat, format, type) {
if (!supportRenderTextureFormat(gl, internalFormat, format, type)) {
switch (internalFormat) {
case gl.R16F:
return getSupportedFormat(gl, gl.RG16F, gl.RG, type);
case gl.RG16F:
return getSupportedFormat(gl, gl.RGBA16F, gl.RGBA, type);
default:
return null;
}
}
return {
internalFormat,
format
}
}
function supportRenderTextureFormat(gl, internalFormat, format, type) {
let texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, 4, 4, 0, format, type, null);
let fbo = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status != gl.FRAMEBUFFER_COMPLETE)
return false;
return true;
}
function pointerPrototype() {
this.id = -1;
this.x = 0;
this.y = 0;
this.dx = 0;
this.dy = 0;
this.down = false;
this.moved = false;
this.color = [30, 0, 300];
}
pointers.push(new pointerPrototype());
class GLProgram {
constructor(vertexShader, fragmentShader) {
this.uniforms = {};
this.program = gl.createProgram();
gl.attachShader(this.program, vertexShader);
gl.attachShader(this.program, fragmentShader);
gl.linkProgram(this.program);
if (!gl.getProgramParameter(this.program, gl.LINK_STATUS))
throw gl.getProgramInfoLog(this.program);
const uniformCount = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
for (let i = 0; i < uniformCount; i++) {
const uniformName = gl.getActiveUniform(this.program, i).name;
this.uniforms[uniformName] = gl.getUniformLocation(this.program, uniformName);
}
}
bind() {
gl.useProgram(this.program);
}
}
function compileShader(type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS))
throw gl.getShaderInfoLog(shader);
return shader;
};
const baseVertexShader = compileShader(gl.VERTEX_SHADER, `
precision highp float;
precision mediump sampler2D;
attribute vec2 aPosition;
varying vec2 vUv;
varying vec2 vL;
varying vec2 vR;
varying vec2 vT;
varying vec2 vB;
uniform vec2 texelSize;
void main () {
vUv = aPosition * 0.5 + 0.5;
vL = vUv - vec2(texelSize.x, 0.0);
vR = vUv + vec2(texelSize.x, 0.0);
vT = vUv + vec2(0.0, texelSize.y);
vB = vUv - vec2(0.0, texelSize.y);
gl_Position = vec4(aPosition, 0.0, 1.0);
}
`);
const clearShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
uniform sampler2D uTexture;
uniform float value;
void main () {
gl_FragColor = value * texture2D(uTexture, vUv);
}
`);
const displayShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
uniform sampler2D uTexture;
void main () {
gl_FragColor = texture2D(uTexture, vUv);
}
`);
const splatShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
uniform sampler2D uTarget;
uniform float aspectRatio;
uniform vec3 color;
uniform vec2 point;
uniform float radius;
void main () {
vec2 p = vUv - point.xy;
p.x *= aspectRatio;
vec3 splat = exp(-dot(p, p) / radius) * color;
vec3 base = texture2D(uTarget, vUv).xyz;
gl_FragColor = vec4(base + splat, 1.0);
}
`);
const advectionManualFilteringShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
uniform sampler2D uVelocity;
uniform sampler2D uSource;
uniform vec2 texelSize;
uniform float dt;
uniform float dissipation;
vec4 bilerp (in sampler2D sam, in vec2 p) {
vec4 st;
st.xy = floor(p - 0.5) + 0.5;
st.zw = st.xy + 1.0;
vec4 uv = st * texelSize.xyxy;
vec4 a = texture2D(sam, uv.xy);
vec4 b = texture2D(sam, uv.zy);
vec4 c = texture2D(sam, uv.xw);
vec4 d = texture2D(sam, uv.zw);
vec2 f = p - st.xy;
return mix(mix(a, b, f.x), mix(c, d, f.x), f.y);
}
void main () {
vec2 coord = gl_FragCoord.xy - dt * texture2D(uVelocity, vUv).xy;
gl_FragColor = dissipation * bilerp(uSource, coord);
gl_FragColor.a = 1.0;
}
`);
const advectionShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
uniform sampler2D uVelocity;
uniform sampler2D uSource;
uniform vec2 texelSize;
uniform float dt;
uniform float dissipation;
void main () {
vec2 coord = vUv - dt * texture2D(uVelocity, vUv).xy * texelSize;
gl_FragColor = dissipation * texture2D(uSource, coord);
gl_FragColor.a = 1.0;
}
`);
const divergenceShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
varying vec2 vL;
varying vec2 vR;
varying vec2 vT;
varying vec2 vB;
uniform sampler2D uVelocity;
vec2 sampleVelocity (in vec2 uv) {
vec2 multiplier = vec2(1.0, 1.0);
if (uv.x < 0.0) { uv.x = 0.0; multiplier.x = -1.0; }
if (uv.x > 1.0) { uv.x = 1.0; multiplier.x = -1.0; }
if (uv.y < 0.0) { uv.y = 0.0; multiplier.y = -1.0; }
if (uv.y > 1.0) { uv.y = 1.0; multiplier.y = -1.0; }
return multiplier * texture2D(uVelocity, uv).xy;
}
void main () {
float L = sampleVelocity(vL).x;
float R = sampleVelocity(vR).x;
float T = sampleVelocity(vT).y;
float B = sampleVelocity(vB).y;
float div = 0.5 * (R - L + T - B);
gl_FragColor = vec4(div, 0.0, 0.0, 1.0);
}
`);
const curlShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
varying vec2 vL;
varying vec2 vR;
varying vec2 vT;
varying vec2 vB;
uniform sampler2D uVelocity;
void main () {
float L = texture2D(uVelocity, vL).y;
float R = texture2D(uVelocity, vR).y;
float T = texture2D(uVelocity, vT).x;
float B = texture2D(uVelocity, vB).x;
float vorticity = R - L - T + B;
gl_FragColor = vec4(vorticity, 0.0, 0.0, 1.0);
}
`);
const vorticityShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
varying vec2 vT;
varying vec2 vB;
uniform sampler2D uVelocity;
uniform sampler2D uCurl;
uniform float curl;
uniform float dt;
void main () {
float T = texture2D(uCurl, vT).x;
float B = texture2D(uCurl, vB).x;
float C = texture2D(uCurl, vUv).x;
vec2 force = vec2(abs(T) - abs(B), 0.0);
force *= 1.0 / length(force + 0.00001) * curl * C;
vec2 vel = texture2D(uVelocity, vUv).xy;
gl_FragColor = vec4(vel + force * dt, 0.0, 1.0);
}
`);
const pressureShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
varying vec2 vL;
varying vec2 vR;
varying vec2 vT;
varying vec2 vB;
uniform sampler2D uPressure;
uniform sampler2D uDivergence;
vec2 boundary (in vec2 uv) {
uv = min(max(uv, 0.0), 1.0);
return uv;
}
void main () {
float L = texture2D(uPressure, boundary(vL)).x;
float R = texture2D(uPressure, boundary(vR)).x;
float T = texture2D(uPressure, boundary(vT)).x;
float B = texture2D(uPressure, boundary(vB)).x;
float C = texture2D(uPressure, vUv).x;
float divergence = texture2D(uDivergence, vUv).x;
float pressure = (L + R + B + T - divergence) * 0.25;
gl_FragColor = vec4(pressure, 0.0, 0.0, 1.0);
}
`);
const gradientSubtractShader = compileShader(gl.FRAGMENT_SHADER, `
precision highp float;
precision mediump sampler2D;
varying vec2 vUv;
varying vec2 vL;
varying vec2 vR;
varying vec2 vT;
varying vec2 vB;
uniform sampler2D uPressure;
uniform sampler2D uVelocity;
vec2 boundary (in vec2 uv) {
uv = min(max(uv, 0.0), 1.0);
return uv;
}
void main () {
float L = texture2D(uPressure, boundary(vL)).x;
float R = texture2D(uPressure, boundary(vR)).x;
float T = texture2D(uPressure, boundary(vT)).x;
float B = texture2D(uPressure, boundary(vB)).x;
vec2 velocity = texture2D(uVelocity, vUv).xy;
velocity.xy -= vec2(R - L, T - B);
gl_FragColor = vec4(velocity, 0.0, 1.0);
}
`);
const circShader = compileShader(gl.FRAGMENT_SHADER, `
precision mediump float;
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
float circle(in vec2 _st, in float _radius){
vec2 dist = _st-vec2(0.5);
return 1.-smoothstep(_radius-(_radius*0.01),
_radius+(_radius*0.01),
dot(dist,dist)*4.0);
}
void main(){
vec2 st = gl_FragCoord.xy/u_resolution.xy;
vec3 color = vec3(circle(st,0.9));
gl_FragColor = vec4( color, 1.0 );
}
`);
let textureWidth;
let textureHeight;
let density;
let velocity;
let divergence;
let curl;
let pressure;
initFramebuffers();
const clearProgram = new GLProgram(baseVertexShader, clearShader);
const displayProgram = new GLProgram(baseVertexShader, displayShader);
const splatProgram = new GLProgram(baseVertexShader, splatShader);
const advectionProgram = new GLProgram(baseVertexShader, ext.supportLinearFiltering ? advectionShader : advectionManualFilteringShader);
const divergenceProgram = new GLProgram(baseVertexShader, divergenceShader);
const curlProgram = new GLProgram(baseVertexShader, curlShader);
const vorticityProgram = new GLProgram(baseVertexShader, vorticityShader);
const pressureProgram = new GLProgram(baseVertexShader, pressureShader);
const gradienSubtractProgram = new GLProgram(baseVertexShader, gradientSubtractShader);
const circProgram = new GLProgram(baseVertexShader, circShader);
function initFramebuffers() {
// textureWidth = gl.drawingBufferWidth >> config.TEXTURE_DOWNSAMPLE;
// textureHeight = gl.drawingBufferHeight >> config.TEXTURE_DOWNSAMPLE;
textureWidth = canvas.width;
textureHeight = canvas.height;
const texType = ext.halfFloatTexType;
const rgba = ext.formatRGBA;
const rg = ext.formatRG;
const r = ext.formatR;
density = createDoubleFBO(2, textureWidth, textureHeight, rgba.internalFormat, rgba.format, texType, ext.supportLinearFiltering ? gl.LINEAR : gl.NEAREST);
velocity = createDoubleFBO(0, textureWidth, textureHeight, rg.internalFormat, rg.format, texType, ext.supportLinearFiltering ? gl.LINEAR : gl.NEAREST);
divergence = createFBO(4, textureWidth, textureHeight, r.internalFormat, r.format, texType, gl.NEAREST);
curl = createFBO(5, textureWidth, textureHeight, r.internalFormat, r.format, texType, gl.NEAREST);
pressure = createDoubleFBO(6, textureWidth, textureHeight, r.internalFormat, r.format, texType, gl.NEAREST);
}
function createFBO(texId, w, h, internalFormat, format, type, param) {
gl.activeTexture(gl.TEXTURE0 + texId);
let texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, param);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, param);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, internalFormat, w, h, 0, format, type, null);
let fbo = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
// gl.viewport(0, 0, w, h);
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clear(gl.COLOR_BUFFER_BIT);
return [texture, fbo, texId];
}
function createDoubleFBO(texId, w, h, internalFormat, format, type, param) {
let fbo1 = createFBO(texId, w, h, internalFormat, format, type, param);
let fbo2 = createFBO(texId + 1, w, h, internalFormat, format, type, param);
return {
get read() {
return fbo1;
},
get write() {
return fbo2;
},
swap() {
let temp = fbo1;
fbo1 = fbo2;
fbo2 = temp;
}
}
}
const blit = (() => {
gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer());
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, -1, 1, 1, 1, 1, -1]), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, gl.createBuffer());
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array([0, 1, 2, 0, 2, 3]), gl.STATIC_DRAW);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(0);
return (destination) => {
gl.bindFramebuffer(gl.FRAMEBUFFER, destination);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
}
})();
let lastTime = Date.now();
multipleSplats(parseInt(Math.random() * 20) + 5);
update();
function update() {
// resizeCanvas();
// webglUtils.resizeCanvasToDisplaySize(canvas);
const dt = Math.min((Date.now() - lastTime) / 1000, 0.016);
lastTime = Date.now();
gl.viewport(0, 0, canvas.width, canvas.height);
// gl.viewport(0, 0, canvas.clientWidth, canvas.clientHeight);
circProgram.bind();
if (splatStack.length > 0)
multipleSplats(splatStack.pop());
advectionProgram.bind();
gl.uniform2f(advectionProgram.uniforms.texelSize, 1.0 / textureWidth, 1.0 / textureHeight);
gl.uniform1i(advectionProgram.uniforms.uVelocity, velocity.read[2]);
gl.uniform1i(advectionProgram.uniforms.uSource, velocity.read[2]);
gl.uniform1f(advectionProgram.uniforms.dt, dt);
gl.uniform1f(advectionProgram.uniforms.dissipation, config.VELOCITY_DISSIPATION);
blit(velocity.write[1]);
velocity.swap();
gl.uniform1i(advectionProgram.uniforms.uVelocity, velocity.read[2]);
gl.uniform1i(advectionProgram.uniforms.uSource, density.read[2]);
gl.uniform1f(advectionProgram.uniforms.dissipation, config.DENSITY_DISSIPATION);
blit(density.write[1]);
density.swap();
for (var i = 0; i < pointers.length; i++) {
const pointer = pointers[i];
if (pointer.moved) {
splat(pointer.x, pointer.y, pointer.dx, pointer.dy, pointer.color);
pointer.moved = false;
}
}
curlProgram.bind();
gl.uniform2f(curlProgram.uniforms.texelSize, 1.0 / textureWidth, 1.0 / textureHeight);
gl.uniform1i(curlProgram.uniforms.uVelocity, velocity.read[2]);
blit(curl[1]);
vorticityProgram.bind();
gl.uniform2f(vorticityProgram.uniforms.texelSize, 1.0 / textureWidth, 1.0 / textureHeight);
gl.uniform1i(vorticityProgram.uniforms.uVelocity, velocity.read[2]);
gl.uniform1i(vorticityProgram.uniforms.uCurl, curl[2]);
gl.uniform1f(vorticityProgram.uniforms.curl, config.CURL);
gl.uniform1f(vorticityProgram.uniforms.dt, dt);
blit(velocity.write[1]);
velocity.swap();
divergenceProgram.bind();
gl.uniform2f(divergenceProgram.uniforms.texelSize, 1.0 / textureWidth, 1.0 / textureHeight);
gl.uniform1i(divergenceProgram.uniforms.uVelocity, velocity.read[2]);
blit(divergence[1]);
clearProgram.bind();
let pressureTexId = pressure.read[2];
gl.activeTexture(gl.TEXTURE0 + pressureTexId);
gl.bindTexture(gl.TEXTURE_2D, pressure.read[0]);
gl.uniform1i(clearProgram.uniforms.uTexture, pressureTexId);
gl.uniform1f(clearProgram.uniforms.value, config.PRESSURE_DISSIPATION);
blit(pressure.write[1]);
pressure.swap();
pressureProgram.bind();
gl.uniform2f(pressureProgram.uniforms.texelSize, 1.0 / textureWidth, 1.0 / textureHeight);
gl.uniform1i(pressureProgram.uniforms.uDivergence, divergence[2]);
pressureTexId = pressure.read[2];
gl.uniform1i(pressureProgram.uniforms.uPressure, pressureTexId);
gl.activeTexture(gl.TEXTURE0 + pressureTexId);
for (let i = 0; i < config.PRESSURE_ITERATIONS; i++) {
gl.bindTexture(gl.TEXTURE_2D, pressure.read[0]);
blit(pressure.write[1]);
pressure.swap();
}
gradienSubtractProgram.bind();
gl.uniform2f(gradienSubtractProgram.uniforms.texelSize, 1.0 / textureWidth, 1.0 / textureHeight);
gl.uniform1i(gradienSubtractProgram.uniforms.uPressure, pressure.read[2]);
gl.uniform1i(gradienSubtractProgram.uniforms.uVelocity, velocity.read[2]);
blit(velocity.write[1]);
velocity.swap();
gl.viewport(0, 0, canvas.width, canvas.height);
displayProgram.bind();
gl.uniform1i(displayProgram.uniforms.uTexture, density.read[2]);
blit(null);
requestAnimationFrame(update);
}
function splat(x, y, dx, dy, color) {
splatProgram.bind();
gl.uniform1i(splatProgram.uniforms.uTarget, velocity.read[2]);
gl.uniform1f(splatProgram.uniforms.aspectRatio, canvas.width / canvas.height);
gl.uniform2f(splatProgram.uniforms.point, x / canvas.width, 1.0 - y / canvas.height);
gl.uniform3f(splatProgram.uniforms.color, dx, -dy, 1.0);
gl.uniform1f(splatProgram.uniforms.radius, config.SPLAT_RADIUS);
blit(velocity.write[1]);
velocity.swap();
gl.uniform1i(splatProgram.uniforms.uTarget, density.read[2]);
gl.uniform3f(splatProgram.uniforms.color, color[0] * 0.3, color[1] * 0.3, color[2] * 0.3);
blit(density.write[1]);
density.swap();
}
function multipleSplats(amount) {
for (let i = 0; i < amount; i++) {
const color = [Math.random() * 10, Math.random() * 10, Math.random() * 10];
const x = canvas.width * Math.random();
const y = canvas.height * Math.random();
const dx = 1000 * (Math.random() - 0.5);
const dy = 1000 * (Math.random() - 0.5);
splat(x, y, dx, dy, color);
}
}
function resizeCanvas() {
if (canvas.width != canvas.clientWidth || canvas.height != canvas.clientHeight) {
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;
// canvas.width = document.getElementById("myCanvas").width;
// canvas.height = document.getElementById("myCanvas").height;
initFramebuffers();
}
// console.log(canvas.width, canvas.height);
// console.log("client height: ", canvas.clientWidth, canvas.clientHeight);
}
canvas.addEventListener('mousemove', (e) => {
pointers[0].moved = pointers[0].down;
pointers[0].dx = (e.offsetX - pointers[0].x) * 10.0;
pointers[0].dy = (e.offsetY - pointers[0].y) * 10.0;
pointers[0].x = e.offsetX;
pointers[0].y = e.offsetY;
});
canvas.addEventListener('touchmove', (e) => {
e.preventDefault();
const touches = e.targetTouches;
for (let i = 0; i < touches.length; i++) {
let pointer = pointers[i];
pointer.moved = pointer.down;
pointer.dx = (touches[i].pageX - pointer.x) * 10.0;
pointer.dy = (touches[i].pageY - pointer.y) * 10.0;
pointer.x = touches[i].pageX;
pointer.y = touches[i].pageY;
}
}, false);
canvas.addEventListener('mousedown', () => {
pointers[0].down = true;
pointers[0].color = [Math.random() + 0.2, Math.random() + 0.2, Math.random() + 0.2];
});
canvas.addEventListener('touchstart', (e) => {
e.preventDefault();
const touches = e.targetTouches;
for (let i = 0; i < touches.length; i++) {
if (i >= pointers.length)
pointers.push(new pointerPrototype());
pointers[i].id = touches[i].identifier;
pointers[i].down = true;
pointers[i].x = touches[i].pageX;
pointers[i].y = touches[i].pageY;
pointers[i].color = [Math.random() + 0.2, Math.random() + 0.2, Math.random() + 0.2];
}
});
window.addEventListener('mouseup', () => {
pointers[0].down = false;
});
window.addEventListener('touchend', (e) => {
const touches = e.changedTouches;
for (let i = 0; i < touches.length; i++)
for (let j = 0; j < pointers.length; j++)
if (touches[i].identifier == pointers[j].id)
pointers[j].down = false;
});
})
<!DOCTYPE html>
<html>
<head>
<title>Fluid Sim</title>
<style type="text/css">
html,
body {
overflow: hidden;
}
body {
margin: 0;
position: absolute;
width: 100%;
height: 100%;
}
canvas {
width: 100%;
height: 100%;
}
</style>
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.9.0/p5.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.9.0/addons/p5.dom.js"></script>
<!-- <script id="text/javascript" src="circle.js"></script> -->
</head>
<body>
<div id="container"></div>
</body>
<script id="text/javascript">
function setup() {
let cnv = createCanvas(window.innerWidth, window.innerHeight, WEBGL);
cnv.id("myCanvas");
noStroke();
// cnv.position(0, 0);
// document.getElementById('container').appendChild(cnv.canvas);
}
function draw() {
background(127);
push();
translate(-width / 2, -height / 2, 0)
fill(255);
ellipse(windowWidth / 2, windowHeight / 2, 100, 100);
pop();
}
function windowResized() {
resizeCanvas(windowWidth, windowHeight);
}
</script>
<script id="text/javascript" src="smoke.js"></script>
</html>
1. You created a canvas twice.
2. I don't think that regular JavaScript can communicate with p5.
That's it.
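One way to act on point 1 is to keep a single canvas and initialize the smoke code from p5's setup(), after createCanvas() has run, rather than in a separate window load handler. A minimal sketch, assuming the body of smoke.js is wrapped in a hypothetical initSmoke(canvasElement) function instead of its own 'load' listener:
function setup() {
  const cnv = createCanvas(window.innerWidth, window.innerHeight, WEBGL);
  cnv.id("myCanvas");
  noStroke();
  // hand the underlying <canvas> DOM element to the smoke code once it exists
  initSmoke(cnv.elt); // initSmoke is a hypothetical wrapper, not part of the original smoke.js
}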

Trying to pass a texture from a framebuffer results in GL_INVALID_OPERATION : glDrawArrays: Source and destination textures of the draw are the same

I'm new to vanilla WebGL and trying to utilize framebuffers for post processing/advanced shaders. When I run my code I get the warning:
GL_INVALID_OPERATION : glDrawArrays: Source and destination textures of the draw are the same.
Here's my code so far. If anyone could point me in the right direction on how to correctly use framebuffers to pass textures to the next pass, that would be great. It's wrapped in a Vue.js component, but that shouldn't matter.
<template lang='pug'>
canvas
</template>
<script>
import { mapGetters } from 'vuex'
export default {
name: 'webGl',
created ()
{
this.static = {
af: null,
gl: null,
fr: 0,
shaders:
{
vertex: `
attribute vec2 a_position;
void main() {
gl_Position = vec4(a_position, 0, 1);
}`,
fragment: `
#ifdef GL_ES
precision mediump float;
#endif
uniform float u_time;
uniform vec2 u_size;
uniform int u_frame;
uniform sampler2D u_texture;
const int maxIter = 15;
vec2 getPos() {
vec2 pos = ( gl_FragCoord.xy / u_size.xy ) - vec2(0.5);
pos.x *= u_size.x / u_size.y;
return pos;
}
vec2 cmult(vec2 a, vec2 b){
return vec2(a.x*b.x-a.y*b.y,a.x*b.y+a.y*b.x);
}
float length2(vec2 v){
return v.x*v.x+v.y*v.y;
}
vec2 map(vec2 pos){
return pos;
return vec2(pos.x * sqrt(1.-pos.y*pos.y*3.), pos.y * sqrt(1.-pos.x*pos.x*2.));
}
vec2 iterate(vec2 p, vec2 c){
vec2 p2 = cmult(p,p);
return p2 + c;
}
bool checkAbort(vec2 p, vec2 c){
return length2(p) > 400.;
}
float l2 = log(2.);
vec4 defaultColor ( void )
{
return vec4(0.35,0.35,0.35,1.0);
}
vec4 color(int iterations, vec2 p){
float col = .20 + (float(iterations) - log(log(length2(p)))/l2) / float(maxIter);
return defaultColor() * vec4(col);
}
void main( void ){
if (u_frame < 300)
{
vec2 c = map(getPos())*0.8 - vec2(0.5);
vec2 p = c + vec2(sin(-u_time), cos(u_time)) * 0.2;
float m;
for(int i = 0; i < maxIter ;i++) {
p = iterate(p,c);
if(checkAbort(p,c)){
gl_FragColor = color(i,p);
return;
}
}
gl_FragColor = defaultColor();
}
else
{
gl_FragColor = texture2D(u_texture, gl_FragCoord.xy / u_size.xy);
}
}`,
program: null,
attributes: {},
uniforms: {},
time: 0
}
}
},
mounted ()
{
this.setInitWebGlContext()
this.setInitShaderProgram()
this.setInitAttributes(['a_position'])
this.setInitUniforms(['u_size', 'u_time', 'u_frame', 'u_texture'])
this.setInitGeometryBuffer()
this.setRenderLoop()
},
beforeDestroy ()
{
window.cancelAnimationFrame(this.static.af)
},
computed:
{
...mapGetters([
'getCalcs'
])
},
methods:
{
setInitWebGlContext ()
{
this.static.gl = this.$el.getContext('webgl')
if (this.static.gl === null)
{
console.log('Unable to initialize WebGL. Your browser or machine may not support it.')
}
},
setInitShaderProgram ()
{
const gl = this.static.gl
this.static.shaders.program = gl.createProgram()
const vertexShader = gl.createShader(gl.VERTEX_SHADER)
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER)
gl.shaderSource(vertexShader, this.static.shaders.vertex)
gl.shaderSource(fragmentShader, this.static.shaders.fragment)
gl.compileShader(vertexShader)
gl.compileShader(fragmentShader)
gl.attachShader(this.static.shaders.program, vertexShader)
gl.attachShader(this.static.shaders.program, fragmentShader)
gl.linkProgram(this.static.shaders.program)
gl.useProgram(this.static.shaders.program)
},
setInitAttributes (keys)
{
const gl = this.static.gl
const program = this.static.shaders.program
for (let i = 0; i < keys.length; i++)
{
this.static.shaders.attributes[keys[i]] = gl.getAttribLocation(program, keys[i])
}
},
setInitUniforms (keys)
{
const gl = this.static.gl
const program = this.static.shaders.program
for (let i = 0; i < keys.length; i++)
{
this.static.shaders.uniforms[keys[i]] = gl.getUniformLocation(program, keys[i])
}
},
setInitGeometryBuffer ()
{
const gl = this.static.gl
const buffer = gl.createBuffer()
gl.bindBuffer(this.static.gl.ARRAY_BUFFER, buffer)
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0]), gl.STATIC_DRAW)
},
setCreateTexture ()
{
const gl = this.static.gl
const width = this.getCalcs.vw
const height = this.getCalcs.vh
const texture = gl.createTexture()
gl.bindTexture(gl.TEXTURE_2D, texture)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null)
return texture
},
setCreateFramebuffer ()
{
const gl = this.static.gl
const buffer = gl.createFramebuffer()
gl.bindFramebuffer(gl.FRAMEBUFFER, buffer)
const texture = this.setCreateTexture()
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0)
return {
texture: texture,
buffer: buffer
}
},
setRenderLoop ()
{
this.static.af = window.requestAnimationFrame(this.setRenderLoop)
const gl = this.static.gl
const fb = this.static.fb
const width = this.getCalcs.vw
const height = this.getCalcs.vh
const attributes = this.static.shaders.attributes
const uniforms = this.static.shaders.uniforms
const mouse = this.static.shaders.mouse
const fr = this.static.fr
this.$el.width = width
this.$el.height = height
const bufferA = this.setCreateFramebuffer()
gl.viewport(0, 0, width, height)
gl.clearColor(0.0, 0.0, 0.0, 0.0)
gl.clear(gl.COLOR_BUFFER_BIT)
gl.enableVertexAttribArray(attributes.a_position)
gl.vertexAttribPointer(attributes.a_position, 2, gl.FLOAT, false, 0, 0)
gl.uniform2f(uniforms.u_size, width, height)
gl.uniform1f(uniforms.u_time, window.performance.now() / 3000)
gl.uniform1i(uniforms.u_frame, fr)
gl.drawArrays(gl.TRIANGLES, 0, 6)
gl.bindTexture(gl.TEXTURE_2D, bufferA.texture)
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
gl.viewport(0, 0, width, height)
gl.clearColor(0.0, 0.0, 0.0, 0.0)
gl.clear(gl.COLOR_BUFFER_BIT)
gl.enableVertexAttribArray(attributes.a_position)
gl.vertexAttribPointer(attributes.a_position, 2, gl.FLOAT, false, 0, 0)
gl.uniform2f(uniforms.u_size, width, height)
gl.uniform1f(uniforms.u_time, window.performance.now() / 3000)
gl.uniform1i(uniforms.u_frame, fr)
gl.uniform1i(uniforms.u_texture, 0)
gl.drawArrays(gl.TRIANGLES, 0, 6)
this.static.fr++
}
}
}
</script>
I'm now some steps further; the code below is now working.
<template lang='pug'>
canvas
</template>
<script>
import { mapGetters } from 'vuex'
import forEach from 'lodash/forEach'
export default {
name: 'webGl',
created ()
{
this.static = {
af: null,
gl: null,
fr: 0,
shaders:
{
noise:
{
vertex: `
attribute vec2 a_position;
void main() {
gl_Position = vec4(a_position, 0, 1);
}`,
fragment: `
#ifdef GL_ES
precision mediump float;
#endif
uniform vec2 u_size;
vec2 hash( vec2 p ) {
p = vec2( dot(p,vec2(127.1,311.7)),
dot(p,vec2(269.5,183.3)));
return -1.0 + 2.0 * fract(sin(p) * 43758.5453123);
}
float noise( vec2 p ) {
const float K1 = 0.366025404;
const float K2 = 0.211324865;
vec2 i = floor(p + (p.x + p.y) * K1);
vec2 a = p - i + (i.x + i.y) * K2;
vec2 o = step(a.yx, a.xy);
vec2 b = a - o + K2;
vec2 c = a - 1.0 + 2.0 * K2;
vec3 h = max(0.5 - vec3(dot(a,a), dot(b,b), dot(c,c)), 0.0);
vec3 n = h * h * h * h * vec3(dot(a, hash(i + 0.0)), dot(b, hash(i + o)), dot(c, hash(i + 1.0)));
return dot(n, vec3(70.0));
}
void main( void ) {
vec2 vUv = gl_FragCoord.xy / u_size.xy;
vec3 rnd = vec3(noise(16.0 * vUv + 1.1), noise(16.0 * vUv + 2.2), noise(16.0 * vUv + 3.3));
gl_FragColor = vec4(rnd, 1.0);
}`,
program: null,
attributes:
{
a_position: null
},
uniforms:
{
u_size: null
}
},
fluid:
{
vertex: `
attribute vec2 a_position;
void main() {
gl_Position = vec4(a_position, 0, 1);
}`,
fragment: `
#ifdef GL_ES
precision mediump float;
#endif
uniform vec2 u_size;
uniform sampler2D u_image;
vec2 normz(vec2 x) {
return x == vec2(0.0, 0.0) ? vec2(0.0, 0.0) : normalize(x);
}
vec3 advect(vec2 ab, vec2 vUv, vec2 step, float sc) {
vec2 aUv = vUv - ab * sc * step;
const float _G0 = 0.25; // center weight
const float _G1 = 0.125; // edge-neighbors
const float _G2 = 0.0625; // vertex-neighbors
// 3x3 neighborhood coordinates
float step_x = step.x;
float step_y = step.y;
vec2 n = vec2(0.0, step_y);
vec2 ne = vec2(step_x, step_y);
vec2 e = vec2(step_x, 0.0);
vec2 se = vec2(step_x, -step_y);
vec2 s = vec2(0.0, -step_y);
vec2 sw = vec2(-step_x, -step_y);
vec2 w = vec2(-step_x, 0.0);
vec2 nw = vec2(-step_x, step_y);
vec3 uv = texture2D(u_image, fract(aUv)).xyz;
vec3 uv_n = texture2D(u_image, fract(aUv+n)).xyz;
vec3 uv_e = texture2D(u_image, fract(aUv+e)).xyz;
vec3 uv_s = texture2D(u_image, fract(aUv+s)).xyz;
vec3 uv_w = texture2D(u_image, fract(aUv+w)).xyz;
vec3 uv_nw = texture2D(u_image, fract(aUv+nw)).xyz;
vec3 uv_sw = texture2D(u_image, fract(aUv+sw)).xyz;
vec3 uv_ne = texture2D(u_image, fract(aUv+ne)).xyz;
vec3 uv_se = texture2D(u_image, fract(aUv+se)).xyz;
return _G0*uv + _G1*(uv_n + uv_e + uv_w + uv_s) + _G2*(uv_nw + uv_sw + uv_ne + uv_se);
}
void main( void ) {
const float _K0 = -20.0/6.0; // center weight
const float _K1 = 4.0/6.0; // edge-neighbors
const float _K2 = 1.0/6.0; // vertex-neighbors
const float cs = -0.6; // curl scale
const float ls = 0.05; // laplacian scale
const float ps = -0.8; // laplacian of divergence scale
const float ds = -0.05; // divergence scale
const float dp = -0.04; // divergence update scale
const float pl = 0.3; // divergence smoothing
const float ad = 6.0; // advection distance scale
const float pwr = 1.0; // power when deriving rotation angle from curl
const float amp = 1.0; // self-amplification
const float upd = 0.8; // update smoothing
const float sq2 = 0.6; // diagonal weight
vec2 vUv = gl_FragCoord.xy / u_size.xy;
vec2 texel = 1. / u_size.xy;
float step_x = texel.x;
float step_y = texel.y;
vec2 n = vec2(0.0, step_y);
vec2 ne = vec2(step_x, step_y);
vec2 e = vec2(step_x, 0.0);
vec2 se = vec2(step_x, -step_y);
vec2 s = vec2(0.0, -step_y);
vec2 sw = vec2(-step_x, -step_y);
vec2 w = vec2(-step_x, 0.0);
vec2 nw = vec2(-step_x, step_y);
vec3 uv = texture2D(u_image, fract(vUv)).xyz;
vec3 uv_n = texture2D(u_image, fract(vUv+n)).xyz;
vec3 uv_e = texture2D(u_image, fract(vUv+e)).xyz;
vec3 uv_s = texture2D(u_image, fract(vUv+s)).xyz;
vec3 uv_w = texture2D(u_image, fract(vUv+w)).xyz;
vec3 uv_nw = texture2D(u_image, fract(vUv+nw)).xyz;
vec3 uv_sw = texture2D(u_image, fract(vUv+sw)).xyz;
vec3 uv_ne = texture2D(u_image, fract(vUv+ne)).xyz;
vec3 uv_se = texture2D(u_image, fract(vUv+se)).xyz;
vec3 lapl = _K0*uv + _K1*(uv_n + uv_e + uv_w + uv_s) + _K2*(uv_nw + uv_sw + uv_ne + uv_se);
float sp = ps * lapl.z;
float curl = uv_n.x - uv_s.x - uv_e.y + uv_w.y + sq2 * (uv_nw.x + uv_nw.y + uv_ne.x - uv_ne.y + uv_sw.y - uv_sw.x - uv_se.y - uv_se.x);
float sc = cs * sign(curl) * pow(abs(curl), pwr);
float div = uv_s.y - uv_n.y - uv_e.x + uv_w.x + sq2 * (uv_nw.x - uv_nw.y - uv_ne.x - uv_ne.y + uv_sw.x + uv_sw.y + uv_se.y - uv_se.x);
float sd = uv.z + dp * div + pl * lapl.z;
vec2 norm = normz(uv.xy);
vec3 ab = advect(vec2(uv.x, uv.y), vUv, texel, ad);
float ta = amp * ab.x + ls * lapl.x + norm.x * sp + uv.x * ds * sd;
float tb = amp * ab.y + ls * lapl.y + norm.y * sp + uv.y * ds * sd;
float a = ta * cos(sc) - tb * sin(sc);
float b = ta * sin(sc) + tb * cos(sc);
vec3 abd = upd * uv + (1.0 - upd) * vec3(a,b,sd);
abd.z = clamp(abd.z, -1.0, 1.0);
abd.xy = clamp(length(abd.xy) > 1.0 ? normz(abd.xy) : abd.xy, -1.0, 1.0);
gl_FragColor = vec4(abd, 0.0);
}`,
program: null,
attributes:
{
a_position: null
},
uniforms:
{
u_size: null
}
},
colorize:
{
vertex: `
attribute vec2 a_position;
void main() {
gl_Position = vec4(a_position, 0, 1);
}`,
fragment: `
#ifdef GL_ES
precision mediump float;
#endif
uniform vec2 u_size;
uniform sampler2D u_image;
void main( void ) {
vec2 texel = 1. / u_size.xy;
vec2 uv = gl_FragCoord.xy / u_size.xy;
vec3 c = texture2D(u_image, uv).xyz;
vec3 norm = normalize(c);
vec3 div = vec3(0.1) * norm.z;
vec3 rbcol = 0.5 + 0.6 * cross(norm.xyz, vec3(0.5, -0.4, 0.5));
gl_FragColor = vec4(rbcol + div, 1.0);
}`,
program: null,
attributes:
{
a_position: null
},
uniforms: {
u_size: null
}
}
},
textures:
{
default: null
}
}
},
mounted ()
{
this.setInitWebGlContext()
this.setInitGeometryBuffer()
this.setInitShaderPrograms()
this.setRenderLoop()
},
beforeDestroy ()
{
window.cancelAnimationFrame(this.static.af)
},
computed:
{
...mapGetters([
'getCalcs'
])
},
methods:
{
setInitWebGlContext ()
{
this.static.gl = this.$el.getContext('webgl')
if (this.static.gl === null)
{
console.log('Unable to initialize WebGL. Your browser or machine may not support it.')
}
},
setInitShaderPrograms ()
{
const gl = this.static.gl
forEach(this.static.shaders, shader =>
{
shader.program = gl.createProgram()
const vertexShader = gl.createShader(gl.VERTEX_SHADER)
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER)
gl.shaderSource(vertexShader, shader.vertex)
gl.shaderSource(fragmentShader, shader.fragment)
gl.compileShader(vertexShader)
gl.compileShader(fragmentShader)
gl.attachShader(shader.program, vertexShader)
gl.attachShader(shader.program, fragmentShader)
gl.linkProgram(shader.program)
})
},
setInitGeometryBuffer ()
{
const gl = this.static.gl
const buffer = gl.createBuffer()
gl.bindBuffer(this.static.gl.ARRAY_BUFFER, buffer)
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0]), gl.STATIC_DRAW)
},
setDraw (width, height)
{
const gl = this.static.gl
gl.clearColor(0.0, 0.0, 0.0, 0.0)
gl.clear(gl.COLOR_BUFFER_BIT)
gl.viewport(0, 0, width, height)
gl.drawArrays(gl.TRIANGLES, 0, 6)
},
setProgram (shader)
{
const gl = this.static.gl
const program = this.static.shaders[shader].program
const attributes = this.static.shaders[shader].attributes
const uniforms = this.static.shaders[shader].uniforms
gl.useProgram(program)
forEach(attributes, (attribute, key) =>
{
attributes[key] = gl.getAttribLocation(program, key)
})
forEach(uniforms, (uniform, key) =>
{
uniforms[key] = gl.getUniformLocation(program, key)
})
},
setFrameBuffer (width, height)
{
const gl = this.static.gl
const texture = gl.createTexture()
gl.bindTexture(gl.TEXTURE_2D, texture)
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)
const framebuffer = gl.createFramebuffer()
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer)
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0)
return {
frameBuffer: framebuffer,
texture: texture
}
},
setNoise (width, height)
{
const gl = this.static.gl
const attributes = this.static.shaders.noise.attributes
const uniforms = this.static.shaders.noise.uniforms
gl.enableVertexAttribArray(attributes.a_position)
gl.vertexAttribPointer(attributes.a_position, 2, gl.FLOAT, false, 0, 0)
gl.uniform2f(uniforms.u_size, width, height)
},
setFluid (width, height)
{
const gl = this.static.gl
const attributes = this.static.shaders.fluid.attributes
const uniforms = this.static.shaders.fluid.uniforms
gl.enableVertexAttribArray(attributes.a_position)
gl.vertexAttribPointer(attributes.a_position, 2, gl.FLOAT, false, 0, 0)
gl.uniform2f(uniforms.u_size, width, height)
},
setColorize (width, height)
{
const gl = this.static.gl
const attributes = this.static.shaders.colorize.attributes
const uniforms = this.static.shaders.colorize.uniforms
gl.enableVertexAttribArray(attributes.a_position)
gl.vertexAttribPointer(attributes.a_position, 2, gl.FLOAT, false, 0, 0)
gl.uniform2f(uniforms.u_size, width, height)
},
setRenderLoop ()
{
this.static.af = window.requestAnimationFrame(this.setRenderLoop)
const gl = this.static.gl
const width = this.getCalcs.vw
const height = this.getCalcs.vh
this.$el.width = width
this.$el.height = height
if (!this.static.fr)
{
const noiseBuffer = this.setFrameBuffer(width, height)
this.setProgram('noise')
this.setNoise(width, height)
this.setDraw(width, height)
this.static.textures.default = noiseBuffer.texture
}
const fluidBuffer = this.setFrameBuffer(width, height)
gl.bindTexture(gl.TEXTURE_2D, this.static.textures.default)
this.setProgram('fluid')
this.setFluid(width, height)
this.setDraw(width, height)
this.static.textures.default = fluidBuffer.texture
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
this.setProgram('colorize')
this.setColorize(width, height)
this.setDraw(width, height)
this.static.fr++
}
}
}
</script>
However, I'm trying to adapt a shader from Shadertoy (https://www.shadertoy.com/view/XddSRX), and if I run my code it behaves very differently.
The issue is exactly as stated in the error.
Source and destination textures of the draw are the same.
Looking at your code, there is one shader that references a texture, and there is one texture; it is attached to the framebuffer AND bound to texture unit 0, the default. So when you draw, that texture is being used both as an input (u_texture) and as the output (the current framebuffer). That's not allowed.
The simple solution is you need another texture. Bind that texture when drawing to the framebuffer.
The better solution is to use 2 different shader programs: one for drawing to the framebuffer that uses no texture as input, and another for drawing to the canvas. As it is, you have one shader that branches on u_frame. Remove that branch and split things into 2 shader programs: the one that computes colors (the u_frame < 300 path) and the one that uses a texture. Use the computing one to draw to the framebuffer and the texture one to draw the framebuffer's texture to the canvas.
A few links that may or may not be helpful: drawing multiple things, render targets, image processing.
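For the "another texture" route, the usual pattern is to ping-pong between two framebuffer/texture pairs: sample one texture while rendering into the other, then swap. A minimal sketch, assuming a helper like the setCreateFramebuffer method above that returns { texture, buffer }:
let src = this.setCreateFramebuffer(); // read from this one
let dst = this.setCreateFramebuffer(); // render into this one

// pass 1: render into dst while sampling src (input and output are different textures)
gl.bindFramebuffer(gl.FRAMEBUFFER, dst.buffer);
gl.bindTexture(gl.TEXTURE_2D, src.texture);
gl.drawArrays(gl.TRIANGLES, 0, 6);

// pass 2: draw dst's texture to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, dst.texture);
gl.drawArrays(gl.TRIANGLES, 0, 6);

// swap so the next frame reads what was just written
[src, dst] = [dst, src];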

How do people pass functions to OpenGL ES GLSL functions?

I want to rotate and translate a 2d shape made with signed distance functions.
The docs say this is the method:
vec3 opTx( in vec3 p, in transform t, in sdf3d primitive )
{
return primitive( invert(t)*p );
}
It looks to me like primitive is some kind of function (or a struct) I can call. Is there a way to pass functions like that (or how does this make sense)?
Firstly, I don't know what the transform and sdf3d types are, or what the invert function is. Secondly, how do I apply this to 2D?
const fShaderSource = `#version 300 es
precision mediump float;
uniform vec2 u_resolution;
out vec4 outColor;
float sdLine( in vec2 p, in vec2 a, in vec2 b )
{
vec2 pa = p-a, ba = b-a;
float h = clamp( dot(pa,ba)/dot(ba,ba), 0.0, 1.0 );
return length( pa - ba*h );
}
vec2 screenToWorld(vec2 screen) {
vec2 result = 2.0 * (screen/u_resolution.xy - 0.5);
result.x *= u_resolution.x/u_resolution.y;
return result;
}
void main() {
vec2 p = screenToWorld(gl_FragCoord.xy);
float sd = sdLine(p, vec2(0.0), vec2(0.0, 0.5));
vec3 col = vec3(0.0);
col += 1.0 - smoothstep(0.0, 0.04, abs(sd));
outColor = vec4(col, 1.0);
}
`;
const vShaderSource = `#version 300 es
precision mediump float;
in vec2 a_position;
uniform vec2 u_resolution;
void main() {
gl_Position = vec4(a_position, 0, 1);
}
`;
main(document.getElementById('app'));
function main(element) {
const canvas = document.createElement('canvas'),
gl = canvas.getContext('webgl2');
element.append(canvas);
const displayWidth = canvas.clientWidth,
displayHeight = canvas.clientHeight;
canvas.width = displayWidth;
canvas.height = displayHeight;
let graphics = new Graphics({width: displayWidth, height: displayHeight}, gl);
new Loop(() => {
graphics.render();
}).start();
}
function Graphics(state, gl) {
const { width, height } = state;
let vShader = createShader(gl, gl.VERTEX_SHADER, vShaderSource);
let fShader = createShader(gl, gl.FRAGMENT_SHADER, fShaderSource);
let program = createProgram(gl, vShader, fShader);
let posAttrLocation = gl.getAttribLocation(program, "a_position");
let posBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, posBuffer);
/*
(-1, 1).( 1, 1)
.
(-1,-1).( 1,-1)
*/
let positions = [
-1, 1,
-1, -1,
1, -1,
-1, 1,
1,-1,
1, 1
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
let vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(posAttrLocation);
let size = 2,
type = gl.FLOAT,
normalize = false,
stride = 0,
offset = 0;
gl.vertexAttribPointer(posAttrLocation,
size,
type,
normalize,
stride,
offset);
let resUniformLocation = gl.getUniformLocation(program, "u_resolution");
gl.clearColor(0, 0, 0, 0);
this.render = () => {
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(program);
gl.uniform2f(resUniformLocation, gl.canvas.width, gl.canvas.height);
gl.bindVertexArray(vao);
gl.drawArrays(gl.TRIANGLES, 0, 6);
};
}
function createShader(gl, type, source) {
let shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
let success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if (success) {
return shader;
}
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
};
function createProgram(gl, vShader, fShader) {
let program = gl.createProgram();
gl.attachShader(program, vShader);
gl.attachShader(program, fShader);
gl.linkProgram(program);
let success = gl.getProgramParameter(program, gl.LINK_STATUS);
if (success) {
return program;
}
console.error(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
return null;
}
// Loop Library
function Loop(fn) {
const perf = window.performance !== undefined ? window.performance : Date;
const now = () => perf.now();
const raf = window.requestAnimationFrame;
let running = false,
lastUpdate = now(),
frame = 0;
this.start = () => {
if (running) {
return this;
}
running = true;
lastUpdate = now();
frame = raf(tick);
return this;
};
this.stop = () => {
running = false;
if (frame != 0) {
raf.cancel(frame);
}
frame = 0;
return this;
};
const tick = () => {
frame = raf(tick);
const time = now();
const dt = time - lastUpdate;
fn(dt);
lastUpdate = time;
};
}
#app canvas {
position: fixed;
top: 50%;
bottom: 0;
left: 50%;
right: 0;
width: 100vmin;
height: 70vmin;
transform: translate(-50%, -25%);
image-rendering: optimizeSpeed;
cursor: none;
margin: auto;
}
<div id="app">
</div>
GLSL does not allow you to pass functions as parameters. The snippet you linked is more of a macro, where you are supposed to manually inline the primitive.
Just above the code you copy-pasted, the definition of transform is stated:
This code below assumes that transform encodes only a rotation and a translation (as a 3x4 matrix for example, or as a quaternion and a vector), and that it does not contain any scaling factors in it.
To work in 2D, you use 3x3 matrices, where the upper-left 2x2 part encodes a rotation and the first two components of the last column encode a translation (in the GLSL mat3 constructor below, that column is written as the last line, since GLSL matrices are filled column by column).
Putting it all together: (replace the mainImage function of https://www.shadertoy.com/view/MldcD7 with this)
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
vec2 p = (2.0*fragCoord-iResolution.xy)/iResolution.y;
p *= 1.1;
// iFrame is a uniform that shadertoy provides: the current frame number
float angle = float(iFrame) / 60.0;
// Rotation part: rotate by `angle`, or once every 60 fps.
// Translation part: Move across the screen left to right, every 60 fps.
mat3 transform = mat3(
cos(angle), sin(angle), 0.0,
-sin(angle),cos(angle), 0.0,
(float(iFrame % 60)/60.0 - 0.5) * 2.0, 0.0, 1.0
);
vec2 tri = vec2(0.3,-1.1); // width, height
// Here, we first apply the inverse transform to our input, then pass the resulting point to our primitive, here sdTriangleIsosceles
float d = sdTriangleIsosceles( tri, (inverse(transform) * vec3(p, 1.0)).xy );
vec3 col = vec3(1.0) - sign(d)*vec3(0.1,0.4,0.7);
col *= 1.0 - exp(-2.0*abs(d));
col *= 0.8 + 0.2*cos(140.0*d);
col = mix( col, vec3(1.0), 1.0-smoothstep(0.0,0.02,abs(d)) );
fragColor = vec4(col*1.2,1.0);
}
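To make the "inline the primitive yourself" idea concrete in 2D: instead of passing a function, you write the call out by hand after applying the inverse transform. A small sketch, using a hypothetical sdCircle primitive (inverse() for mat3 is available in GLSL ES 3.00):
float sdCircle(in vec2 p, in float r) {
    return length(p) - r;
}

// opTx inlined by hand: transform the sample point, then call the concrete primitive
float scene(in vec2 p, in mat3 transform) {
    vec2 q = (inverse(transform) * vec3(p, 1.0)).xy;
    return sdCircle(q, 0.5);
}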

How to wrap space to make a black hole using uv textures in webgl

I render a grid texture. I want to manipulate the uv coordinates (vQuadCoord) in the fragment shader to make a black hole effect, that is, make the gaps between lines grow wider as they approach the center, with a circular falloff.
I think this should be possible, since if I do vQuadCoord = vQuadCoord * vQuadCoord it achieves a similar effect, but in the corners.
const fShaderSource = `#version 300 es
precision mediump float;
out vec4 outColor;
uniform sampler2D u_texture;
in vec2 vQuadCoord;
void main() {
outColor = texture(u_texture, vQuadCoord);
}
`;
const vShaderSource = `#version 300 es
precision mediump float;
in vec2 a_position;
out vec2 vQuadCoord;
void main() {
vQuadCoord = (a_position + 1.0) / 2.0;
gl_Position = vec4(a_position, 0, 1);
}
`;
main(document.getElementById('app'));
function main(element) {
const canvas = document.createElement('canvas'),
gl = canvas.getContext('webgl2');
element.append(canvas);
const displayWidth = canvas.clientWidth,
displayHeight = canvas.clientHeight;
canvas.width = displayWidth;
canvas.height = displayHeight;
let graphics = new Graphics({width: displayWidth, height: displayHeight}, gl);
new Loop(() => {
graphics.render();
}).start();
}
function Graphics(state, gl) {
const { width, height } = state;
gl.clearColor(0, 0, 0, 0);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
//gl.blendFunc(gl.SRC_ALPHA, gl.ONE);
gl.enable(gl.BLEND);
gl.disable(gl.DEPTH_TEST);
let minibatch = [];
const redText = makeGlQuad(gl, fShaderSource, canvasTexture());
this.render = () => {
minibatch.push(redText);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clear(gl.COLOR_BUFFER_BIT);
minibatch.forEach(({
program,
resUniformLocation,
vao,
glTexture
}) => {
gl.useProgram(program);
gl.uniform2f(resUniformLocation, gl.canvas.width, gl.canvas.height);
if (glTexture) {
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, glTexture);
}
gl.bindVertexArray(vao);
gl.drawArrays(gl.TRIANGLES, 0, 6);
});
minibatch = [];
};
}
function makeGlQuad(gl, fShaderSource, texture) {
let vShader = createShader(gl, gl.VERTEX_SHADER, vShaderSource);
let fShader = createShader(gl, gl.FRAGMENT_SHADER, fShaderSource);
let program = createProgram(gl, vShader, fShader);
let posAttrLocation = gl.getAttribLocation(program, "a_position");
let posBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, posBuffer);
let left = -1,
right = 1,
down = -1,
up = 1;
/*
(-1, 1).( 1, 1)
.
(-1,-1).( 1,-1)
*/
let positions = [
left, down,
left, up,
right, down,
left, up,
right, down,
right, up
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
let vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(posAttrLocation);
let size = 2,
type = gl.FLOAT,
normalize = false,
stride = 0,
offset = 0;
gl.vertexAttribPointer(posAttrLocation,
size,
type,
normalize,
stride,
offset);
let glTexture;
if (texture) {
glTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, glTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture);
//gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 255, 255]));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
let resUniformLocation = gl.getUniformLocation(program, "u_resolution");
let texUniformLocation = gl.getUniformLocation(program, "u_texture");
return {
program,
resUniformLocation,
vao,
glTexture
}
}
function canvasTexture() {
return withCanvasTexture(256, 256, (w, h, canvas, ctx) => {
const gap = w * 0.07;
ctx.fillStyle = 'green';
ctx.fillRect(0, 0, 10, 10);
ctx.strokeStyle = 'red';
ctx.lineWidth = 1;
ctx.beginPath();
for (let i = 0; i < w; i+= gap) {
ctx.moveTo(i, 0);
ctx.lineTo(i, h);
}
for (let i = 0; i < h; i+= gap) {
ctx.moveTo(0, i);
ctx.lineTo(w, i);
}
ctx.stroke();
return canvas;
});
function withCanvasTexture(width, height, f) {
var canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
f(width, height, canvas, canvas.getContext('2d'));
const texture = canvas;
document.body.append(canvas);
return texture;
}
}
function createShader(gl, type, source) {
let shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
let success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if (success) {
return shader;
}
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
};
function createProgram(gl, vShader, fShader) {
let program = gl.createProgram();
gl.attachShader(program, vShader);
gl.attachShader(program, fShader);
gl.linkProgram(program);
let success = gl.getProgramParameter(program, gl.LINK_STATUS);
if (success) {
return program;
}
console.error(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
return null;
}
// Loop Library
function Loop(fn) {
const perf = window.performance !== undefined ? window.performance : Date;
const now = () => perf.now();
const raf = window.requestAnimationFrame.bind(window);
let running = false,
lastUpdate = now(),
frame = 0;
this.start = () => {
if (running) {
return this;
}
running = true;
lastUpdate = now();
frame = raf(tick);
return this;
};
this.stop = () => {
running = false;
if (frame != 0) {
window.cancelAnimationFrame(frame);
}
frame = 0;
return this;
};
const tick = () => {
frame = raf(tick);
const time = now();
const dt = time - lastUpdate;
fn(dt);
lastUpdate = time;
};
}
#app canvas {
background: #ccc;
position: fixed;
top: 50%;
bottom: 0;
left: 50%;
right: 0;
width: 100vmin;
height: 70vmin;
transform: translate(-50%, -25%);
image-rendering: optimizeSpeed;
cursor: none;
margin: auto;
}
<div id="app">
</div>
[...] in the fragment shader to make the black hole effect, that is, to make the gaps between the lines grow wider as they approach the center.
You have to do something like (1.0 - (1.0 - abs(x)) * (1.0 - abs(x))), where x is a coordinate and (0, 0) is the center of the texture.
Convert the texture coordinates from the range [0, 1] to the range [-1, 1]:
vec2 p = vQuadCoord * 2.0 - 1.0;
Calculate the "black hole effect" coordinate:
p = sign(p) * (1.0 - (1.0 - abs(p)) * (1.0 - abs(p)));
Convert back from the range [-1, 1] to [0, 1]:
vec2 uv = p * 0.5 + 0.5;
For a circular effect you have to multiply the normalized direction vector by a factor that depends on the squared distance to the center or on the distance to the border:
p = normalize(p) * length(p) * length(p);
or
p = normalize(p) * (1.0 - (1.0 - length(p)) * (1.0 - length(p)));
Fragment shader:
#version 300 es
precision mediump float;
out vec4 outColor;
uniform sampler2D u_texture;
in vec2 vQuadCoord;
void main() {
vec2 p = vQuadCoord * 2.0 - 1.0;
//p = sign(p) * (1.0 - (1.0 - abs(p)) * (1.0 - abs(p)));
//p = normalize(p) * (1.0 - (1.0 - length(p)) * (1.0 - length(p)));
p = normalize(p) * length(p) * length(p);
vec2 uv = p * 0.5 + 0.5;
outColor = texture(u_texture, uv);
}
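For reference, a matching vertex shader could look like the following. This is only a sketch: it assumes a clip-space quad like the one built in makeGlQuad, with the attribute a_position feeding the vQuadCoord varying used above:
#version 300 es
in vec2 a_position;
out vec2 vQuadCoord;
void main() {
// map the clip-space quad corners [-1, 1] to texture coordinates [0, 1]
vQuadCoord = a_position * 0.5 + 0.5;
gl_Position = vec4(a_position, 0.0, 1.0);
}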
To y-flip the texture you can set the UNPACK_FLIP_Y_WEBGL flag. See WebGL 2.0, 5.14.8 Texture objects:
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture);

glDrawElements: Source and destination textures of the draw are the same

I'm busy transferring some code from OpenGL to WebGL2 (to do dual depth peeling), but I'm getting a warning in my console that I cannot make sense of, and the output is just black.
I've gone through the process of drawing some of the buffers individually and discovered the warning only appears inside the loop for (var p = 1; p < numPasses; p++) when I do the geometry passes.
The shaders I based mine on also made extensive use of gl_NormalMatrix, gl_ModelViewMatrix, and gl_Vertex, which I think could also be a cause of the black output. I assume that replacing gl_ModelViewMatrix * gl_Vertex with uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0) will yield the same result.
(function() {
var script = document.createElement("script");
script.onload = function() {
main();
};
script.src = "https://mdn.github.io/webgl-examples/tutorial/gl-matrix.js";
document.head.appendChild(script);
})();
var initShader, peelShader, blendShader, finalShader;
var accumTex0, accumTex1;
var backBlenderFBO, peelingSingleFBO;
var depthTex = [], frontBlenderTex = [], backTempTex = [], backBlenderTex = [];
var quadVAO;
var drawBuffers;
function main() {
const canvas = document.querySelector("#glcanvas");
const gl = canvas.getContext("webgl2", { alpha: false });
if (!gl) {
alert("Unable to initialize WebGL. Your browser or machine may not support it.");
return;
}
var ext = gl.getExtension("EXT_color_buffer_float");
if (!ext) { alert("Unable to initialize WebGL. Your browser or machine may not support it."); return; }
quadVAO = newMesh(gl);
drawBuffers = [gl.COLOR_ATTACHMENT0, gl.COLOR_ATTACHMENT1, gl.COLOR_ATTACHMENT2, gl.COLOR_ATTACHMENT3, gl.COLOR_ATTACHMENT4, gl.COLOR_ATTACHMENT5, gl.COLOR_ATTACHMENT6];
// Dual Peeling Render Targets
backBlenderTex = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
backBlenderFBO = newFramebuffer(gl, [backBlenderTex]);
depthTex[0] = newTexture(gl, gl.TEXTURE_2D, gl.RG32F, 640, 480, gl.RG, gl.FLOAT, null);
frontBlenderTex[0] = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
backTempTex[0] = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
depthTex[1] = newTexture(gl, gl.TEXTURE_2D, gl.RG32F, 640, 480, gl.RG, gl.FLOAT, null);
frontBlenderTex[1] = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
backTempTex[1] = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
peelingSingleFBO = newFramebuffer(gl, [depthTex[0], frontBlenderTex[0], backTempTex[0], depthTex[1], frontBlenderTex[1], backTempTex[1], backBlenderTex]);
bindFramebuffer(gl, null);
initShader = newShader(gl, vsInitSource, fsInitSource);
peelShader = newShader(gl, vsPeelSource, fsPeelSource);
blendShader = newShader(gl, vsBlendSource, fsBlendSource);
finalShader = newShader(gl, vsFinalSource, fsFinalSource);
gl.disable(gl.CULL_FACE);
draw(gl);
}
// See below link to make sense of this function
// https://stackoverflow.com/questions/37381980/get-some-trounble-when-using-drawbuffers-in-webgl2
function getDrawBuffers(gl, ...idx) {
var buffers = [gl.NONE, gl.NONE, gl.NONE, gl.NONE, gl.NONE, gl.NONE, gl.NONE];
for (var i = 0; i < idx.length; i++) {
if (i == idx[i]) buffers[i] = drawBuffers[i];
}
return buffers;
}
function draw(gl) {
// setup MVP
const proj = mat4.create();
const cameraSize = 0.2;
mat4.ortho(proj, 0.0, 1.0, 0.0, 1.0, 0.0001, 10.0);
const view = mat4.create();
mat4.lookAt(view, [0, 0, 2], [0, 0, 0], [0, 1, 0]);
gl.disable(gl.DEPTH_TEST);
gl.enable(gl.BLEND);
bindFramebuffer(gl, peelingSingleFBO);
gl.drawBuffers(getDrawBuffers(gl, 1, 2));
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
const maxDepth = 1;
gl.drawBuffers(getDrawBuffers(gl, 0));
gl.clearColor(-maxDepth, -maxDepth, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.blendEquation(gl.MAX);
// init
// bindFramebuffer(gl, null); // to test with
// gl.drawBuffers([gl.BACK]); // to test with
gl.useProgram(initShader);
drawMesh(gl, initShader, proj, view, { x: 0.0, y: 0.0, z: 0.0 }, { r: 1.0, g: 0.0, b: 0.0, a: 1.0 });
gl.useProgram(null);
// return; // to test with
// peeling & blending
gl.drawBuffers(getDrawBuffers(gl, 6));
var backgroundColor = [1, 1, 1];
gl.clearColor(backgroundColor[0], backgroundColor[1], backgroundColor[2], 0);
gl.clear(gl.COLOR_BUFFER_BIT);
// return; // to test with
// for each pass
var numPasses = 4;
var currID = 0;
for (var p = 1; p < numPasses; p++) {
currID = p % 2;
var prevID = 1 - currID;
var bufID = currID * 3;
gl.drawBuffers(getDrawBuffers(gl, bufID + 1, bufID + 2));
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawBuffers(getDrawBuffers(gl, bufID));
gl.clearColor(-maxDepth, -maxDepth, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
// all three blending render targets
gl.drawBuffers(getDrawBuffers(gl, bufID, bufID + 1, bufID + 2));
gl.blendEquation(gl.MAX);
gl.useProgram(peelShader);
bindTexture(gl, gl.TEXTURE0, gl.TEXTURE_2D, depthTex[prevID]); // DepthBlenderTex
bindTexture(gl, gl.TEXTURE1, gl.TEXTURE_2D, frontBlenderTex[prevID]); // FrontBlenderTex
gl.uniform1f(gl.getUniformLocation(peelShader, "uAlpha"), 0.6);
drawMesh(gl, peelShader, proj, view, { x: 0.0, y: 0.0, z: 0.0 }, { r: 1.0, g: 0.0, b: 0.0, a: 1.0 });
gl.useProgram(null);
// alpha blend the back color
gl.drawBuffers(getDrawBuffers(gl, 6));
gl.blendEquation(gl.FUNC_ADD);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(blendShader);
bindTexture(gl, gl.TEXTURE0, gl.TEXTURE_2D, backTempTex[currID]); // TempTex
drawFullscreenQuad(gl);
gl.useProgram(null);
}
gl.disable(gl.BLEND);
// final pass
bindFramebuffer(gl, null);
gl.drawBuffers([gl.BACK]);
gl.useProgram(finalShader);
bindTexture(gl, gl.TEXTURE0, gl.TEXTURE_2D, depthTex[currID]); // DepthBlenderTex
bindTexture(gl, gl.TEXTURE1, gl.TEXTURE_2D, frontBlenderTex[currID]); // FrontBlenderTex
bindTexture(gl, gl.TEXTURE2, gl.TEXTURE_2D, backBlenderTex); // BackBlenderTex
drawFullscreenQuad(gl);
gl.useProgram(null);
}
function newShader(gl, vsSource, fsSource) {
const vertexShader = loadSource(gl, gl.VERTEX_SHADER, vsSource);
const fragmentShader = loadSource(gl, gl.FRAGMENT_SHADER, fsSource);
const shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert("Unable to initialize the shader program: " + gl.getProgramInfoLog(shaderProgram));
return null;
}
return shaderProgram;
}
function loadSource(gl, type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
}
return shader;
}
function newMesh(gl) {
var vertices = [1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, 1.0];
var indicies = [0, 1, 3, 1, 2, 3];
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
const vb = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vb);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
const eb = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, eb);
gl.bufferData(
gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array(indicies),
gl.STATIC_DRAW
);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 2 * 4, 0);
gl.enableVertexAttribArray(null);
gl.bindVertexArray(null);
return vao;
}
function drawFullscreenQuad(gl) {
gl.bindVertexArray(quadVAO);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
gl.bindVertexArray(null);
}
function drawMesh(gl, prog, proj, view, pos, col) {
gl.uniformMatrix4fv(gl.getUniformLocation(prog, "uProjMatrix"), false, proj);
gl.uniformMatrix4fv(gl.getUniformLocation(prog, "uViewMatrix"), false, view);
gl.bindVertexArray(quadVAO);
const model = mat4.create();
var trans = vec3.create();
vec3.set(trans, pos.x, pos.y, pos.z);
mat4.translate(model, model, trans);
gl.uniform4fv(gl.getUniformLocation(prog, "uColor"), [col.r, col.g, col.b, col.a]);
gl.uniformMatrix4fv(gl.getUniformLocation(prog, "uModelMatrix"), false, model);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
gl.bindVertexArray(null);
}
function newTexture(gl, target, internalFormat, height, width, format, type, pixels) {
var tid = gl.createTexture();
gl.bindTexture(target, tid);
gl.texParameteri(target, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(target, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(target, 0, internalFormat, width, height, 0, format, type, pixels);
return tid;
}
function bindTexture(gl, idx, target, id) {
gl.activeTexture(idx);
gl.bindTexture(target, id);
// wait should I be doing glUniforml1(id, ...) here?
}
function newFramebuffer(gl, colorAttachments) {
var fib = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fib);
for (var i = 0; i < colorAttachments.length; i++) {
gl.framebufferTexture2D(
gl.FRAMEBUFFER,
drawBuffers[i],
gl.TEXTURE_2D,
colorAttachments[i],
0
);
}
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
alert(gl.checkFramebufferStatus(gl.FRAMEBUFFER).toString(16));
}
return fib;
}
function bindFramebuffer(gl, fib) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fib);
}
const vsInitSource = `#version 300 es
layout(location=0) in vec3 inVertexPosition;
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;
void main(void) {
gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0);
}`;
const fsInitSource = `#version 300 es
precision mediump float;
layout(location=0) out vec2 outColor;
void main(void) {
// This seems very important because it is based on the near/far values
// What is the correct value I can expect here?
outColor.xy = vec2(-gl_FragCoord.z, gl_FragCoord.z);
}`;
const vsPeelSource = `#version 300 es
layout(location=0) in vec3 inVertexPosition;
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;
// I believe this is what gives the model the green and white stripes
// Not required?
// vec3 ShadeVertex() {
// float diffuse = abs(normalize(gl_NormalMatrix * gl_Normal).z);
// return vec3(gl_Vertex.xy, diffuse);
// }
void main(void) {
gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0);
//gl_TexCoord[0].xyz = ShadeVertex();
}`;
const fsPeelSource = `#version 300 es
precision mediump float;
uniform float uAlpha;
#define COLOR_FREQ 30.0
#define ALPHA_FREQ 30.0
vec4 ShadeFragment() {
vec4 color;
color.rgb = vec3(.4,.85,.0);
color.a = uAlpha;
return color;
}
uniform sampler2D DepthBlenderTex;
uniform sampler2D FrontBlenderTex;
#define MAX_DEPTH 1.0
layout(location=0) out vec4 outFragData0;
layout(location=1) out vec4 outFragData1;
layout(location=2) out vec4 outFragData2;
void main(void) {
// window-space depth interpolated linearly in screen space
float fragDepth = gl_FragCoord.z;
vec2 depthBlender = texture(DepthBlenderTex, gl_FragCoord.xy).xy;
vec4 forwardTemp = texture(FrontBlenderTex, gl_FragCoord.xy);
// Depths and 1.0-alphaMult always increase
// so we can use pass-through by default with MAX blending
outFragData0.xy = depthBlender;
// Front colors always increase (DST += SRC*ALPHA_MULT)
// so we can use pass-through by default with MAX blending
outFragData1 = forwardTemp;
// Because over blending makes color increase or decrease,
// we cannot pass-through by default.
// Each pass, only one fragment writes a color greater than 0
outFragData2 = vec4(0.0);
float nearestDepth = -depthBlender.x;
float farthestDepth = depthBlender.y;
float alphaMultiplier = 1.0 - forwardTemp.w;
if (fragDepth < nearestDepth || fragDepth > farthestDepth) {
// Skip this depth in the peeling algorithm
outFragData0.xy = vec2(-MAX_DEPTH);
return;
}
if (fragDepth > nearestDepth && fragDepth < farthestDepth) {
// This fragment needs to be peeled again
outFragData0.xy = vec2(-fragDepth, fragDepth);
return;
}
// If we made it here, this fragment is on the peeled layer from last pass
// therefore, we need to shade it, and make sure it is not peeled any farther
vec4 color = ShadeFragment();
outFragData0.xy = vec2(-MAX_DEPTH);
if (fragDepth == nearestDepth) {
outFragData1.xyz += color.rgb * color.a * alphaMultiplier;
outFragData1.w = 1.0 - alphaMultiplier * (1.0 - color.a);
} else {
outFragData2 += color;
}
}`;
const vsBlendSource = `#version 300 es
layout(location=0) in vec3 inVertexPosition;
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;
void main(void) {
gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0);
}`;
const fsBlendSource = `#version 300 es
precision mediump float;
uniform sampler2D TempTex;
layout(location=0) out vec4 outColor;
void main(void) {
outColor = texture(TempTex, gl_FragCoord.xy);
// for occlusion query
if (outColor.a == 0.0) discard;
}`;
const vsFinalSource = `#version 300 es
layout(location=0) in vec3 inVertexPosition;
uniform mat4 uModelMatrix;
uniform mat4 uViewMatrix;
uniform mat4 uProjMatrix;
void main(void) {
gl_Position = uProjMatrix * uViewMatrix * uModelMatrix * vec4(inVertexPosition, 1.0);
}`;
const fsFinalSource = `#version 300 es
precision mediump float;
uniform sampler2D DepthBlenderTex;
uniform sampler2D FrontBlenderTex;
uniform sampler2D BackBlenderTex;
layout(location=0) out vec4 outColor;
void main(void)
{
vec4 frontColor = texture(FrontBlenderTex, gl_FragCoord.xy);
vec3 backColor = texture(BackBlenderTex, gl_FragCoord.xy).rgb;
float alphaMultiplier = 1.0 - frontColor.w;
// front + back
outColor.rgb = frontColor.rgb + backColor * alphaMultiplier;
// front blender
// outColor.rgb = frontColor.rgb + vec3(alphaMultiplier);
// back blender
// outColor.rgb = backColor;
}`;
<canvas id="glcanvas" width="640" height="480"></canvas>
Using the same texture for reading and writing in a framebuffer is not allowed in WebGL (see point 6.26 at https://www.khronos.org/registry/webgl/specs/latest/1.0/#6.26).
But you can create two framebuffers with separate textures and swap them every frame. That way you read from the texture that was rendered to in the previous pass and write to the other one, and when the pass is done you simply swap them.
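A minimal sketch of that ping-pong approach, reusing the newTexture, newFramebuffer and drawFullscreenQuad helpers from the question (the names texA, texB, fboA and fboB are only placeholders):
// create two color textures and one framebuffer per texture (sizes/formats as in the question)
const texA = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
const texB = newTexture(gl, gl.TEXTURE_2D, gl.RGBA32F, 640, 480, gl.RGBA, gl.FLOAT, null);
const fboA = newFramebuffer(gl, [texA]);
const fboB = newFramebuffer(gl, [texB]);
let readTex = texA, readFbo = fboA;
let writeTex = texB, writeFbo = fboB;
function peelPass() {
// render into the framebuffer whose texture is NOT being sampled
gl.bindFramebuffer(gl.FRAMEBUFFER, writeFbo);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, readTex); // result of the previous pass
drawFullscreenQuad(gl); // assumes the appropriate program and uniforms are already bound
// swap, so the texture written in this pass is read in the next pass
[readTex, writeTex] = [writeTex, readTex];
[readFbo, writeFbo] = [writeFbo, readFbo];
}
Each pass then only samples textures that belong to the framebuffer that is not currently bound, which removes the feedback loop that triggers the warning.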
