The Fastest Way to Batch Calls in WebGL - javascript

I'm trying to rewrite my canvas-based rendering for my 2d game engine. I've made good progress and can render textures to the webgl context fine, complete with scaling, rotation and blending. But my performance sucks. On my test laptop, I can get 30 fps in vanilla 2d canvas with 1,000 entities on screen at once; in WebGL, I get 30 fps with 500 entities on screen. I'd expect the situation to be reversed!
I have a sneaking suspicion that the culprit is all this Float32Array buffer garbage I'm tossing around. Here's my render code:
// boilerplate code and obj coordinates
// grab gl context
var canvas = sys.canvas;
var gl = sys.webgl;
var program = sys.glProgram;
// width and height
var scale = sys.scale;
var tileWidthScaled = Math.floor(tileWidth * scale);
var tileHeightScaled = Math.floor(tileHeight * scale);
var normalizedWidth = tileWidthScaled / this.width;
var normalizedHeight = tileHeightScaled / this.height;
var worldX = targetX * scale;
var worldY = targetY * scale;
this.bindGLBuffer(gl, this.vertexBuffer, sys.glWorldLocation);
this.bufferGLRectangle(gl, worldX, worldY, tileWidthScaled, tileHeightScaled);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
var frameX = (Math.floor(tile * tileWidth) % this.width) * scale;
var frameY = (Math.floor(tile * tileWidth / this.width) * tileHeight) * scale;
// fragment (texture) shader
this.bindGLBuffer(gl, this.textureBuffer, sys.glTextureLocation);
this.bufferGLRectangle(gl, frameX, frameY, normalizedWidth, normalizedHeight);
gl.drawArrays(gl.TRIANGLES, 0, 6);
// Uploads one quad (two triangles, 6 vertices x 2 floats) covering the
// rectangle (x, y)..(x+width, y+height) into the currently bound ARRAY_BUFFER.
// NOTE(review): a fresh Float32Array is allocated on every call and re-uploaded
// per draw; STATIC_DRAW is a usage hint for data that rarely changes, so
// DYNAMIC_DRAW plus a reused array would likely fit this per-frame pattern
// better — confirm against profiling.
bufferGLRectangle: function (gl, x, y, width, height) {
var left = x;
var right = left + width;
var top = y;
var bottom = top + height;
// Two CCW triangles: (left,top / right,top / left,bottom) and
// (left,bottom / right,top / right,bottom).
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
left, top,
right, top,
left, bottom,
left, bottom,
right, top,
right, bottom
]), gl.STATIC_DRAW);
},
// Binds `buffer` as the active ARRAY_BUFFER and points the given attribute
// `location` at it: 2 floats per vertex, tightly packed (stride 0, offset 0).
bindGLBuffer: function (gl, buffer, location) {
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.vertexAttribPointer(location, 2, gl.FLOAT, false, 0, 0);
},
And here's my simple test shaders (these are missing blending, scaling & rotation):
// fragment (texture) shader
// Fragment shader: samples the bound texture at the interpolated coordinate.
precision mediump float;
uniform sampler2D image;          // texture unit 0 (bound via activeTexture/bindTexture)
varying vec2 texturePosition;     // interpolated from the vertex shader
void main() {
gl_FragColor = texture2D(image, texturePosition);
}
// vertex shader
// Vertex shader: converts pixel-space positions into clip space and forwards
// texture coordinates to the fragment shader.
attribute vec2 worldPosition;     // vertex position in canvas pixels
attribute vec2 vertexPosition;    // texture coordinate for this vertex
uniform vec2 canvasResolution;    // canvas size in pixels
varying vec2 texturePosition;
void main() {
// pixels -> 0..1 -> 0..2 -> -1..1 (clip space)
vec2 zeroToOne = worldPosition / canvasResolution;
vec2 zeroToTwo = zeroToOne * 2.0;
vec2 clipSpace = zeroToTwo - 1.0;
// Flip Y so that (0,0) is the top-left, matching 2D canvas conventions.
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
texturePosition = vertexPosition;
}
Any ideas on how to get better performance? Is there a way to batch my drawArrays? Is there a way to cut down on the buffer garbage?
Thanks!

There's two big issues I can see here that will adversely affect your performance.
You're creating a lot of temporary Float32Arrays, which are currently expensive to construct (That should get better in the future). It would be far better in this case to create a single array and set the vertices each time like so:
verts[0] = left; verts[1] = top;
verts[2] = right; verts[3] = top;
// etc...
gl.bufferData(gl.ARRAY_BUFFER, verts, gl.STATIC_DRAW);
The bigger issue by far, however, is that you're only drawing a single quad at a time. 3D APIs simply aren't designed to do this efficiently. What you want to do is try and squeeze as many triangles as possible into each drawArrays/drawElements call you make.
There's several ways to do that, the most straightforward being to fill up a buffer with as many quads as you can that share the same texture, then draw them all in one go. In pseudocode:
var MAX_QUADS_PER_BATCH = 100;
var VERTS_PER_QUAD = 6;
var FLOATS_PER_VERT = 2;
var verts = new Float32Array(MAX_QUADS_PER_BATCH * VERTS_PER_QUAD * FLOATS_PER_VERT);
var quadCount = 0;
// Appends one quad (6 vertices, 2 floats each) to the CPU-side batch array,
// flushing automatically when the batch is full.
// FIX: parameter order is (left, top, right, bottom) to match the call site
// `addQuad(instance.left, instance.top, instance.right, instance.bottom)`;
// the original signature had `bottom` and `right` swapped.
function addQuad(left, top, right, bottom) {
  var offset = quadCount * VERTS_PER_QUAD * FLOATS_PER_VERT;
  // Triangle 1
  verts[offset]    = left;  verts[offset+1]  = top;
  verts[offset+2]  = right; verts[offset+3]  = top;
  verts[offset+4]  = left;  verts[offset+5]  = bottom;
  // Triangle 2
  verts[offset+6]  = left;  verts[offset+7]  = bottom;
  verts[offset+8]  = right; verts[offset+9]  = top;
  verts[offset+10] = right; verts[offset+11] = bottom;
  quadCount++;
  if (quadCount == MAX_QUADS_PER_BATCH) {
    flushQuads();
  }
}
// Uploads the batched vertices to the GPU and draws them in a single call,
// then resets the batch so the next addQuad starts from the beginning.
function flushQuads() {
  if (quadCount === 0) { return; } // nothing batched this frame
  gl.bindBuffer(gl.ARRAY_BUFFER, vertsBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, verts, gl.STATIC_DRAW); // Copy the buffer we've been building to the GPU.
  // Make sure vertexAttribPointers are set, etc...
  // FIX: the vertex count is quadCount * VERTS_PER_QUAD (the original used '+',
  // which would draw the wrong number of vertices).
  gl.drawArrays(gl.TRIANGLES, 0, quadCount * VERTS_PER_QUAD);
  // FIX: reset the counter, otherwise the batch index grows past the array.
  quadCount = 0;
}
// In your render loop
for(sprite in spriteTypes) {
gl.bindTexture(gl.TEXTURE_2D, sprite.texture);
for(instance in sprite.instances) {
addQuad(instance.left, instance.top, instance.right, instance.bottom);
}
flushQuads();
}
That's an oversimplification, and there's ways to batch even more, but hopefully that gives you an idea of how to start batching your calls for better performance.

If you use WebGL Inspector you'll see in the trace if you do any unnecessary GL instructions (they're marked with bright yellow background). This might give you an idea on how to optimize your rendering.
Generally speaking, sort your draw calls so all using the same program, then attributes, then textures and then uniforms are done in order. This way you'll have as few GL instructions (and JS instructions) as possible.

Related

3D Object Faces Disappearing in p5.js

I am trying to make a 3D box with a pattern on each side using the following code but when viewed from certain angles, the back faces disappear when looking through the transparent parts of the forward faces. I was also wondering if it is possible to have a different pattern on each face? Many thanks in advance!
let r = 10
let a = 0
let c = 20
let angle = 0
let art
// p5.js setup: full-window WEBGL canvas plus an offscreen 2D buffer that is
// drawn into incrementally and used as the box texture.
function setup() {
createCanvas(windowWidth, windowHeight, WEBGL);
art = createGraphics(800, 800)
}
// p5.js draw loop: adds one dot per frame to the offscreen `art` buffer along
// a spiral (r/c/a drive the polar coordinates), then renders a slowly rotating
// box textured with that buffer.
function draw() {
background(0);
let x = r + c * cos(a)
let y = r + c * sin(a)
art.fill(r, a, c)
art.ellipse(x + 400, y + 400, 10, 10)
c += 0.2
a += 1.8
push()
texture(art)
rotateX(angle)
rotateY(angle)
rotateZ(angle)
box(400)
angle += 0.0003
pop()
orbitControl();
}
html, body { margin: 0; overflow: hidden; }
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.4.0/p5.js"></script>
This happens because in WebGL, once a pixel is drawn, regardless of its level of transparency, if another triangle would draw to that same pixel, but at a further depth, it is discarded (I think the alpha information from the original pixel(s) may no longer be available). In order for transparency to work properly in WebGL it is necessary to draw all triangles in depth order (furthest from the camera first). And even then if two triangles intersect there will still be problems.
In your case because you have many pixels that are completely transparent and others that are completely opaque there is another solution: a custom fragment shader that discards pixels if the texture alpha is below some threshold.
// Vertex shader source (GLSL): standard p5.js model-view/projection transform;
// forwards the texture coordinate to the fragment shader as vTexCoord.
const vert = `
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;
attribute vec3 aPosition;
attribute vec2 aTexCoord;
varying vec2 vTexCoord;
void main() {
vTexCoord = aTexCoord;
vec4 viewModelPosition = uModelViewMatrix * vec4(aPosition, 1.0);
gl_Position = uProjectionMatrix * viewModelPosition;
}`;
// Fragment shader source (GLSL): samples the texture and discards nearly
// transparent pixels so they don't write to the depth buffer and occlude
// the faces behind them.
const frag = `
precision mediump float;
// ranges from 0..1
varying vec2 vTexCoord;
uniform sampler2D uSampler;
void main() {
vec4 tex = texture2D(uSampler, vTexCoord);
if (tex.a < 0.05) {
discard;
}
gl_FragColor = tex;
}`;
let r = 10
let a = 0
let c = 20
let angle = 0
let art
let discardShader;
// p5.js setup: WEBGL canvas, offscreen texture buffer, and the custom
// alpha-discard shader. NORMAL texture mode maps UVs to the 0..1 range.
function setup() {
createCanvas(windowWidth, windowHeight, WEBGL);
art = createGraphics(800, 800)
discardShader = createShader(vert, frag)
textureMode(NORMAL)
}
// p5.js draw loop: same spiral-painting as before, but the box is rendered
// with the custom discard shader so fully transparent texels don't hide the
// back faces.
function draw() {
background(0);
let x = r + c * cos(a)
let y = r + c * sin(a)
art.fill(r, a, c)
art.ellipse(x + 400, y + 400, 10, 10)
c += 0.2
a += 1.8
push()
noStroke()
texture(art)
shader(discardShader)
rotateX(angle)
rotateY(angle)
rotateZ(angle)
box(400)
angle += 0.0003
pop()
orbitControl();
}
html,
body {
margin: 0;
overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.4.1/p5.js"></script>
Note #1: It is important that you use p5.js v1.4.1 for this to work because prior to that there was a bug that prevented user shaders from working with textures.
Note #2: If your texture had partial opacity then this would not work and instead you would want to render each plane of the box separately and in the correct order (farthest from the camera first).

Is passing just the length of the sides of a triangle, sufficient to draw a triangle in WebGL?

The question may not be clear, but I am gonna clear it here. Let's consider an array which has the coordinates of a triangle in clip space:
var coor = [
-0.4, 0.6,
0.0, -0.5,
-0.5, 0.0,
0.5, -0.5,
0.0
]
So, the coor array will help us to draw a triangle using WebGL. But instead of it I want to go something like this :
var coor2 = [ 100, 100, 100 ]
In the coor2 I gave the measure of the sides to draw a triangle. It will be an equilateral triangle. So, can I do something such that I enter the three sides of a triangle and a script converts them to coordinates which can be clipped in the clip space and can be read by WebGL?
Thanks In Advance
WebGL only cares that you set gl_Position to clip space values. It doesn't care how you do it. It's up to you to decide how to do that. Inside the vertex shader, at least in WebGL1, things are stateless. Meaning when you draw a triangle your vertex shader will be called 3 times. It needs to set gl_Position to 3 different values in clipspace with no state between calls. So given 100 three times, how would you compute a different value each time your vertex shader is called?
Just imagine it in JavaScript
const gl_Position1 = vertexShader(100);
const gl_Position2 = vertexShader(100);
const gl_Position3 = vertexShader(100);
How is the function vertexShader supposed to produce 3 different values with no other input? Unless there is some other state it can't produce a different value. vertex shaders in WebGL1 don't have any other state.
You need to pass in some data that changes every iteration.
Of course you if the values changed then you could produce a triangle. Example
// Maps a vertex index (0, 1, 2) to a clip-space position on the unit circle,
// producing the three corners of an equilateral triangle.
function vertexShader(v) {
  const angle = v / 3 * Math.PI * 2;
  const x = Math.cos(angle);
  const y = Math.sin(angle);
  return [x, y, 0, 1];
}
const gl_Position1 = vertexShader(0);
const gl_Position2 = vertexShader(1);
const gl_Position3 = vertexShader(2);
Would produce a triangle. Converting that to GLSL and WebGL
// Minimal WebGL demo: the only per-vertex input is the index attribute `v`;
// the vertex shader itself computes the triangle's clip-space positions.
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute float v;
#define PI radians(180.0)
void main() {
float angle = v / 3.0 * PI * 2.0;
gl_Position = vec4(cos(angle), sin(angle), 0, 1);
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(1, 0, 0, 1);
}
`;
// twgl compiles/links the two shader sources into a program.
const program = twgl.createProgram(gl, [vs, fs]);
const vLocation = gl.getAttribLocation(program, 'v');
// Upload the three indices 0, 1, 2 as the sole vertex attribute.
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0, 1, 2]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(vLocation);
gl.vertexAttribPointer(vLocation, 1, gl.FLOAT, false, 0, 0);
gl.useProgram(program);
gl.drawArrays(gl.TRIANGLES, 0, 3);
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
Of course given we only passed in [0, 1, 2] we can't easily specify a position but the point is at least now that we have a value that changes we can produce a triangle which is just to re-iterate WebGL only cares that you set gl_Position to clip space values. It doesn't care how you do it.
See this article for more

Three.js: Passing array of textures to shaderMaterial

Short question: How can I pass a list of textures to shaders and access the nth texture within a fragment shader (where n is a value passed as a varying from the vertex shader)?
Longer question: I'm working on a Three.js scene that represents multiple images. Each image uses one of multiple textures, and each texture is an atlas containing several thumbnails. I'm working on implementing custom shaderMaterial to optimize performance, but am confused on how to use multiple textures in the shaders.
My goal is to pass a list of textures and a number that represents the number of vertices per texture so that I can identify the texture that should be used for each image's vertices/pixels. I thought I could accomplish this by passing the following data:
// Create a texture loader so we can load our image file
var loader = new THREE.TextureLoader();
// specify the url to the texture
var catUrl = 'https://s3.amazonaws.com/duhaime/blog/tsne-webgl/assets/cat.jpg';
var dogUrl = 'https://s3.amazonaws.com/duhaime/blog/tsne-webgl/assets/dog.jpg';
var material = new THREE.ShaderMaterial({
uniforms: {
verticesPerTexture: new Float32Array([4.0]), // count of vertices per texture
textures: {
type: 'tv', // type for texture array
value: [loader.load(catUrl), loader.load(dogUrl)],
}
},
vertexShader: document.getElementById('vertex-shader').textContent,
fragmentShader: document.getElementById('fragment-shader').textContent
});
However, if I do this, the vertex shader can't seem to use the uniforms to tell the fragment shader which texture it should use, as vertex shaders evidently can't pass sampler2d objects as varyings to the fragment shader. How can I pass a list of textures to the shaders?
Full code (which doesn't successfully pass a list of textures):
/**
* Generate a scene object with a background color
**/
// Creates the root three.js scene with a white background.
function getScene() {
var scene = new THREE.Scene();
scene.background = new THREE.Color(0xffffff);
return scene;
}
/**
* Generate the camera to be used in the scene. Camera args:
* [0] field of view: identifies the portion of the scene
* visible at any time (in degrees)
* [1] aspect ratio: identifies the aspect ratio of the
* scene in width/height
* [2] near clipping plane: objects closer than the near
* clipping plane are culled from the scene
* [3] far clipping plane: objects farther than the far
* clipping plane are culled from the scene
**/
// Creates a perspective camera (75° FOV, window aspect, near 0.1, far 1000)
// positioned slightly above the origin and back along +z.
function getCamera() {
var aspectRatio = window.innerWidth / window.innerHeight;
var camera = new THREE.PerspectiveCamera(75, aspectRatio, 0.1, 1000);
camera.position.set(0, 1, 10);
return camera;
}
/**
* Generate the renderer to be used in the scene
**/
// Creates the WebGL renderer sized to the window and attaches its canvas
// to the document body.
function getRenderer() {
// Create the canvas with a renderer
var renderer = new THREE.WebGLRenderer({antialias: true});
// Add support for retina displays
renderer.setPixelRatio(window.devicePixelRatio);
// Specify the size of the canvas
renderer.setSize(window.innerWidth, window.innerHeight);
// Add the canvas to the DOM
document.body.appendChild(renderer.domElement);
return renderer;
}
/**
* Generate the controls to be used in the scene
* @param {obj} camera: the three.js camera for the scene
* @param {obj} renderer: the three.js renderer for the scene
**/
// Creates trackball controls bound to the renderer's canvas, with slowed
// zoom/pan speeds.
function getControls(camera, renderer) {
var controls = new THREE.TrackballControls(camera, renderer.domElement);
controls.zoomSpeed = 0.4;
controls.panSpeed = 0.4;
return controls;
}
/**
* Load image
**/
// Builds a single textured quad (indexed BufferGeometry + custom
// ShaderMaterial) and adds it to the module-level `scene`.
function loadImage() {
var geometry = new THREE.BufferGeometry();
/*
Now we need to push some vertices into that geometry to identify the coordinates the geometry should cover
*/
// Identify the image size
var imageSize = {width: 10, height: 7.5};
// Identify the x, y, z coords where the image should be placed
var coords = {x: -5, y: -3.75, z: 0};
// Add one vertex for each corner of the image, using the
// following order: lower left, lower right, upper right, upper left
var vertices = new Float32Array([
coords.x, coords.y, coords.z, // bottom left
coords.x+imageSize.width, coords.y, coords.z, // bottom right
coords.x+imageSize.width, coords.y+imageSize.height, coords.z, // upper right
coords.x, coords.y+imageSize.height, coords.z, // upper left
])
// set the uvs for this box; these identify the following corners:
// lower-left, lower-right, upper-right, upper-left
var uvs = new Float32Array([
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
])
// store the texture index of each object to be rendered
// NOTE(review): textureIndices is declared but never attached to the geometry
// via addAttribute, so the shader can never see it — likely part of the bug
// being asked about.
var textureIndices = new Float32Array([0.0, 0.0, 0.0, 0.0]);
// indices = sequence of index positions in `vertices` to use as vertices
// we make two triangles but only use 4 distinct vertices in the object
// the second argument to THREE.BufferAttribute is the number of elements
// in the first argument per vertex
geometry.setIndex([0,1,2, 2,3,0])
geometry.addAttribute('position', new THREE.BufferAttribute(vertices, 3));
geometry.addAttribute('uv', new THREE.BufferAttribute(uvs, 2));
// Create a texture loader so we can load our image file
var loader = new THREE.TextureLoader();
// specify the url to the texture
var catUrl = 'https://s3.amazonaws.com/duhaime/blog/tsne-webgl/assets/cat.jpg';
var dogUrl = 'https://s3.amazonaws.com/duhaime/blog/tsne-webgl/assets/dog.jpg';
// specify custom uniforms and attributes for shaders
// Uniform types: https://github.com/mrdoob/three.js/wiki/Uniforms-types
// NOTE(review): verticesPerTexture is given a raw Float32Array rather than a
// `{ value: ... }` uniform object like the others — presumably it should be
// `verticesPerTexture: { value: 4.0 }`; verify against the three.js docs.
var material = new THREE.ShaderMaterial({
uniforms: {
verticesPerTexture: new Float32Array([4.0]), // store the count of vertices per texture
cat_texture: {
type: 't',
value: loader.load(catUrl),
},
dog_texture: {
type: 't',
value: loader.load(dogUrl),
},
textures: {
type: 'tv', // type for texture array
value: [loader.load(catUrl), loader.load(dogUrl)],
}
},
vertexShader: document.getElementById('vertex-shader').textContent,
fragmentShader: document.getElementById('fragment-shader').textContent
});
// Combine our image geometry and material into a mesh
var mesh = new THREE.Mesh(geometry, material);
// Set the position of the image mesh in the x,y,z dimensions
mesh.position.set(0,0,0)
// Add the image to the scene
scene.add(mesh);
}
/**
* Render!
**/
// Render loop: re-schedules itself each frame, draws the scene, and lets the
// trackball controls process input.
function render() {
requestAnimationFrame(render);
renderer.render(scene, camera);
controls.update();
};
var scene = getScene();
var camera = getCamera();
var renderer = getRenderer();
var controls = getControls(camera, renderer);
loadImage();
render();
html, body { width: 100%; height: 100%; background: #000; }
body { margin: 0; overflow: hidden; }
canvas { width: 100%; height: 100%; }
<script src='https://cdnjs.cloudflare.com/ajax/libs/three.js/92/three.min.js'></script>
<script src='https://threejs.org/examples/js/controls/TrackballControls.js'></script>
<script type='x-shader/x-vertex' id='vertex-shader'>
/**
* The vertex shader's main() function must define `gl_Position`,
* which describes the position of each vertex in the space.
*
* To do so, we can use the following variables defined by Three.js:
*
* uniform mat4 modelViewMatrix - combines:
* model matrix: maps a point's local coordinate space into world space
* view matrix: maps world space into camera space
*
* uniform mat4 projectionMatrix - maps camera space into screen space
*
* attribute vec3 position - sets the position of each vertex
*
* attribute vec2 uv - determines the relationship between vertices and textures
*
* `uniforms` are constant across all vertices
*
* `attributes` can vary from vertex to vertex and are defined as arrays
* with length equal to the number of vertices. Each index in the array
* is an attribute for the corresponding vertex
*
* `varyings` are values passed from the vertex to the fragment shader
*
* Specifying attributes that are not passed to the vertex shader will not prevent shader compiling
**/
// declare uniform vals
uniform float verticesPerTexture; // store the vertices per texture
// declare variables to pass to fragment shaders
varying vec2 vUv; // pass the uv coordinates of each vertex to the frag shader
varying float textureIndex; // pass the texture idx
// initialize counters
float vertexIdx = 0.0; // stores the index position of the current vertex
float textureIdx = 1.0; // store the index position of the current texture
void main() {
// keep track of which texture each vertex belongs to
vertexIdx = vertexIdx + 1.0;
if (vertexIdx == verticesPerTexture) {
textureIdx = textureIdx + 1.0;
vertexIdx = 0.0;
}
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script type='x-shader/x-fragment' id='fragment-shader'>
/**
* The fragment shader's main() function must define `gl_FragColor`,
* which describes the pixel color of each pixel on the screen.
*
* To do so, we can use uniforms passed into the shader and varyings
* passed from the vertex shader
*
* Attempting to read a varying not generated by the vertex shader will
* throw a warning but won't prevent shader compiling
*
* Each attribute must contain n_vertices * n_components, where n_components
* is the length of the given datatype (e.g. vec2 n_components = 2;
* float n_components = 1)
**/
precision highp float; // set float precision (optional)
varying vec2 vUv; // identify the uv values as a varying attribute
varying float textureIndex; // identify the texture indices as a varying attribute
uniform sampler2D cat_texture; // identify the texture as a uniform argument
uniform sampler2D dog_texture; // identify the texture as a uniform argument
//uniform sampler2D textures;
// TODO pluck out textures[textureIndex];
//uniform sampler2D textures[int(textureIndex)];
void main() {
int textureIdx = int(textureIndex);
// float point arithmetic prevents strict equality checking
if ( (textureIndex - 1.0) < 0.1 ) {
gl_FragColor = texture2D(cat_texture, vUv);
} else {
gl_FragColor = texture2D(dog_texture, vUv);
}
}
</script>
Having slept on it, here's another method you can try, more akin to how you'd do this with built-in materials:
// Wraps a texture in a minimal ShaderMaterial whose single `texture` uniform
// holds that map, so each face group can carry its own material.
function createMaterial ( texture ) {
  const uniforms = {
    texture: { value: texture },
  };
  return new ShaderMaterial({ uniforms });
}
var mat1 = createMaterial( dogTexture );
var mat2 = createMaterial( catTexture );
geometry.faces[ 0 ].materialIndex = 0;
geometry.faces[ 1 ].materialIndex = 0;
geometry.faces[ 2 ].materialIndex = 1;
geometry.faces[ 3 ].materialIndex = 1;
var mesh = new Mesh( geometry, [ mat1, mat2 ] );
You’ve written the vertex shader as if main is a for loop and it will iterate through all the vertices and update vertexIdx and textureIdx as it goes along, but that’s not how shaders work. Shaders run in parallel, processing every vertex at the same time. So you can’t share what the shader computes about one vertex with another vertex.
Use an attribute on the geometry instead:
geometry.addAttribute( 'texIndex', new THREE.BufferAttribute( new Float32Array([ 0, 0, 0, 0, 1, 1, 1, 1 ]), 1 ) )
I’m getting a little out of my depth here but I think you then pass it through the vertex shader to a varying:
attribute int texIndex;
varying int vTexIndex;
void main () { vTexIndex = texIndex; }
Finally, in the fragment shader:
varying int vTexIndex;
uniform sampler2D textures[ 2 ];
...
sampler2D tex = textures[ vTexIndex ];

How to render images in WebGL from ArrayBuffer

I have an image that I am reading on the server side and pushing to the web browser via an AJAX call. I have a requirement where I have to render it line by line using WebGL.
For Example : Image is 640X480 where 640 is width and 480 is height. Now the total number of pixels will be 640*480 = 307200 pixels. So, I want to render the whole image in 640(total width) intervals in a loop using WebGL.
Now I have texture2D (as per my knowledge) in WebGL to do so, but I'm not getting any idea of where to start. I also have the ArrayBuffer with me; the only thing is that, using Texture2D, I want to render it slowly, line by line.
I am ready to go for any js libraries ,if they are satisfying the requirements.
So, to write a image line by line we can do something like this.
Vertex Shader
attribute vec2 a_position;
attribute vec2 a_texCoord;
void main() {
???
}
Fragment Shader
#ifdef GL_ES
precision mediump float;
#endif
uniform float time;
uniform vec2 mouse;
uniform vec2 resolution;
void main( void ) {
vec2 position = 1.0 - gl_FragCoord.xy / resolution;
vec3 color = vec3(1.0);
if (time > position.y * 10.0) {
color = texture2D(uImage0, uv);
}
gl_FragColor = vec4(color, 1.0);
}
Javascript For rendering pixel by pixel
// Creates a width x height GL texture and fills it with the given byte data.
// In WebGL1 the same `type` constant serves as both the internal format and
// the source format argument of texImage2D.
function createTextureFromArray(gl, dataArray, type, width, height) {
  const pixels = new Uint8Array(dataArray);
  const texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.texImage2D(gl.TEXTURE_2D, 0, type, width, height, 0, type, gl.UNSIGNED_BYTE, pixels);
  return texture;
}
var arrayBuffer = new ArrayBuffer(640*480);
for (var i=0; i < 640; i++) {
for (var j=0; j < 480; j++) {
arrayBuffer[i] = Math.floor(Math.random() * 255) + 0; //filling buffer with random data between 0 and 255 which will be further filled to the texture
//NOTE : above data is just dummy data , I will get this data from server pixel by pixel.
}
}
var gl = canvas.getContext('webgl');
// setup GLSL program
var program = createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
//what should I add after this ?
Can anybody complete the code , I have no idea of how to write code to accomplish this.
OpenGL is not designed to draw images "line by line." However, you can achieve this effect in software by writing to an array, uploading it as a texture, and sampling from it in your shader when drawing a full screen polygon.
To go about this you should create an unsigned byte array. For each pixel in your image you can have some combination of red, green, blue, and alpha channels. The simplest case would be RGB, 3 unsigned bytes for each pixel. The final array should be sized according to the component size (3), times your width (640), times your height (480). You should initialize the values in your array according to what you want your background color to be, then upload it to the gpu using texImage2D.
To 'draw line by line' would be to update 'width' pixels at a time given a row. Each time you change the image data you should then reupload the image to the gpu then draw the fullscreen polygon.
The fullscreen polygon is simply two triangles that cover the entire clip space of the screen. The screen goes from -1 to 1 in x and y dimensions, so make an array buffer accordingly, upload it with the two triangles, and call drawArrays as you update the texture. The UV's for the polygon should go from 0 to 1, so in your vertex shader you should have a 'varying' output variable that will be 0.5 * position + 0.5. This is used in the fragment shader to sample from the texture.
The official documentation is one of the best places to learn from. The official reference pages for openGL ES or openGL 3 contain relevant information, while the reference card https://www.khronos.org/files/webgl/webgl-reference-card-1_0.pdf show the available functions in WebGL that correspond roughly to the same api.
It's not clear at all what you're trying to accomplish and why you are using WebGL at all. Are you sending one line of data at a time and you want to render that one individual line of data when its received? Are you sending all the data and you just want reveal it a line at time horizontally?
If you have the entire image available then you can just render a larger and larger portion of it using canvas2d. The drawImage function takes optional source and destination rectangles.
// Reveals `img` one 1-pixel-wide column at a time using canvas2d drawImage's
// optional source/destination rectangles. (As written the while-loop copies
// every column immediately; spreading it across frames means advancing `x`
// once per render tick instead.)
// at init time
var x = 0;
// at render time
while (x < img.width) {
var srcX = x;
var srcY = 0;
var srcWidth = 1; // one pixel per frame
var srcHeight = img.height;
var dstX = x;
var dstY = 0;
var dstWidth = 1;
var dstHeight = img.height;
ctx.drawImage(img, srcX, srcY, srcWidth, srcHeight, dstX, dstY, dstWidth, dstHeight);
++x;
}
If you're sending them 1 line of data at a time you can use ImageData to make a 1xheight image and use putImageData to draw it.
You can do the same things in WebGL. If you have the entire texture in memory then each frame adjust your positions and texture coordinates to draw a different part of it. If you're receiving 1 column of data at a time then just use a texture that's 1 x height and draw that at the appropriate place. OR, copy that 1 x height data into the fullsize texture using gl.texSubImage2D and then adjust the positions and texture coordinates appropriately to draw the part of the texture you want to draw to the part of the canvas you want to draw it.
drawImage implemented in WebGL would look something like this. I'm using twgl.js because WebGL is too verbose.
var m4 = twgl.m4;
var gl = document.getElementById("c").getContext("webgl");
// compiles shader, links and looks up locations
var programInfo = twgl.createProgramInfo(gl, ["vs", "fs"]);
// a unit quad
var arrays = {
position: {
numComponents: 2,
data: [
0, 0,
1, 0,
0, 1,
0, 1,
1, 0,
1, 1,
],
},
};
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
var bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
// create a texture using a canvas so we don't have to download one
var ctx = document.createElement("canvas").getContext("2d");
ctx.fillStyle = "blue";
ctx.fillRect(0, 0, ctx.canvas.width, ctx.canvas.height);
ctx.lineWidth = 20;
["red", "orange", "yellow"].forEach(function(color, ndx, array) {
ctx.strokeStyle = color;
ctx.beginPath();
ctx.arc((ndx + 1) / (array.length + 1) * ctx.canvas.width, ctx.canvas.height / 2, ctx.canvas.height * 0.4, 0, Math.PI * 2, false);
ctx.stroke();
});
ctx.fillStyle = "white";
ctx.font = "40px sans-serif";
ctx.textAlign = "center";
ctx.textBaseline = "middle";
ctx.fillText("DrawImage", ctx.canvas.width / 2, ctx.canvas.height / 2);
// calls gl.createTexture, gl.bindTexture, gl.texImage2D, gl.texParameteri
var tex = twgl.createTexture(gl, { src: ctx.canvas });
var texWidth = ctx.canvas.width;
var texHeight = ctx.canvas.height;
// we pass in texWidth and texHeight because unlike images
// we can't look up the width and height of a texture
// we pass in targetWidth and targetHeight to tell it
// the size of the thing we're drawing too. We could look
// up the size of the canvas with gl.canvas.width and
// gl.canvas.height but maybe we want to draw to a framebuffer
// etc.. so might as well pass those in.
// srcX, srcY, srcWidth, srcHeight are in pixels
// computed from texWidth and texHeight
// dstX, dstY, dstWidth, dstHeight are in pixels
// computed from targetWidth and targetHeight
// WebGL re-implementation of canvas2d drawImage: positions a unit quad via
// two matrices — `textureMatrix` selects the source rect in UV space, and
// `matrix` maps the quad to destination pixels and then to clip space.
// srcX, srcY, srcWidth, srcHeight are in pixels
// computed from texWidth and texHeight
// dstX, dstY, dstWidth, dstHeight are in pixels
// computed from targetWidth and targetHeight
function drawImage(
tex, texWidth, texHeight,
srcX, srcY, srcWidth, srcHeight,
dstX, dstY, dstWidth, dstHeight,
targetWidth, targetHeight) {
var mat = m4.identity();
var tmat = m4.identity();
var uniforms = {
matrix: mat,
textureMatrix: tmat,
texture: tex,
};
// these adjust the unit quad to generate texture coordinates
// to select part of the src texture
// NOTE: no check is done that srcX + srcWidth go outside of the
// texture or are in range in any way. Same for srcY + srcHeight
m4.translate(tmat, [srcX / texWidth, srcY / texHeight, 0], tmat);
m4.scale(tmat, [srcWidth / texWidth, srcHeight / texHeight, 1], tmat);
// these convert from pixels to clip space
m4.translate(mat, [-1, 1, 0], mat);
m4.scale(mat, [2 / targetWidth, -2 / targetHeight, 1], mat);
// these move and scale the unit quad into the size we want
// in the target as pixels
m4.translate(mat, [dstX, dstY, 0], mat);
m4.scale(mat, [dstWidth, dstHeight, 1], mat);
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.uniformXXX, gl.activeTexture, gl.bindTexture
twgl.setUniforms(programInfo, uniforms);
// calls gl.drawArray or gl.drawElements
// NOTE(review): the (gl, type, bufferInfo) argument order matches older twgl
// releases; twgl v4 expects drawBufferInfo(gl, bufferInfo, type) — confirm
// against the twgl version in use.
twgl.drawBufferInfo(gl, gl.TRIANGLES, bufferInfo);
}
// Animation loop: picks time-varying source and destination rectangles
// (sine-driven so they stay within the texture/canvas bounds) and draws.
function render(time) {
time *= 0.001;
var targetWidth = gl.canvas.width;
var targetHeight = gl.canvas.height;
// pick some various src rects and dst rects
var srcX = Math.abs(Math.sin(time * 1 )) * texWidth;
var srcY = Math.abs(Math.sin(time * 1.81)) * texHeight;
var srcWidth = (texWidth - srcX) * Math.abs(Math.sin(time * 2.12));
var srcHeight = (texHeight - srcY) * Math.abs(Math.sin(time * 1.53));
var dstX = Math.abs(Math.sin(time * 0.34)) * targetWidth;
var dstY = Math.abs(Math.sin(time * 2.75)) * targetHeight;
var dstWidth = (targetWidth - dstX) * Math.abs(Math.sin(time * 1.16));
var dstHeight = (targetHeight - dstY) * Math.abs(Math.sin(time * 1.17));
drawImage(
tex, texWidth, texHeight,
srcX, srcY, srcWidth, srcHeight,
dstX, dstY, dstWidth, dstHeight,
targetWidth, targetHeight);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/twgl-full.min.js"></script>
<script id="vs" type="not-js">
// Vertex shader. The geometry is always the 0..1 unit quad;
// `matrix` places/scales it in clip space and `textureMatrix`
// remaps it to the desired part of the texture.
attribute vec4 position;
uniform mat4 matrix;
uniform mat4 textureMatrix;
varying vec2 texcoord;
void main () {
gl_Position = matrix * position;
// Reuse the quad position as the texture coordinate, transformed
// by the texture matrix to select the src rectangle.
texcoord = (textureMatrix * position).xy;
}
</script>
<script id="fs" type="not-js">
// Fragment shader: sample the bound texture at the interpolated
// texture coordinate.
precision mediump float;
varying vec2 texcoord;
uniform sampler2D texture;
void main() {
gl_FragColor = texture2D(texture, texcoord);
}
</script>
<canvas id="c"></canvas>
To understand the matrix math, see these articles and work your way backward or forward through them.

Animating in 2D WebGL

I'm attempting to animate a projectile's trajectory (in the form of a cannon ball) given an angle and initial velocity. I've built the "cannon" in the form of a line and the target I'm aiming for in the form of a box, which I know is elementary but I just want to get the projectile motion down for now. Currently, I'm messing around with hardcoded angles and velocity, but eventually would like to input the angle and velocity and have the cannon shoot following the input. The target is parallel to the launch point, so I know that the x value of the cannon will be (initialVelocity)cos(angle)(time), and the y will be (initialVelocity)sin(angle)(time) - (g*t^2)/2, where g is the gravitational acceleration. Currently what I have is a cannon ball moving linearly across the screen, and it doesn't even start in the right spot.
I'm not asking for code to be written for me, I'd just like a starting point as to how to get the cannon to move from the right spot, and to know where I'm going completely wrong. I'm confident I can get it to hit the target if I'm taught how to manipulate the shaders correctly.
Shaders:
<script id="vertex-shader" type="x-shader/x-vertex">
// Vertex shader for the cannon ball. Currently a pure pass-through:
// the ball's motion is computed on the CPU (see initBall), not here.
precision mediump float;
attribute vec4 vPosition;
attribute vec4 vColor;
varying vec4 fColor;
// NOTE(review): `time` is uploaded each frame by render() but is never
// read in this shader, so it has no effect on the output.
uniform float time;
void main()
{
/*old code from manipulating clock hands*/
/* fColor = vColor;
float length = sqrt(vPosition.x*vPosition.x + vPosition.y * vPosition.y);
gl_Position.x = length*cos(theta);
gl_Position.y = length*sin(theta);
gl_Position.z = 0.0;
gl_Position.w = 1.0; */
// Pass the color through and draw the vertex at its buffered position.
fColor = vColor;
gl_Position = vPosition;
}
</script>
<script id="background-vertex-shader" type="x-shader/x-vertex">
// Pass-through vertex shader for the static background geometry
// (circle, square, cannon line): forwards position and color unchanged.
precision mediump float;
attribute vec4 vPosition;
attribute vec4 vColor;
varying vec4 fColor;
void main()
{
fColor = vColor;
gl_Position = vPosition;
}
</script>
<script id="fragment-shader" type="x-shader/x-fragment">
// Shared fragment shader: output the interpolated per-vertex color.
precision mediump float;
varying vec4 fColor;
void main()
{
gl_FragColor = fColor;
}
</script>
WebGL code:
// WebGL context, created in init().
var gl;
// Flat lists of vertex positions and colors for everything drawn each
// frame; drawArrays offsets below index into these.
var points = [];
var colors = [];
var cannonpoints = [];
// Vertex counts for the circle and square, used to compute the
// drawArrays start offsets in render().
var circlepoints;
var squarepoints;
// Palette used when building shapes: red, green, blue, white, black.
var baseColors = [
vec3(1.0,0.0,0.0),
vec3(0.0,1.0,0.0),
vec3(0.0,0.0,1.0),
vec3(1.0,1.0,1.0),
vec3(0.0,0.0,0.0)
];
// Shader programs for the ball and the static background.
var program;
var backgroundprogram;
// Simulation clock, advanced by render(); uploaded as the (currently
// unused) `time` uniform.
var Time;
var thetaLoc;
// Launch parameters captured from the UI, and the derived velocity
// components.
var angle;
var initialVel;
var vx;
var vy;
// Ball starting position (clip-space coordinates).
var ballX = -0.5;
var ballY = -0.5;
// Page entry point: sets up the WebGL context, compiles both shader
// programs, wires the "shoot" button, builds the static scene, and
// starts the render loop.
window.onload = function init(){
  var canvas = document.getElementById("gl-canvas");
  gl = WebGLUtils.setupWebGL(canvas);
  if(!gl) {
    alert("webGL isn't available");
  }
  // configuring WebGL
  gl.viewport(0,0,
    canvas.width,canvas.height);
  gl.clearColor(0.0,0.0,1.0,1.0); // set background color to blue.
  // load the shaders and initialize
  // the attribute buffers.
  program = initShaders(gl, "vertex-shader", "fragment-shader");
  // BUG FIX: the element id was "fragment- shader" (embedded space), so
  // getElementById could never find the fragment shader script.
  backgroundprogram = initShaders(gl, "background-vertex-shader", "fragment-shader");
  document.getElementById("shoot").onclick = function() {
    // `var` added so velocity is not an implicit global.
    var velocity = document.getElementById("velocity").value;
    angle = document.getElementById("angle").value;
    console.log("angle="+angle);
    // Decompose the launch speed into x/y components (angle in degrees).
    vx = (Math.cos(angle*(Math.PI/180))*velocity);
    console.log("vx="+vx);
    vy = (Math.sin(angle*(Math.PI/180))*velocity);
    console.log("vy="+vy);
  }
  Time = 0.0;
  thetaLoc = gl.getUniformLocation(program,"time");
  initBackground();
  /******************
  initBall(Time,1);
  *******************/
  initBall(Time);
  //render();
  setInterval(render, 100);
};
// Per-frame draw: clears the canvas, then draws the background shapes
// and the cannon ball from the shared vertex buffer. Draw order and the
// drawArrays offsets depend on the order the shapes were appended to
// `points`, so the sequence below must not be rearranged.
function render(){
gl.clear(gl.COLOR_BUFFER_BIT);
/* draw the circle */
gl.drawArrays(gl.TRIANGLE_FAN,0,circlepoints);
/* draw the square(s) */
gl.drawArrays(gl.TRIANGLES,circlepoints,squarepoints);
//draw the cannon
gl.drawArrays(gl.LINES,circlepoints+squarepoints,2);
//draw the cannon ball
//starting index is the amount of points already drawn
//amount of points for circle + amount of points for square + amount of points for line
var start = circlepoints + squarepoints + 2;
// Advance the simulation clock and rebuild the ball geometry at the
// new position. NOTE(review): initBall appears to append fresh vertices
// and create new GL buffers every frame — confirm that filled_circle
// replaces rather than grows `points`, otherwise this leaks.
Time += 0.01;
initBall(Time); //,1);
gl.uniform1f(thetaLoc,Time);
//amount of points to draw is length of points array minus the start index
gl.drawArrays(gl.TRIANGLE_FAN,start,points.length-start);
}
// Rebuilds the cannon ball geometry at simulation time `Time` and
// uploads positions and colors to the GPU.
// NOTE(review): this is called every frame from render() yet calls
// gl.createBuffer() twice per call without ever deleting the old
// buffers — a GPU resource leak; the buffers should be created once
// and reused. Also presumably filled_circle pushes vertices into the
// module-level `points`/`colors` arrays — verify it does not grow them
// unboundedly across frames.
function initBall(Time) { //,r){
gl.useProgram(program);
/*******************************************************
filled_circle(vec2(r*Math.cos(Time),r*Math.sin(Time)),0.05,4);*/
// Hard-coded 60° launch at unit speed; linear motion only — no gravity
// term is applied here yet, which is why the ball travels in a line.
vx= (Math.cos(60*(Math.PI/180))*1);
vy= (Math.sin(60*(Math.PI/180))*1);
filled_circle(vec2(-0.8+(vx*Time),-0.3+(vy*Time)),0.05,4);
// Load the data into the GPU
var bufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, bufferId);
gl.bufferData(gl.ARRAY_BUFFER,
flatten(points),
gl.STATIC_DRAW);
// Associate our shader variables with
// the data buffer.
var vPosition = gl.getAttribLocation(program,"vPosition");
gl.vertexAttribPointer(vPosition,2,gl.FLOAT,false,0,0);
gl.enableVertexAttribArray(vPosition);
// load color data to the gpu
var cBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER,
cBuffer);
gl.bufferData(gl.ARRAY_BUFFER,
flatten(colors),
gl.STATIC_DRAW);
var vColor = gl.getAttribLocation(
program, "vColor");
gl.vertexAttribPointer(vColor,3,
gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vColor);
}
I think the easiest way to do it is give your projectile a starting position, velocity and acceleration. Then the position of the projectile at any time is position + velocity * time + (acceleration * time * time) / 2. The angle of the projectile would just be the angle of the projectile's current velocity.
If you want to eventually add other stuff like collisions then it's probably a good idea to make the projectile track its current velocity and acceleration; and on each frame the position and velocity change based on the elapsed time between frames. Like so:
// Advance the projectile by one time step of `dt` seconds.
// Semi-implicit Euler: velocity is updated first, then position is
// advanced with the NEW velocity — do not reorder these two lines.
// NOTE(review): written with scalar `+=`; real code needs per-component
// (x/y) updates or a vector type with overloaded operators — confirm.
Projectile.prototype.update = function(dt){
this.velocity += this.acceleration * dt;
this.position += this.velocity * dt;
// Face the direction of travel.
this.angle = getAngle(this.velocity);
};
And on each frame, call projectile.update(dt) where dt = currentFrameTime - lastFrameTime.

Categories