How to render binary data on canvas using WebGl? - javascript

I am using PNaCl ffmpeg to open, read and decode RTSP stream. I am now having raw video frames which I need to transfer to WebGl to render on the canvas.
How can I render binary data on the canvas?
I am running the following code: I presume that I should get a grey canvas after running this code, because I am passing RGBA values of (120,120,120,1) to the synthetic data.
// NOTE(review): this is the question's repro code; initWebGL/initViewport are
// helper functions defined elsewhere by the asker.
var canvas = document.getElementById('canvas');
var gl = initWebGL(canvas); //function initializes webgl
initViewport(gl, canvas); //initializes view port
console.log('viewport initialized');
// Build 256 RGBA texels for a 16x16 synthetic grey image.
var data = [];
for (var i = 0 ; i < 256; i++){
// NOTE(review): alpha is pushed as 1.0, but a Uint8Array stores 0-255, so
// this becomes an alpha of 1/255 (almost fully transparent) -- see the
// answer below.
data.push(120,120,120,1.0);
}
console.log(data);
var pixels = new Uint8Array(data); // 16x16 RGBA image
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Upload the synthetic pixels as a 16x16 RGBA texture (mip level 0 only).
gl.texImage2D(
gl.TEXTURE_2D, // target
0, // mip level
gl.RGBA, // internal format
16, 16, // width and height
0, // border
gl.RGBA, //format
gl.UNSIGNED_BYTE, // type
pixels // texture data
);
// NOTE(review): nothing is drawn here -- no shaders, no vertex data, no draw
// call -- so the canvas stays blank; the answer below explains this.
console.log('pixels');
console.log(pixels);
<canvas id="canvas"></canvas>
I should get a grey 16x16 box being represented on the canvas, but I am not getting that. What additional steps do I need to take to correctly render the 2D bitmap on the canvas?
PS. I am taking help from this article.
Console output:

As pointed out in the comments, alpha for the type of texture you're creating (gl.UNSIGNED_BYTE) ranges from 0 to 255. You're putting in 1.0, which becomes 1/255 — an alpha of roughly 0.004.
But on top of that you say
I am running the following code: I presume that I should get a grey canvas after running this code
That code is not enough for WebGL. WebGL requires you to supply a vertex shader and fragment shader, vertex data for vertices and then call either gl.drawArrays or gl.drawElements to render something. The code you provided doesn't do those things and without those things we can't tell what else you're doing.
You're also only supplying mip level 0. You either need to supply mips or set texture filtering so only the first level is used otherwise the texture is unrenderable (you'll get a warning about it in the JavaScript console of most browsers).
Here's a working example
// Working example: render a grey 16x16 texture onto a full-canvas quad
// using twgl for the shader/buffer boilerplate.
const canvas = document.getElementById('canvas');
const gl = canvas.getContext("webgl");

// 256 texels of opaque grey (alpha must be 0-255 for UNSIGNED_BYTE textures).
const greyTexel = [120, 120, 120, 255];
const pixels = new Uint8Array(
  Array.from({ length: 256 }, () => greyTexel).flat()
); // 16x16 RGBA image

const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(
  gl.TEXTURE_2D,    // target
  0,                // mip level
  gl.RGBA,          // internal format
  16, 16,           // width and height
  0,                // border
  gl.RGBA,          // format
  gl.UNSIGNED_BYTE, // type
  pixels            // texture data
);
gl.generateMipmap(gl.TEXTURE_2D); // you need to do this or set filtering

// compiles and links the shaders and looks up uniform and attribute locations
const programInfo = twgl.createProgramInfo(gl, ["vs", "fs"]);

// Two clip-space triangles covering the whole canvas.
const quadPositions = [
  -1, -1, 0,
  1, -1, 0,
  -1, 1, 0,
  -1, 1, 0,
  1, -1, 0,
  1, 1, 0,
];
const arrays = { position: quadPositions };
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
const uniforms = { u_texture: texture };

gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniforms(programInfo, uniforms);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, gl.TRIANGLES, bufferInfo);
canvas { border: 1px solid black; }
<script id="vs" type="notjs">
attribute vec4 position;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
// Since we know we'll be passing in -1 to +1 for position
v_texcoord = position.xy * 0.5 + 0.5;
}
</script>
<script id="fs" type="notjs">
precision mediump float;
uniform sampler2D u_texture;
varying vec2 v_texcoord;
void main() {
gl_FragColor = texture2D(u_texture, v_texcoord);
}
</script>
<script src="https://twgljs.org/dist/twgl.min.js"></script>
<canvas id="canvas"></canvas>

Related

Prevent texture rounding error for pixel graphics

To prevent XY-problems, I'll describe the problem, followed by my current idea for a solution. Given this has to have been solved a thousand times by other people, I need a sanity check, whether I am running down the completely wrong path, or not.
In 2D pixel graphics (GL_NEAREST), when translating by half of a screen pixel, rounding behavior determines the color of a fragment. This means frequently choosing the wrong texel.
From my understanding, in the following image:
On the left side, fragments' UV-coordinates will be exactly between texels (by construction, UVs selecting size of the quad pixels from the texture).
The rasterizer won't include some fragments (e.g. left side, red stripes, not shown on the right), due to conflict rules for seamless connections between triangles.
Default rounding will only be correct, if UV-space up is also screen-space up. This fails immediately when rotating, see the right side.
E.g. for a y-flipped texture, the selected texels are one row off. Clamping to prevent texture bleeds would result in one row being used twice, and one missing¹. When freely rotating sprites, these errors may occur for certain rows or columns.
I'm aware of two common approaches, which both seem to fail:
Rounding vertices to pixel-corners, which I don't see working when sprites can rotate.
Having transparent border pixels in the texture-atlas. This is a somewhat dirty patch, and has costs. It destroys power-of-two sized textures (unless increasing the size by four times), and required border-size depends on mipmap-level.
For 2D pixel-graphics, almost everything is a textured quad. Given this, my current idea is to render each quad slightly larger (to have both side's fragments included in the problem-case). I'd then manually round to middle of texels in the fragment-shader, and drop fragments, which would be outside of the intended texture.
Is this a bad idea? How do other people solve this?
(1): Example webgl code always needs a lot of boilerplate. The problem imho does not require a demonstration, but people do love clicking things which display something.
The following has y-flipped UVs, and clamps in the fragment-shader. To be able to see texels selected, a colorful texture is generated:
The entire thing is on 8x8 pixels, using a quad constructed to be 5x5, upscaled for ease-of-view.
// Demo: an 8x8 WebGL2 canvas drawing a 5x5 textured quad with y-flipped,
// fragment-shader-clamped UVs, used to visualize texel-selection/rounding
// issues described in the question text.
const assert = (condition, message) => {
if (!condition) throw new Error(message);
};
const canvas = document.getElementById('canvas');
const gl = canvas.getContext('webgl2', { antialias: false });
assert(gl !== null, `WebGL2 was unexpectedly not supported.`);
// Gaussian-style falloff used to synthesize a colorful 8x8 test texture so
// individual texels are visually distinguishable.
const distribution = (x) => Math.E ** (-((1.6 * x) ** 2));
const rollover = 11;
const getColors = (i) => [0, 1, 2].map(e => Math.round(0xff * distribution((((e + 0.5) + (i % rollover) / rollover * 3) % 3) - 1.5)));
const textureSize = 8;
const testImage = new Uint8Array(Array.from(
{ length: textureSize * textureSize },
(_, i) => [...getColors(i), 0xff],
).flat());
// Quad corners in pixel units (5x5, shifted down half a pixel), mapped to
// clip space on an 8x8 canvas.
const positionPerPixel = 2 / 8;
const selectionInPixels = 5;
const offsetY = -0.5;
const vertices = [
[0, 0 + offsetY],
[selectionInPixels, 0 + offsetY],
[selectionInPixels, selectionInPixels + offsetY],
[0, selectionInPixels + offsetY],
].map(coordPair => coordPair.map(coord => (coord - 2) * positionPerPixel));
// Texture coordinates are y-flipped relative to the vertices (first pair has
// y = selectionInPixels), offset 2 pixels into the texture.
const pixelOffset = 2;
const normalizedCoordPerPixel = 1 / textureSize;
const textureCoords = [
[0, selectionInPixels],
[selectionInPixels, selectionInPixels],
[selectionInPixels, 0],
[0, 0],
].map(coordPair => coordPair.map(coord => (coord + pixelOffset) * normalizedCoordPerPixel));
// Interleave: [x, y, u, v] per vertex (16-byte stride below).
const vData = new Float32Array(vertices.map((e, i) => [...e, ...textureCoords[i]]).flat());
const indices = new Uint16Array([0, 1, 2, 0, 2, 3]);
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 8, 8, 0, gl.RGBA, gl.UNSIGNED_BYTE, testImage);
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(0);
gl.enableVertexAttribArray(1);
const vBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vBuffer);
// Attribute 0 = position (offset 0), attribute 1 = UV (offset 8), 16-byte stride.
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 16, 0);
gl.vertexAttribPointer(1, 2, gl.FLOAT, false, 16, 8);
gl.bufferData(gl.ARRAY_BUFFER, vData, gl.STATIC_DRAW);
const iBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
const vertexShaderSrc = `#version 300 es
precision highp float;
layout(location = 0) in vec2 aPosition;
layout(location = 1) in vec2 aTextureCoord;
out vec2 vTextureCoord;
void main(void) {
gl_Position = vec4(aPosition, 0.0, 1.0);
vTextureCoord = aTextureCoord;
}`;
// The clamp bounds are baked into the fragment shader source from the
// min/max of the generated texture coordinates.
const maxTextureCoord = Math.max(...textureCoords.flat());
const minTextureCoord = Math.min(...textureCoords.flat());
const fragmentShaderSrc = `#version 300 es
precision highp float;
uniform sampler2D sampler;
in vec2 vTextureCoord;
out vec4 fColor;
void main(void){
vec2 clamped = vec2(
clamp(vTextureCoord.x, ${minTextureCoord} + 0.01, ${maxTextureCoord} - 0.01),
clamp(vTextureCoord.y, ${minTextureCoord} + 0.01, ${maxTextureCoord} - 0.01)
);
fColor = texture(sampler, clamped);
}`;
const program = gl.createProgram();
assert(program !== null, `Program was unexpectedly \`null\`.`);
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
assert(vertexShader !== null, `Vertex-shader was unexpectedly \`null\`.`);
gl.shaderSource(vertexShader, vertexShaderSrc);
gl.compileShader(vertexShader);
assert(gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS), `Vertex-shader failed to compile:\n${gl.getShaderInfoLog(vertexShader)}`);
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
// FIX: this message previously said "Vertex-shader" even though it guards
// the fragment-shader creation.
assert(fragmentShader !== null, `Fragment-shader was unexpectedly \`null\`.`);
gl.shaderSource(fragmentShader, fragmentShaderSrc);
gl.compileShader(fragmentShader);
assert(gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS), `Fragment-shader failed to compile:\n${gl.getShaderInfoLog(fragmentShader)}`);
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
assert(gl.getProgramParameter(program, gl.LINK_STATUS), `Program linking failed:\n${gl.getProgramInfoLog(program)}`);
gl.useProgram(program);
const uniformLocationSampler = gl.getUniformLocation(program, 'sampler');
gl.uniform1i(uniformLocationSampler, 0);
gl.clearColor(0, 0, 0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
#canvas {
width: 256px;
height: 256px;
image-rendering: pixelated;
}
<canvas id='canvas' width='8' height='8'></canvas>
The code is a slightly modified copy&paste from a related problem (limited subtexel precision leading to UVs snapping to exactly half-way between texels). Therefore, some oddities may still be left in there.

samplerCube in struct cause GL_INVALID_OPERATION

I have the following code in glsl:
// snippet 1, works well
uniform vec4 uDiffuse;
uniform sampler2D uDiffuseMap;
uniform vec4 uSpecular;
uniform sampler2D uSpecularMap;
uniform float uShininess;
uniform samplerCube uEnvironmentMap;
// snippet 2, not work
struct PhongMaterial {
vec4 diffuse;
sampler2D diffuseMap;
vec4 specular;
sampler2D specularMap;
float shininess;
samplerCube environmentMap; // works well if I remove this line.
};
But it throw the following error:
[.WebGL-0x7fabfe002e00]RENDER WARNING: there is no texture bound to the unit 0
[.WebGL-0x7fabfe002e00]GL ERROR :GL_INVALID_OPERATION : GetShaderiv: <- error from previous GL command
[.WebGL-0x7fabfe002e00]GL ERROR :GL_INVALID_OPERATION : GLES2DecoderImpl::DoBindTexImage2DCHROMIUM: <- error from previous GL command
[.WebGL-0x7fabfe002e00]GL ERROR :GL_INVALID_OPERATION : glFramebufferTexture2D: <- error from previous GL command
[.WebGL-0x7fabfe002e00]GL ERROR :GL_INVALID_OPERATION : GLES2DecoderImpl::DoBindTexImage2DCHROMIUM: <- error from previous GL command
WebGL: too many errors, no more errors will be reported to the console for this context.
Here is an example:
https://codepen.io/scarletsky/pen/KEgBzx?editors=1010
What I want to do is implement a shader that can receive both a sampler2D and a samplerCube. When no samplerCube is passed to the shader, it throws an error.
I have no idea what to do next. Can anyone help me ?
Your real error is likely some where else and related to you not binding your textures correctly or looking up the wrong locations or something else
RENDER WARNING: there is no texture bound to the unit 0
Here's a working example with your uniform structure
// Working example: a uniform struct containing sampler2D and samplerCube
// members. Each sampler uniform is looked up by its full "struct.member"
// name and pointed at its own texture unit via gl.uniform1i.
const fs = `
precision mediump float;
struct PhongMaterial {
vec4 diffuse;
sampler2D diffuseMap;
vec4 specular;
sampler2D specularMap;
float shininess;
samplerCube environmentMap;
};
uniform PhongMaterial material;
void main() {
vec4 diffuse = texture2D(material.diffuseMap, gl_PointCoord.xy);
vec4 specular = texture2D(material.specularMap, gl_PointCoord.xy);
vec4 cube = textureCube(
material.environmentMap,
vec3(gl_PointCoord.xy, gl_PointCoord.x * gl_PointCoord.y) * 2. - 1.);
// use all 3 textures so we can see they were set
vec4 diffuseOrSpecular = mix(diffuse, specular, step(0.25, gl_PointCoord.y));
gl_FragColor = mix(diffuseOrSpecular, cube, step(0.5, gl_PointCoord.y));
}
`
// Vertex shader: a single 128px point centered in clip space.
const vs = `
void main() {
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 128.0;
}
`;
const gl = document.querySelector('canvas').getContext('webgl');
const prg = twgl.createProgram(gl, [vs, fs]);
// Struct members are addressed as "material.<member>" in getUniformLocation.
const diffuseLocation = gl.getUniformLocation(prg, 'material.diffuseMap');
const specularLocation = gl.getUniformLocation(prg, 'material.specularMap');
const envmapLocation = gl.getUniformLocation(prg, 'material.environmentMap');
// 1x1 single-color placeholder textures (complete with only mip level 0).
const texDiffuse = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texDiffuse);
{
const level = 0;
const format = gl.RGBA;
const width = 1;
const height = 1;
const type = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([255, 255, 0, 255]); // yellow
gl.texImage2D(gl.TEXTURE_2D, level, format, width, height, 0, format, type, pixel);
}
const texSpecular = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texSpecular);
{
const level = 0;
const format = gl.RGBA;
const width = 1;
const height = 1;
const type = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([0, 0, 255, 255]); // blue
gl.texImage2D(gl.TEXTURE_2D, level, format, width, height, 0, format, type, pixel);
}
// Cube map: one 1x1 face per side, colored from the face index bits.
const texCube = gl.createTexture();
gl.bindTexture(gl.TEXTURE_CUBE_MAP, texCube);
for (let i = 0; i < 6; ++i) {
const level = 0;
const format = gl.RGBA;
const width = 1;
const height = 1;
const type = gl.UNSIGNED_BYTE;
// NOTE(review): (i & 2) and (i & 4) evaluate to 2 and 4, so these products
// (510, 1020) wrap modulo 256 when stored in the Uint8Array (254, 252) --
// likely intended as ((i >> 1) & 1) * 255 and ((i >> 2) & 1) * 255.
const pixel = new Uint8Array([(i & 1) * 255, (i & 2) * 255, (i & 4) * 255, 255]);
gl.texImage2D(gl.TEXTURE_CUBE_MAP_POSITIVE_X + i, level, format, width, height, 0, format, type, pixel);
}
gl.useProgram(prg);
// put the yellow diffuse texture on texture unit 0
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_2D, texDiffuse);
// use texture on texture unit 0
gl.uniform1i(diffuseLocation, 0);
// put the blue specular texture on texture unit 1
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_2D, texSpecular);
// tell the specular sampler to use texture unit 1
gl.uniform1i(specularLocation, 1);
// put the cubemap on texture unit 2
gl.activeTexture(gl.TEXTURE0 + 2);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, texCube);
// tell the cubemap sampler to use texture unit 2
gl.uniform1i(envmapLocation, 2);
// draw one 128x128 pixel point
gl.drawArrays(gl.POINTS, 0, 1);
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
NOTE: you are required to supply a valid texture for every sampler uniform your shader claims is being used regardless of whether or not it is actually being used.
To find out if the shader claims it's being used call
gl.getUniformLocation(program, nameOfSamplerUniform);
If it returns non-null then AFAIK as WebGL is concerned you must supply a valid texture for that sampler.
If you don't actually need one (because of conditionals or something) then keep around a 1 pixel texture for 2D or 6 pixel texture, 1 pixel per face for cube maps and attach that texture when you don't need a specific texture.
For these cases I usually keep a white and/or black texture around. For example let's say I had math like
color = diffuseMapColor * diffuseColor + envMapColor;
If I only want diffuseColor then I can set diffuseMapColor to white and envMapColor to black which is effectively
color = 1 * diffuseColor + 0;
Similarly of I only want diffuseMapColor I can set diffuseColor to white and envMapColor to black and get
color = diffuseMapColor * 1 + 0;
and if I only want envMapColor then setting diffuseColor to 0 will work
color = diffuseMapColor * 0 + envMapColor;
is the same as
color = 0 + envMapColor;
On the other hand, most 3D engines would generate a different shaders for these cases. If no environment map is used they'd generate a shader that doesn't include an environment map. This is because generally doing less work in a shader is faster than doing more so a good 3D engine generates shaders for each case of what is needed.

How to add interpolation using webgl vertex and fragment shaders to the image

Hello everyone,
I am trying to render an image using WebGL shaders, and I have successfully done that using the WebGL samples. But the issue is that when I increase the size of the image, its quality is not good. I want to upscale and interpolate the image using the vertex and fragment shaders. Here is my sample:
"use strict";
// Entry point of the question's sample: load the CT image (requesting CORS
// when needed) and render it once it arrives.
function main() {
var image = new Image();
requestCORSIfNotSameOrigin(image, "https://upload.wikimedia.org/wikipedia/commons/5/57/Pneumothorax_CT.jpg")
image.src = "https://upload.wikimedia.org/wikipedia/commons/5/57/Pneumothorax_CT.jpg";
// NOTE(review): width/height are forced to 1000x1000 here; per the answer
// below, the file's natural size is 954x687.
image.width = 1000;
image.height = 1000;
image.onload = function() {
render(image);
}
}
// Renders the loaded image as a textured rectangle using the shaders in the
// #2d-vertex-shader / #2d-fragment-shader script tags (question's code).
function render(image) {
// Get A WebGL context
/** #type {HTMLCanvasElement} */
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
if (!gl) {
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var texcoordLocation = gl.getAttribLocation(program, "a_texCoord");
// Create a buffer to put three 2d clip space points in
var positionBuffer = gl.createBuffer();
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Set a rectangle the same size as the image.
setRectangle(gl, 0, 0, image.width, image.height);
// provide texture coordinates for the rectangle.
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0,
]), gl.STATIC_DRAW);
// Create a texture.
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// NOTE(review): NEAREST filtering is why the upscaled image looks blocky;
// the answer below recommends LINEAR, or mips on a power-of-2 texture.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
// lookup uniforms
var resolutionLocation = gl.getUniformLocation(program, "u_resolution");
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Clear the canvas
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Turn on the position attribute
gl.enableVertexAttribArray(positionLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionLocation, size, type, normalize, stride, offset)
// Turn on the teccord attribute
gl.enableVertexAttribArray(texcoordLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
// NOTE(review): these lines re-declare size/type/normalize/stride/offset
// from above -- legal with `var`, but confusing.
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
texcoordLocation, size, type, normalize, stride, offset)
// set the resolution
gl.uniform2f(resolutionLocation, gl.canvas.width, gl.canvas.height);
// Draw the rectangle.
var primitiveType = gl.TRIANGLES;
var offset = 0;
var count = 6;
gl.drawArrays(primitiveType, offset, count);
}
// Uploads two triangles covering the rectangle [x, x+width] x [y, y+height]
// into the currently bound ARRAY_BUFFER.
function setRectangle(gl, x, y, width, height) {
  const left = x;
  const right = x + width;
  const top = y;
  const bottom = y + height;
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
    left, top,
    right, top,
    left, bottom,
    left, bottom,
    right, top,
    right, bottom,
  ]), gl.STATIC_DRAW);
}
main();
// This is needed if the images are not on the same domain
// NOTE: The server providing the images must give CORS permissions
// in order to be able to use the image with WebGL. Most sites
// do NOT give permission.
// See: http://webglfundamentals.org/webgl/lessons/webgl-cors-permission.html
// Enables anonymous CORS on the image element only when the URL's origin
// differs from the page's origin.
function requestCORSIfNotSameOrigin(img, url) {
  const isSameOrigin = new URL(url).origin === window.location.origin;
  if (!isSameOrigin) {
    img.crossOrigin = "";
  }
}
@import url("https://webglfundamentals.org/webgl/resources/webgl-tutorials.css");
body {
margin: 0;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
<div style="height:700px; width:700px; overflow:scroll;">
<canvas id="canvas"></canvas>
</div>
<!-- vertex shader -->
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord; void main() {
// convert the rectangle from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
// pass the texCoord to the fragment shader
// The GPU will interpolate this value between points.
v_texCoord = a_texCoord;
}
</script>
<!-- fragment shader -->
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
// our texture
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
// Look up a color from the texture.
gl_FragColor = texture2D(u_image, v_texCoord);
}
</script>
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
I need interpolation when image zoomed or if set by maximum height like AMI exmaple provided below Check This sample
It's not clear what you want to happen.
First off you set gl.NEAREST as your filtering. WebGL has several kind of filtering covered here. Setting them to gl.LINEAR would be better but only
a little
The problem is WebGL 1.0 doesn't support mips for images that are not power-of-2 dimensions (2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, etc.). That page describes what mips are used for (interpolation), but mips can only be used on images with power-of-2 dimensions. The image you're trying to display is not power-of-2 dimensions; it's 954 × 687.
You have a few different options.
Download the image, edit to be power of 2 in both dimensions in a photo editing application. Then call gl.generateMipmap to generate mips for interpolation as described in that page
Copy the image to canvas that's a power of 2 in size then upload the canvas as a texture
Create a texture that's the next largest power of 2 then upload your image
// Rounds v up to the nearest power of two (returns v itself if it already
// is one). Assumes v > 0.
function nearestGreaterOrEqualPowerOf2(v) {
return 2 ** Math.ceil(Math.log2(v));
}
const newWidth = nearestGreaterOrEqualPowerOf2(image.width);
const newHeight = nearestGreaterOrEqualPowerOf2(image.height);
// first make an empty texture of the new size
const level = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const border = 0;
gl.texImage2D(gl.TEXTURE_2D, level, format, newWidth, newHeight, border,
format, type, null);
// then upload the image into the bottom left corner of the texture
const xoffset = 0;
const yoffset = 0;
gl.texSubImage2D(gl.TEXTURE_2D, level, xoffset, yoffset, format, type, image);
// now because the texture is a power of 2 in both dimensions you can
// generate mips and turn on maximum filtering
gl.generateMipmap(gl.TEXTURE_2D);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
You have a new issue though in all these cases which is that the image is now just using a portion of the texture. You'd have to adjust your texture coordinates either using a texture matrix or by adjusting your texture coordinates directly.
// compute needed texture coordinates to show only the portion of the
// texture the image occupies.
// FIX: the original divided newWidth / image.width (>= 1), which samples
// past the image into the texture's empty padding; the occupied fraction
// is imageSize / textureSize.
var u = image.width / newWidth;
var v = image.height / newHeight;
// provide texture coordinates for the rectangle.
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0, 0,
u, 0,
0, v,
0, v,
u, 0,
u, v,
]), gl.STATIC_DRAW);
"use strict";
// Entry point: load the CT image (requesting CORS when cross-origin) and
// render it once loaded.
function main() {
  const url = "https://upload.wikimedia.org/wikipedia/commons/5/57/Pneumothorax_CT.jpg";
  const image = new Image();
  requestCORSIfNotSameOrigin(image, url);
  image.src = url;
  image.onload = () => render(image);
}
// Renders the image as a textured rectangle, padding the texture up to
// power-of-2 dimensions so mips + LINEAR_MIPMAP_LINEAR filtering work in
// WebGL 1 (answer's working example).
function render(image) {
// Get a WebGL context.
/** @type {HTMLCanvasElement} */
const canvas = document.getElementById("canvas");
const gl = canvas.getContext("webgl");
if (!gl) {
return;
}
// Compile/link the shader pair and look up attribute locations.
const program = webglUtils.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
const positionLocation = gl.getAttribLocation(program, "a_position");
const texcoordLocation = gl.getAttribLocation(program, "a_texCoord");
// Round a dimension up to the next power of 2 (WebGL 1 mips need pow-2 sizes).
function nearestGreaterOrEqualPowerOf2(v) {
return Math.pow(2, Math.ceil(Math.log2(v)));
}
const newWidth = nearestGreaterOrEqualPowerOf2(image.width);
const newHeight = nearestGreaterOrEqualPowerOf2(image.height);
// Position buffer: a rectangle fitting the canvas width at the image's
// aspect ratio.
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
const drawWidth = canvas.clientWidth;
// FIX: the original computed canvas.clientWidth / drawWidth * image.height,
// which always equals image.height; scale the height by the draw/image
// width ratio to preserve the aspect.
const drawHeight = drawWidth / image.width * image.height;
setRectangle(gl, 0, 0, drawWidth, drawHeight);
// Texture coordinates covering only the portion of the pow-2 texture the
// image occupies.
// FIX: the original used newWidth / image.width (>= 1), which samples past
// the image into the empty padding; use imageSize / textureSize.
const u = image.width / newWidth;
const v = image.height / newHeight;
const texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0, 0,
u, 0,
0, v,
0, v,
u, 0,
u, v,
]), gl.STATIC_DRAW);
// Create a pow-2 texture, then copy the image into its corner.
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
{
const level = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const border = 0;
// First make an empty texture of the padded size...
gl.texImage2D(gl.TEXTURE_2D, level, format, newWidth, newHeight, border,
format, type, null);
// ...then upload the image into its corner.
gl.texSubImage2D(gl.TEXTURE_2D, level, 0, 0, format, type, image);
}
// The texture is pow-2 in both dimensions, so generate mips and turn on
// maximum-quality filtering.
gl.generateMipmap(gl.TEXTURE_2D);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// Look up uniforms, size the drawing buffer, and clear.
const resolutionLocation = gl.getUniformLocation(program, "u_resolution");
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(program);
// Bind a buffer to an attribute as tightly packed pairs of 32-bit floats
// (replaces the original's duplicated size/type/normalize/stride/offset
// var declarations).
const setupAttribute = (location, buffer) => {
gl.enableVertexAttribArray(location);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.vertexAttribPointer(location, 2, gl.FLOAT, false, 0, 0);
};
setupAttribute(positionLocation, positionBuffer);
setupAttribute(texcoordLocation, texcoordBuffer);
// Set the resolution and draw the rectangle's two triangles.
gl.uniform2f(resolutionLocation, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
// Fills the currently bound ARRAY_BUFFER with two triangles spanning the
// rectangle whose top-left corner is (x, y).
function setRectangle(gl, x, y, width, height) {
  const [x1, y1, x2, y2] = [x, y, x + width, y + height];
  const positions = new Float32Array([
    x1, y1, x2, y1, x1, y2,
    x1, y2, x2, y1, x2, y2,
  ]);
  gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}
main();
// This is needed if the images are not on the same domain
// NOTE: The server providing the images must give CORS permissions
// in order to be able to use the image with WebGL. Most sites
// do NOT give permission.
// See: http://webglfundamentals.org/webgl/lessons/webgl-cors-permission.html
// Requests anonymous CORS for the image when its URL is cross-origin
// relative to the current page.
function requestCORSIfNotSameOrigin(img, url) {
  const requestOrigin = new URL(url).origin;
  if (requestOrigin !== window.location.origin) {
    img.crossOrigin = "";
  }
}
@import url("https://webglfundamentals.org/webgl/resources/webgl-tutorials.css");
body {
margin: 0;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
<div style="height:700px; width:700px; overflow:scroll;">
<canvas id="canvas"></canvas>
</div>
<!-- vertex shader -->
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord; void main() {
// convert the rectangle from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
// pass the texCoord to the fragment shader
// The GPU will interpolate this value between points.
v_texCoord = a_texCoord;
}
</script>
<!-- fragment shader -->
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
// our texture
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
// Look up a color from the texture.
gl_FragColor = texture2D(u_image, v_texCoord);
}
</script>
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
Already really informative and good answers and comments here.
Also please take into account that the link you provided uses high-quality, high-resolution images with excellent quality and no compression artifacts.
Unpacked ~21mb in NIFTI Data Format.
using ami.js to unpack it:
https://github.com/FNNDSC/ami
Using the example by gman with a good image resolution that fits your screen resolution should give you a decent result.
Yes, there are some algorithms to fix bad image quality and deal with image compression artifacts, but (and I don't want to repeat the comments here) generally speaking, once the information is lost, it is gone.

How to render images in WebGL from ArrayBuffer

I have an image that I am reading on the server side and pushing to the web browser via an AJAX call. I have a requirement where I have to render it line by line using WebGL.
For Example : Image is 640X480 where 640 is width and 480 is height. Now the total number of pixels will be 640*480 = 307200 pixels. So, I want to render the whole image in 640(total width) intervals in a loop using WebGL.
Now I have texture2D (as per my knowledge) in WebGL to do so, but I am not getting any idea of where to start. I also have the ArrayBuffer with me; the only thing is that, using texture2D, I want to render it slowly, line by line.
I am ready to go for any js libraries ,if they are satisfying the requirements.
So, to write a image line by line we can do something like this.
Vertex Shader
attribute vec2 a_position;?
attribute vec2 a_texCoord;?
void main() {
???
}
Fragment Shader
#ifdef GL_ES
precision mediump float;
#endif
uniform float time;
uniform vec2 mouse;
uniform vec2 resolution;
void main( void ) {
vec2 position = 1.0 - gl_FragCoord.xy / resolution;
vec3 color = vec3(1.0);
if (time > position.y * 10.0) {
color = texture2D(uImage0, uv);
}
gl_FragColor = vec4(color, 1.0);
}
Javascript For rendering pixel by pixel
// Creates a width x height WebGL texture and fills it with the bytes in
// dataArray, using `type` as both the internal format and the format.
// Returns the new texture (left bound to TEXTURE_2D).
function createTextureFromArray(gl, dataArray, type, width, height) {
  var pixels = new Uint8Array(dataArray);
  var tex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, tex);
  gl.texImage2D(
    gl.TEXTURE_2D,    // target
    0,                // mip level
    type,             // internal format
    width, height,
    0,                // border (must be 0)
    type,             // format
    gl.UNSIGNED_BYTE, // type of the data
    pixels);
  return tex;
}
// Allocate one byte per pixel for a 640x480 single-channel image.
var arrayBuffer = new ArrayBuffer(640*480);
for (var i=0; i < 640; i++) {
for (var j=0; j < 480; j++) {
// BUG(review): indexing an ArrayBuffer directly does NOT write into its
// bytes -- it only sets an expando property on the object. A typed-array
// view is required, e.g. new Uint8Array(arrayBuffer)[i * 480 + j] = ...
// Also note the index used is `i`, independent of `j`, so the same slot
// is assigned 480 times per outer iteration.
arrayBuffer[i] = Math.floor(Math.random() * 255) + 0; //filling buffer with random data between 0 and 255 which will be further filled to the texture
//NOTE : above data is just dummy data , I will get this data from server pixel by pixel.
}
}
// `canvas` is assumed to be a <canvas> element looked up elsewhere -- confirm.
var gl = canvas.getContext('webgl');
// setup GLSL program (createProgramFromScripts presumably comes from the
// webgl-utils.js helper script included below -- verify)
var program = createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
//what should I add after this ?
Can anybody complete the code , I have no idea of how to write code to accomplish this.
OpenGL is not designed to draw images "line by line." However, you can achieve this effect in software by writing to an array, uploading it as a texture, and sampling from it in your shader when drawing a full screen polygon.
To go about this you should create an unsigned byte array. For each pixel in your image you can have some combination of red, green, blue, and alpha channels. The simplest case would be RGB, 3 unsigned bytes for each pixel. The final array should be sized according to the component size (3), times your width (640), times your height (480). You should initialize the values in your array according to what you want your background color to be, then upload it to the gpu using texImage2D.
To 'draw line by line' would be to update 'width' pixels at a time given a row. Each time you change the image data you should then reupload the image to the gpu then draw the fullscreen polygon.
The fullscreen polygon is simply two triangles that cover the entire clip space of the screen. The screen goes from -1 to 1 in x and y dimensions, so make an array buffer accordingly, upload it with the two triangles, and call drawArrays as you update the texture. The UV's for the polygon should go from 0 to 1, so in your vertex shader you should have a 'varying' output variable that will be 0.5 * position + 0.5. This is used in the fragment shader to sample from the texture.
The official documentation is one of the best places to learn from. The official reference pages for openGL ES or openGL 3 contain relevant information, while the reference card https://www.khronos.org/files/webgl/webgl-reference-card-1_0.pdf show the available functions in WebGL that correspond roughly to the same api.
It's not clear at all what you're trying to accomplish and why you are using WebGL at all. Are you sending one line of data at a time and you want to render that one individual line of data when it's received? Are you sending all the data and you just want to reveal it a line at a time horizontally?
If you have the entire image available then you can just render a larger and larger portion of it using canvas2d. The drawImage function takes optional source and destination rectangles.
// Reveals the image one 1-pixel-wide column at a time by copying column `x`
// of the source image onto the same column of the destination canvas.
// at init time
var x = 0;
// at render time
// NOTE(review): as written, this while-loop copies every remaining column in
// a single pass; to reveal the image gradually, run one iteration per
// animation frame instead of looping to completion.
while (x < img.width) {
var srcX = x;
var srcY = 0;
var srcWidth = 1; // one pixel per frame
var srcHeight = img.height;
var dstX = x;
var dstY = 0;
var dstWidth = 1;
var dstHeight = img.height;
// drawImage(image, sx, sy, sWidth, sHeight, dx, dy, dWidth, dHeight)
ctx.drawImage(img, srcX, srcY, srcWidth, srcHeight, dstX, dstY, dstWidth, dstHeight);
++x;
}
If you're sending them 1 line of data at a time you can use ImageData to make a 1xheight image and use putImageData to draw it.
// at init time or whenever you know the height:
// a 1-pixel-wide, `height`-tall ImageData reused for every received column.
var imageData = ctx.createImageData(1, height);
var x = 0;
// on received next line of data:
// copy the received RGBA bytes into the ImageData's backing store.
// FIX: ImageData has no `length` property (so `ii < imageData.length` never
// ran); the pixel bytes live in imageData.data, a Uint8ClampedArray of
// width * height * 4 elements.
for (var ii = 0; ii < imageData.data.length; ++ii) {
imageData.data[ii] = receivedColumnOfPixels[ii];
}
// paint the 1-pixel-wide column at x, then advance to the next column
ctx.putImageData(imageData, x, 0);
++x;
If you want to scale the ImageData put it in a second canvas and use that canvas as input to drawImage using the first technique.
You can do the same things in WebGL. If you have the entire texture in memory then each frame adjust your positions and texture coordinates to draw a different part of it. If you're receiving 1 column of data at a time then just use a texture that's 1 x height and draw that at the appropriate place. OR, copy that 1 x height data into the fullsize texture using gl.texSubImage2D and then adjust the positions and texture coordinates appropriately to draw the part of the texture you want to draw to the part of the canvas you want to draw it.
drawImage implemented in WebGL would look something like this. I'm using twgl.js because WebGL is too verbose.
// --- One-time setup: context, program, unit-quad geometry, and a texture ---
var m4 = twgl.m4;
var gl = document.getElementById("c").getContext("webgl");
// compiles shader, links and looks up locations
var programInfo = twgl.createProgramInfo(gl, ["vs", "fs"]);
// a unit quad (two triangles over 0..1 in x and y)
var arrays = {
position: {
numComponents: 2,
data: [
0, 0,
1, 0,
0, 1,
0, 1,
1, 0,
1, 1,
],
},
};
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
var bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
// create a texture using a canvas so we don't have to download one
var ctx = document.createElement("canvas").getContext("2d");
ctx.fillStyle = "blue";
ctx.fillRect(0, 0, ctx.canvas.width, ctx.canvas.height);
ctx.lineWidth = 20;
// stroke three colored circles spaced evenly across the canvas
["red", "orange", "yellow"].forEach(function(color, ndx, array) {
ctx.strokeStyle = color;
ctx.beginPath();
ctx.arc((ndx + 1) / (array.length + 1) * ctx.canvas.width, ctx.canvas.height / 2, ctx.canvas.height * 0.4, 0, Math.PI * 2, false);
ctx.stroke();
});
ctx.fillStyle = "white";
ctx.font = "40px sans-serif";
ctx.textAlign = "center";
ctx.textBaseline = "middle";
ctx.fillText("DrawImage", ctx.canvas.width / 2, ctx.canvas.height / 2);
// calls gl.createTexture, gl.bindTexture, gl.texImage2D, gl.texParameteri
var tex = twgl.createTexture(gl, { src: ctx.canvas });
// remember the texture's dimensions; a WebGL texture can't be queried for size
var texWidth = ctx.canvas.width;
var texHeight = ctx.canvas.height;
// WebGL analog of Canvas2D drawImage: draws the source rectangle
// (srcX, srcY, srcWidth, srcHeight) of `tex` into the destination rectangle
// (dstX, dstY, dstWidth, dstHeight) of the current render target.
//
// texWidth/texHeight must be passed in because, unlike images, a WebGL
// texture's size can't be looked up. targetWidth/targetHeight describe the
// thing being drawn to -- we could read gl.canvas.width/height, but passing
// them in also works for framebuffers. All rectangle values are in pixels.
function drawImage(
    tex, texWidth, texHeight,
    srcX, srcY, srcWidth, srcHeight,
    dstX, dstY, dstWidth, dstHeight,
    targetWidth, targetHeight) {
  // Texture matrix: maps the unit quad's 0..1 texcoords onto the source rect.
  // NOTE: no check is made that the source rect lies inside the texture.
  var texCoordMat = m4.identity();
  m4.translate(texCoordMat, [srcX / texWidth, srcY / texHeight, 0], texCoordMat);
  m4.scale(texCoordMat, [srcWidth / texWidth, srcHeight / texHeight, 1], texCoordMat);

  // Position matrix: convert pixels to clip space (y flipped so 0 is the
  // top), then move and scale the unit quad to cover the destination rect.
  var clipSpaceMat = m4.identity();
  m4.translate(clipSpaceMat, [-1, 1, 0], clipSpaceMat);
  m4.scale(clipSpaceMat, [2 / targetWidth, -2 / targetHeight, 1], clipSpaceMat);
  m4.translate(clipSpaceMat, [dstX, dstY, 0], clipSpaceMat);
  m4.scale(clipSpaceMat, [dstWidth, dstHeight, 1], clipSpaceMat);

  gl.useProgram(programInfo.program);
  // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
  twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
  // calls gl.uniformXXX, gl.activeTexture, gl.bindTexture
  twgl.setUniforms(programInfo, {
    matrix: clipSpaceMat,
    textureMatrix: texCoordMat,
    texture: tex,
  });
  // calls gl.drawArray or gl.drawElements
  twgl.drawBufferInfo(gl, gl.TRIANGLES, bufferInfo);
}
// Animation loop: each frame picks a new source rect inside the texture and a
// new destination rect inside the canvas, then draws that sub-image.
function render(time) {
  var seconds = time * 0.001;
  var targetWidth = gl.canvas.width;
  var targetHeight = gl.canvas.height;

  // Wander the rectangles with out-of-phase |sin| waves so each rect stays
  // inside its bounds while continuously changing position and size.
  var srcX = Math.abs(Math.sin(seconds * 1 )) * texWidth;
  var srcY = Math.abs(Math.sin(seconds * 1.81)) * texHeight;
  var srcWidth = (texWidth - srcX) * Math.abs(Math.sin(seconds * 2.12));
  var srcHeight = (texHeight - srcY) * Math.abs(Math.sin(seconds * 1.53));

  var dstX = Math.abs(Math.sin(seconds * 0.34)) * targetWidth;
  var dstY = Math.abs(Math.sin(seconds * 2.75)) * targetHeight;
  var dstWidth = (targetWidth - dstX) * Math.abs(Math.sin(seconds * 1.16));
  var dstHeight = (targetHeight - dstY) * Math.abs(Math.sin(seconds * 1.17));

  drawImage(
      tex, texWidth, texHeight,
      srcX, srcY, srcWidth, srcHeight,
      dstX, dstY, dstWidth, dstHeight,
      targetWidth, targetHeight);

  requestAnimationFrame(render);
}
requestAnimationFrame(render);
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/twgl-full.min.js"></script>
<script id="vs" type="not-js">
// we will always pass a 0 to 1 unit quad
// and then use matrices to manipulate it
attribute vec4 position;
uniform mat4 matrix;
uniform mat4 textureMatrix;
varying vec2 texcoord;
void main () {
gl_Position = matrix * position;
texcoord = (textureMatrix * position).xy;
}
</script>
<script id="fs" type="not-js">
precision mediump float;
varying vec2 texcoord;
uniform sampler2D texture;
void main() {
gl_FragColor = texture2D(texture, texcoord);
}
</script>
<canvas id="c"></canvas>
To understand them matrix math see these articles and work your way backward or forward in those articles.

How to send multiple textures to a fragment shader in WebGL?

So in the javascript portion of my code, here is the snippet that actually sends an array of pixels to the vertex and fragment shaders- but I am only working with 1 texture when I get to those shaders- is there anyway that I can send two textures at a time? if so, how would I 'catch' both of them on the GLSL side of the codee?
// NOTE(review): this appears to ping-pong between two texture/framebuffer
// pairs -- on alternating iterations, sample one texture (bound to unit 1)
// while rendering into the other's framebuffer. Confirm against the
// surrounding code that `it`, FBO and FBO2 are set up that way.
if (it > 0){
// expose `texture` on texture unit 1 for a sampler uniform to read
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.activeTexture(gl.TEXTURE0);
// render into FBO2 this pass
gl.bindFramebuffer(gl.FRAMEBUFFER, FBO2);}
else{
// expose `texture2` on texture unit 1 and render into FBO instead
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, texture2);
gl.activeTexture(gl.TEXTURE0);
gl.bindFramebuffer(gl.FRAMEBUFFER, FBO);}
// draw a full-screen quad (4 vertices as a triangle strip)
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
You reference multiple textures in GLSL by declaring multiple sampler uniforms
uniform sampler2D u_myFirstTexture;
uniform sampler2D u_mySecondTexture;
...
vec4 colorFrom1stTexture = texture2D(u_myFirstTexture, someUVCoords);
vec4 colorFrom2ndTexture = texture2D(u_mySecondTexture, someOtherUVCoords);
You can specific which texture units those 2 samplers use by calling gl.uniform1i as in
var location1 = gl.getUniformLocation(program, "u_myFirstTexture");
var location2 = gl.getUniformLocation(program, "u_mySecondTexture");
...
// tell u_myFirstTexture to use texture unit #7
gl.uniform1i(location1, 7);
// tell u_mySecondTexture to use texture unit #4
gl.uniform1i(location2, 4);
And you setup texture units by using gl.activeTexture and gl.bindTexture
// setup texture unit #7
gl.activeTexture(gl.TEXTURE7); // or gl.TEXTURE0 + 7
gl.bindTexture(gl.TEXTURE_2D, someTexture);
...
// setup texture unit #4
gl.activeTexture(gl.TEXTURE4); // or gl.TEXTURE0 + 4
gl.bindTexture(gl.TEXTURE_2D, someOtherTexture);
...

Categories