I was trying to draw a simple square based on the book WebGL Beginner's guid by Diego Cantor & Brandon Jones but got stuck in lesson two.
I was trying to draw a square but my console shows error message as follows:
WebGL: INVALID_OPERATION: drawArrays: attribs not setup correctly
What i tried is given below:
My Code sample
<html>
<head>
<title>WebGL-Lesson One</title>
<style type="text/css">
canvas {
border: 2px dotted black;
}
</style>
<script id="code-js" type="text/javascript">
var gl = null;
function getGlContext() {
var my_canvas = document.getElementById("my_canvas");
var names = [ "webgl", "experimental-webgl", "webkit-3d", "moz-webgl" ];
for ( var i = 0; i < names.length; ++i) {
try {
gl = my_canvas.getContext(names[i]);
} catch (e) {
}
if (gl)
break;
}
if (gl == null) {
alert("empty canvas")
} else {
drawing();
}
}
function drawing() {
var vertices = [ -50.0, 50.0, 0.0, -50.0, -50.0, 0.0, 50.0, -50.0, 0.0,
50.0, 50.0, 0.0 ];
var indices = [ 3, 2, 1, 3, 1, 0 ]
var myVBOBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, myVBOBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices),
gl.STATIC_DRAW);
var aVertexPosition = (0,0,0);
gl.vertexAttribPointer(aVertexPosition, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(aVertexPosition);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
// gl.bindBuffer(gl.ARRAY_BUFFER, null);
var myIBOBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, myIBOBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices),gl.STATIC_DRAW);
// gl.bindBuffer(gl.ARRAY_BUFFER, null);
}
</script>
<script id="shader-fs" type="x-shader/xfragment"></script>
<script id="shader-vs" type="x-shader/x-vertex"></script>
</head>
<body onload="getGlContext()">
<canvas id="my_canvas" width="700" height="500"></canvas>
</body>
</html>
Your shader scripts appear to be empty. You need at the very least a basic vertex and fragment shader. You also need to load, compile them and create the shader programs, and then you need to call gl.useProgram to set up your attribs, get uniform locations, and set uniform values. I recommend trying these lessons, they worked for me.
Related
I am currently working on project in HTML and js where I have a simple canvas and two text fields (x and y) with a button called "Draw".
Trying to get it to draw a line via the user's entered x and y intercepts onto the canvas with onclick.
The function I am calling with onclick is generateImage().
I can tell that the text fields are in fact receiving the users info. I tested this by using a window.alert to show a pop up with the info (i have it commented out for now).
At first I thought it had to do with how i set up my button, but after asking around I think this cannot be true:
<button type="button" onclick="generateImage()"> DRAW LINE!</button>
I have no more errors on the console.
I even had my professor look at it and he said that it looked alright. I am beginning to think that this is a problem somewhere else within the program. I am sure I am either a) being stupid/frustrated or b) seriously over looking something.
Here is some of my code.
One last thing. var x0=... and var y0 =... I am aware that I will get a slope of zero and a line length of zero because they are the same..
My professor has told us to use this algorithm even thought it doesn't make sense to me. I would like to mention that I have also tried to hard code the x and y values and it still does not work.
Thanks
HTML
<body>
<form action="demo_form.asp">
X = <input name="X Value" type="text" id="Xvalue"><br>
Y = <input name="Y Value" type="text" id="Yvalue"><br>
</form>
<button type="button" onclick="generateImage()"> DRAW LINE!</button>
<p>Click "DRAW LINE" button to draw the line with custom X and Y coords</p>
<canvas id="gl-canvas" width="512", height="512">
<pre>No Support... :(</pre>
</canvas>
</body>
***JS***
function generateImage()
{
var x0 = document.getElementById('Xvalue').value;
var y0 = document.getElementById('Yvalue').value;
var x1 = document.getElementById('Xvalue').value;
var y1 = document.getElementById('Yvalue').value;
// window.alert(x0);
// window.alert(y0);
var slope = (y1-y0)/(x1-x0);
for (var i=0; i<x1-x0; i++){
var y = (slope*i)+y0;
var x = x0 + i;
myImage.setPixel(x,y, RED);
}
}
function glInit(canvas)
{
//makes our GL instance
gl=WebGLUtils.setupWebGL(canvas);
//gets and initializes our vertex and fragment shaders
program = initShaders(gl, "vertex-shader", "fragment-shader");
//sets the resulting shader program to use
gl.useProgram(program);
}
window.onload=function init()
{
var canvas = document.getElementById("gl-canvas");
glInit(canvas);
myImage=new Image(canvas);
myImage.setImageSize(700, 350);
myImage.clearImage();
generateImage(myImage);
renderImage(myImage);
}
function renderImage(myImage)
{
points=[];
colors=[];
for (var i=0; i<pixels.length; i++)
{
var y=Math.floor(i/myImage.width);
var x=i-(y*myImage.width);
colors.push(myImage.getPixel(x, y));
x/=myImage.width/2;
y/=myImage.height/2;
x-=1.0;
y-=1.0;
points.push(vec3(x, y, 0.0, 0.0));
}
gl.clearColor(0.7, 0.7, 0.7, 1.0);
gl.viewport(0, 0, myImage.width, myImage.height);
var posBufferID=gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, posBufferID);
gl.bufferData(gl.ARRAY_BUFFER, flatten(points), gl.STATIC_DRAW);
program.vPositionA=gl.getAttribLocation(program, "vPosition");
gl.vertexAttribPointer(program.vPosition, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(program.vPositionA);
var colBufferID=gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colBufferID);
gl.bufferData(gl.ARRAY_BUFFER, flatten(colors), gl.STATIC_DRAW);
program.vColorA=gl.getAttribLocation(program, "vColor");
gl.vertexAttribPointer(program.vColorA, 4, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(program.vColorA);
render(program, colBufferID, posBufferID);
}
function render(program, colBufferID, posBufferID)
{
gl.clear(gl.COLOR_BUFFER_BIT);
gl.bindBuffer(gl.ARRAY_BUFFER, colBufferID);
gl.vertexAttribPointer(program.vColorA, 4, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, posBufferID);
gl.vertexAttribPointer(program.vPositionA, 3, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.POINTS, 0, points.length);
}
We are writing a web client to stream video, one type of stream uses a proprietary library, so we cannot use current plugins or HTML tags that support standard urls.
I have several years experience with OpenGL, so I decided to try out WebGL for rendering raw data frames.
Rendering using a js Image() that loads png files is extremely fast. But rendering with raw RGBA data is extremely slow.
I am putting the raw data in a Uint8Array() and using the gl.texImage2D() that accepts width height, while the PNG rendering uses the gl.texImage2D() version that accepts an Image and no width height.
I would have assumed the raw data would be faster since it doesn't have to load and decode the png file, but it seems backwards.
My background is largely C++ and have a fair amount of experience with desktop OpenGL. HTML5 and javascript are still fairly new to me.
Why is WebGL rendering the Image() (1024x1024) much much quicker, and even a small image of raw data (32x32) much much slower? Is there a way to speed this up? I am running this on the newest version of Firefox.
Edit:
The problem was actually passing data from the plugin to javascript. I was profiling using Date.getTime(), but apparently that is not a good way since the time before and after creating an array and getting data from the plugin was the same. I've switched to getting data from a local HTTP server which has shown great performance improvement when getting and rendering raw data.
Hmm, let's test
var canvas = document.getElementById("c");
var gl = canvas.getContext("webgl");
var work = document.getElementById("w");
var fps = document.getElementById("f");
var imageData = new Uint8Array(canvas.width * canvas.height * 4);
var program = webglUtils.createProgramFromScripts(
gl, ["vshader", "fshader"], ["a_position"]);
gl.useProgram(program);
var verts = [
1, 1,
-1, 1,
-1, -1,
1, 1,
-1, -1,
1, -1,
];
var vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
var adjust = 1;
var workAmount = adjust;
var oneFrame = 1 / 50; // shoot for 50fps since timing is poor
var then = Date.now() * 0.001;
var frameCount = 0;
var maxIndex = canvas.width * canvas.height;
function doStuff() {
var now = Date.now() * 0.001;
var deltaTime = now - then;
then = now;
++frameCount;
if (deltaTime < oneFrame) {
workAmount += adjust;
} else {
workAmount = Math.max(workAmount - adjust, adjust);
}
fps.innerHTML = (1 / deltaTime).toFixed(1);
work.innerHTML = workAmount;
var color = (frameCount & 1) ? 255 : 128;
for (var i = 0; i < workAmount; ++i) {
var index = (Math.random() * maxIndex | 0) * 4;
imageData[index + 0] = color;
imageData[index + 1] = color;
imageData[index + 2] = color;
imageData[index + 3] = 255;
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, canvas.width, canvas.height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, imageData);
}
gl.drawArrays(gl.TRIANGLES, 0, 6);
requestAnimationFrame(doStuff);
}
doStuff();
body, document {
font-family: monospace;
}
#c {
width: 128px;
height: 128px;
border: 1px solid red;
}
#outer {
position: relative;
}
#info {
position: absolute;
left: 10px;
top: 10px;
background-color: white;
padding: 0.5em;
}
<div id="outer">
<canvas id="c" width="1024" height="1024"></canvas>
<div id="info">
<div>fps : <span id="f"></span></div>
<div>work: <span id="w"></span></div>
</div>
</div>
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<script id="vshader" type="whatever">
attribute vec4 a_position;
varying vec2 v_texcoord;
void main() {
gl_Position = a_position;
v_texcoord = a_position.xy * 0.5 + 0.5;
}
</script>
<script id="fshader" type="whatever">
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_sampler;
void main() {
gl_FragColor = texture2D(u_sampler, v_texcoord);
}
</script>
On my 2014 MBP I get about 20 1024x1024 RGBA/UNSIGNED_BYTE uploads a frame at 50fps on Chrome and about the same on Firefox
What do you get? Are you sure your bottleneck is the texture uploads and not something else?
The slowness could come from (at least) 3 things:
WebGL has to do error checking with texImage2D. Perhaps the browsers knows that data from a Image is always valid and thus skip this check.
WebGL may need to do data conversion behind the hood; For example, If you use premultiplied alpha; then WebGL may need to do the conversion with raw data; whereas Image element is already in premultiplied alpha (most likely).
Browsers are still godly slow at creating TypeArrays. If you are creating a new TypeArray every frame that will slow down performance.
I am trying to render a model under multiple light sources using multiple drawElement calls with different light sources. The gist of the code is as follows.
this.setUniform("u_matrix", "Matrix4fv", false, matrix);
this.setUniform("u_nMatrix", "Matrix3fv", false, nMatrix);
this.setUniform("u_kSpecular", "3fv", material.specular);
this.setUniform("u_kDiffuse", "3fv", material.diffuse);
this.setUniform("u_kAmbient", "3fv", material.ambient);
this.setUniform("u_shininess", "1f", material.shininess);
this.setVertexAttribArray("a_position", new Float32Array(mesh.vertices), 3);
this.setVertexAttribArray("a_normal", new Float32Array(mesh.vertexNormals), 3);
this.setElementArray(new Uint16Array(mesh.indices));
for(var i = 0;i < lights.length;i++) {
if(i == 1) {
gl.enable(gl.BLEND);
gl.blendFunc(gl.ONE, gl.ONE);
gl.blendEquation(gl.FUNC_ADD);
}
var light = lights[i];
console.dir(light);
this.setUniform("u_lightType", "1i", light.type);
this.setUniform("u_lightVector", "3fv", light.vector);
this.setUniform("u_lightColor", "3fv", light.color);
gl.drawElements(gl.TRIANGLES, mesh.indices.length, gl.UNSIGNED_SHORT, 0);
}
gl.disable(gl.BLEND);
The issue here is that only the first lighting is rendered. Subsequent drawElements do not render anything. I do not think the issue is with my shader code, because the issue persists even if I strip the shader down to just set fragment color equal to the light type or some such parameter.
setUniform setVertexAttribArray and setElementArray are some helpers that i have written. They basically look like this:
setElementArray: function(array) {
var gl = this.gl;
var buffer = this._arrBuffers.__indexBuffer;
if(_.isUndefined(buffer)) {
buffer = gl.createBuffer();
this._arrBuffers.__indexBuffer = buffer;
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, array, gl.STATIC_DRAW);
}
setVertexAttribArray: function(name, array, size, type, normalized, stride, offset) {
var gl = this.gl;
size = size || 2;
type = type || gl.FLOAT;
normalized = normalized || false;
stride = stride || 0;
offset = offset || 0;
var buffer = this._arrBuffers[name];
if(_.isUndefined(buffer)) {
buffer = gl.createBuffer();
this._arrBuffers[name] = buffer;
}
var location = this.getLocation(name, true);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, array, gl.STATIC_DRAW);
gl.enableVertexAttribArray(location);
gl.vertexAttribPointer(location, size, type, normalized, stride, offset);
}
I have found the solution. I had set gl.depthFunc(gl.LESS), so subsequent renderings failed the depth test. Setting gl.depthFunc(gl.LEQUAL) solved the problem.
I'm trying to render a few objects to a canvas and I'm having a bit of trouble understanding what's not working.
I'm building two objects at the moment that represent the two meshes that I want to render. If I create one mesh the code works fine so the problem, I think, is that the data gets screwed up when I'm building two or more.
Here's an example of the mesh data:
"name":"cone",
"buffers":{
"vertexPosition":{}, // Buffer
"vertexIndex":{} // Buffer
},
"mesh":{
"vertices":[], // emptied it to fit on page
"faces":[] // emptied it to fit on page
},
"mvMatrix": Float32Array[16],
"itemSize":3,
"numItems":12,
"program":{
"vertexPosAttrib":0,
"mvMatrixUniform":{},
"pMatrixUniform":{}
}
This is build from this function:
buildMeshData: function(){
this.mvMatrix = mat4.create();
this.buffers.vertexPosition = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.buffers.vertexPosition);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(this.mesh.vertices), gl.STATIC_DRAW);
this.buffers.vertexIndex = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.buffers.vertexIndex);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(this.mesh.faces), gl.STATIC_DRAW);
this.itemSize = 3;
this.numItems = this.mesh.faces.length;
var vertexProps = {
attributes: ['vec3', 'VertexPosition'],
uniforms: ['mat4', 'MVMatrix', 'mat4', 'PMatrix'],
varyings: ['vec3', 'TexCoord']
}
var vertexShaderFunction = 'vTexCoord = aVertexPosition + 0.5; gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1);';
var vshaderInput = utils.buildVertexShader(vertexProps, vertexShaderFunction);
var fragmentProps = {
attributes: [],
uniforms: [],
varyings: ['vec3', 'TexCoord']
}
var fragmentShaderFunction = 'gl_FragColor = vec4(vTexCoord, 1);';
var fshaderInput = utils.buildFragmentShader(fragmentProps, fragmentShaderFunction);
this.program = gl.createProgram();
var vshader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vshader, vshaderInput);
gl.compileShader(vshader);
var fshader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fshader, fshaderInput);
gl.compileShader(fshader);
gl.attachShader(this.program, vshader);
gl.attachShader(this.program, fshader);
gl.linkProgram(this.program);
gl.useProgram(this.program);
this.program.vertexPosAttrib = gl.getAttribLocation(this.program, 'aVertexPosition');
gl.vertexAttribPointer(this.program.vertexPosAttrib, this.itemSize, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(this.program.vertexPosAttrib);
this.program.mvMatrixUniform = gl.getUniformLocation(this.program, "uMVMatrix");
this.program.pMatrixUniform = gl.getUniformLocation(this.program, "uPMatrix");
scene.add(this);
}
and the render function goes like this:
function render(){
currentTime = new Date().getTime();
deltaTime = (currentTime - initialTime) / 1000; // in seconds
gl.viewport(0, 0, stage.width, stage.height);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
for(var i in scene.meshes){
(function(mesh){
mat4.translate(mesh.mvMatrix, mesh.mvMatrix, [0, 2 * i, -10 - (10 * i)]);
gl.useProgram(mesh.program);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, mesh.buffers.vertexIndex);
gl.vertexAttribPointer(mesh.program.vertexPosAttrib, mesh.itemSize, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(mesh.program.vertexPosAttrib);
gl.uniformMatrix4fv(mesh.program.mvMatrixUniform, false, mesh.mvMatrix);
gl.uniformMatrix4fv(mesh.program.pMatrixUniform, false, scene.pMatrix);
gl.drawElements(gl.TRIANGLES, mesh.numItems, gl.UNSIGNED_SHORT, 0);
gl.disableVertexAttribArray(mesh.program.vertexPosAttrib);
})(scene.meshes[i])
}
// requestAnimationFrame(render);
}
The result of this is the second object is drawn correctly but the first causes the error:
[.WebGLRenderingContext]GL ERROR :GL_INVALID_OPERATION : glDrawElements: attempt to access out of range vertices in attribute 0
...and is therefore not drawn.
Any ideas where the problem lies. Hopefully thats enough information from the code, I didn't want to put up too much, but if you need to see anything else I'll update.
This code
gl.vertexAttribPointer(this.program.vertexPosAttrib, this.itemSize, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(this.program.vertexPosAttrib);
Need to be called when drawing each mesh and not where it's called now. Additionally before calling gl.vertexAttribPointer for this.program.vertexPosAttrib you need to call
gl.bindBuffer(gl.ARRAY_BUFFER, mesh.buffers.vertexPosition);
Because gl.vertexAttribPointer binds the buffer currently bound to gl.ARRAY_BUFFER to the specified attribute.
In other words
gl.bindBuffer(gl.ARRAY_BUFFER, mesh.buffers.vertexPosition);
gl.vertexAttribPointer(mesh.program.vertexPosAttrib, mesh.itemSize, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(mesh.program.vertexPosAttrib);
This is my first time playing around with Vertex Shaders in a WebGL context. I want to texture a primitive with a video, but instead of just mapping the video into the surface I;m trying to translate the luma of the video into vertex displacement. This is kind of like the Rutt Etra, but in a digital format. A bright pixel should push the vertex forward, while a darker pixel does the inverse. Can anyone tell me what I'm doing wrong? I can't find a reference for this error.
When compiling my code, I get the following when using sampler2D and texture2D:
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.65 Safari/537.36 | WebGL 1.0 (OpenGL ES 2.0 Chromium) | WebKit | WebKit WebGL | WebGL GLSL ES 1.0 (OpenGL ES GLSL ES 1.0 Chromium) Three.js:264
ERROR: 0:57: 'ftransform' : no matching overloaded function found
ERROR: 0:57: 'assign' : cannot convert from 'const mediump float' to 'Position highp 4-component vector of float'
ERROR: 0:60: 'gl_TextureMatrix' : undeclared identifier
ERROR: 0:60: 'gl_TextureMatrix' : left of '[' is not of type array, matrix, or vector
ERROR: 0:60: 'gl_MultiTexCoord0' : undeclared identifier
Three.js:257
<!doctype html>
<html>
<head>
<title>boiler plate for three.js</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<script src="vendor/three.js/Three.js"></script>
<script src="vendor/three.js/Detector.js"></script>
<script src="vendor/three.js/Stats.js"></script>
<script src="vendor/threex/THREEx.screenshot.js"></script>
<script src="vendor/threex/THREEx.FullScreen.js"></script>
<script src="vendor/threex/THREEx.WindowResize.js"></script>
<script src="vendor/threex.dragpancontrols.js"></script>
<script src="vendor/headtrackr.js"></script>
<style>
body {
overflow : hidden;
padding : 0;
margin : 0;
color : #222;
background-color: #BBB;
font-family : arial;
font-size : 100%;
}
#info .top {
position : absolute;
top : 0px;
width : 100%;
padding : 5px;
text-align : center;
}
#info a {
color : #66F;
text-decoration : none;
}
#info a:hover {
text-decoration : underline;
}
#info .bottom {
position : absolute;
bottom : 0px;
right : 5px;
padding : 5px;
}
</style>
</head>
<body>
<!-- three.js container -->
<div id="container"></div>
<!-- info on screen display -->
<div id="info">
<!--<div class="top">
LearningThree.js
boiler plate for
three.js
</div>-->
<div class="bottom" id="inlineDoc" >
- <i>p</i> for screenshot
</div>
</div>
<canvas id="compare" width="320" height="240" style="display:none"></canvas>
<video id="vid" autoplay loop></video>
<script type="x-shader/x-vertex" id="vertexShader">
varying vec2 texcoord0;
void main()
{
// perform standard transform on vertex
gl_Position = ftransform();
// transform texcoords
texcoord0 = vec2(gl_TextureMatrix[0] * gl_MultiTexCoord0);
}
</script>
<script type="x-shader/x-vertex" id="fragmentShader">
varying vec2 texcoord0;
uniform sampler2D tex0;
uniform vec2 imageSize;
uniform float coef;
const vec4 lumcoeff = vec4(0.299,0.587,0.114,0.);
void main (void)
{
vec4 pixel = texture2D(tex0, texcoord0);
float luma = dot(lumcoeff, pixel);
gl_FragColor = vec4((texcoord0.x / imageSize.x), luma, (texcoord0.y / imageSize.y) , 1.0);
}
</script>
<script type="text/javascript">
var stats, scene, renderer;
var camera, cameraControls;
var videoInput = document.getElementById('vid');
var canvasInput = document.getElementById('compare');
var projector = new THREE.Projector();
var gl;
var mesh,
cube,
attributes,
uniforms,
material,
materials;
var videoTexture = new THREE.Texture( videoInput );
if( !init() ) animate();
// init the scene
function init(){
if( Detector.webgl ){
renderer = new THREE.WebGLRenderer({
antialias : true, // to get smoother output
preserveDrawingBuffer : true // to allow screenshot
});
renderer.setClearColorHex( 0xBBBBBB, 1 );
// uncomment if webgl is required
//}else{
// Detector.addGetWebGLMessage();
// return true;
}else{
renderer = new THREE.CanvasRenderer();
gl=renderer;
}
renderer.setSize( window.innerWidth, window.innerHeight );
document.getElementById('container').appendChild(renderer.domElement);
// create a scene
scene = new THREE.Scene();
// put a camera in the scene
camera = new THREE.PerspectiveCamera( 23, window.innerWidth / window.innerHeight, 1, 100000 );
camera.position.z = 0;
scene.add( camera );
//
// // create a camera contol
// cameraControls = new THREEx.DragPanControls(camera)
// transparently support window resize
// THREEx.WindowResize.bind(renderer, camera);
// allow 'p' to make screenshot
THREEx.Screenshot.bindKey(renderer);
// allow 'f' to go fullscreen where this feature is supported
if( THREEx.FullScreen.available() ){
THREEx.FullScreen.bindKey();
document.getElementById('inlineDoc').innerHTML += "- <i>f</i> for fullscreen";
}
materials = new THREE.MeshLambertMaterial({
map : videoTexture
});
attributes = {};
uniforms = {
tex0: {type: 'mat2', value: materials},
imageSize: {type: 'f', value: []},
coef: {type: 'f', value: 1.0}
};
//Adding a directional light source to see anything..
var directionalLight = new THREE.DirectionalLight(0xffffff);
directionalLight.position.set(1, 1, 1).normalize();
scene.add(directionalLight);
// video styling
videoInput.style.position = 'absolute';
videoInput.style.top = '50px';
videoInput.style.zIndex = '100001';
videoInput.style.display = 'block';
// set up camera controller
headtrackr.controllers.three.realisticAbsoluteCameraControl(camera, 1, [0,0,0], new THREE.Vector3(0,0,0), {damping : 1.1});
var htracker = new headtrackr.Tracker();
htracker.init(videoInput, canvasInput);
htracker.start();
// var stats = new Stats();
// stats.domElement.style.position = 'absolute';
// stats.domElement.style.top = '0px';
// document.body.appendChild( stats.domElement );
document.addEventListener('headtrackrStatus',
function (event) {
if (event.status == "found") {
addCube();
}
}
);
}
// animation loop
function animate() {
// loop on request animation loop
// - it has to be at the begining of the function
// - see details at http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
requestAnimationFrame( animate );
// do the render
render();
// update stats
//stats.update();
}
function render() {
// convert matrix of every frame of video -> texture
uniforms.tex0 = materials;
uniforms.coef = 0.2;
uniforms.imageSize.x = window.innerWidth;
uniforms.imageSize.y = window.innerHeight;
// update camera controls
// cameraControls.update();
if( videoInput.readyState === videoInput.HAVE_ENOUGH_DATA ){
videoTexture.needsUpdate = true;
}
// actually render the scene
renderer.render( scene, camera );
}
function addCube(){
material = new THREE.ShaderMaterial({
uniforms: uniforms,
attributes: attributes,
vertexShader: document.getElementById('vertexShader').textContent,
fragmentShader: document.getElementById('fragmentShader').textContent,
transparent: true
});
//The cube
cube = new THREE.Mesh(new THREE.CubeGeometry(40, 30, 10, 1, 1, 1, material), new THREE.MeshFaceMaterial());
cube.overdraw = true;
scene.add(cube);
}
</script>
</body>
</html>
The primary problem here is that you are using the old GLSL reserved words that were intended for programmable / fixed-function interop. In OpenGL ES 2.0 things like gl_MultiTexCoord0 and gl_TextureMatrix [n] are not defined, because they completely removed the legacy fixed-function vertex array baggage that regular OpenGL has to deal with. These reserved words let you have matrix/vertex array state per-texture unit; they do not exist in OpenGL ES, this was their purpose in OpenGL.
To get around this, you have to use generic vertex attributes (e.g. attribute vec2 tex_st) instead of having a 1:1 mapping between texture coordinate pointers and texture units. Likewise, there is no texture matrix associated with each texture unit. To duplicate the functionality of texture matrices, you need to use matrix uniforms in your vertex/fragment shader.
To be honest, I cannot remember the last time I actually found it useful to have a separate texture matrix / texture coordinate pointer for each texture unit when using shaders... I often have 4 or 5 different textures and only need maybe 1 or 2 sets of texture coordinates. It is no big loss.
The kicker here is ftransform (...). This is intended to make it possible to write 1-line vertex shaders in OpenGL that behave the same way as the fixed-function pipeline. You must have copied and pasted a shader that was written for OpenGL 2.x or 3.x (compatibility). Explaining how to fix everything in this shader could be a real chore, you might have to learn more about GLSL before most of what I just wrote makes sense :-\