When you create a sphere (which is really just a polyhedron) or any other polyhedron with the native WebGL API, you get a flat-shaded mesh, and once you apply a texture the visible angles between the small faces on the surface look ugly. I know I can subdivide the surface to make it smoother, but is there any other way to smooth the surface of the polyhedron? I'd like it to look like the two pictures below (both captured from Blender).
Here is my code for generating the sphere
function getSphere(r, segment_lat, segment_lon) {
    var normalData = [];
    var vertexData = [];
    var textureCoord = [];
    var vertexIndex = [];
    for (var latNum = 0; latNum <= segment_lat; latNum++) {
        var theta = latNum * Math.PI / segment_lat;
        var sinTheta = Math.sin(theta);
        var cosTheta = Math.cos(theta);
        for (var lonNum = 0; lonNum <= segment_lon; lonNum++) {
            var phi = lonNum * 2 * Math.PI / segment_lon;
            var sinPhi = Math.sin(phi);
            var cosPhi = Math.cos(phi);
            // point on the unit sphere
            var x = cosPhi * sinTheta;
            var y = cosTheta;
            var z = sinPhi * sinTheta;
            var u = 1 - (lonNum / segment_lon);
            var v = 1 - (latNum / segment_lat);
            textureCoord.push(u);
            textureCoord.push(v);
            vertexData.push(r * x);
            vertexData.push(r * y);
            vertexData.push(r * z);
        }
    }
    // two triangles per quad of the lat/lon grid
    for (var latNum = 0; latNum < segment_lat; latNum++) {
        for (var lonNum = 0; lonNum < segment_lon; lonNum++) {
            var first = (latNum * (segment_lon + 1)) + lonNum;
            var second = first + segment_lon + 1;
            vertexIndex.push(first);
            vertexIndex.push(second);
            vertexIndex.push(first + 1);
            vertexIndex.push(second);
            vertexIndex.push(second + 1);
            vertexIndex.push(first + 1);
        }
    }
    return {
        'vertexData': vertexData,
        'vertexIndex': vertexIndex,
        'textureCoord': textureCoord,
        'normalDatas': normalData   // note: normals are never filled in
    };
}
Fragment Shader:
precision mediump float;
varying vec2 vTextureCoord;
uniform sampler2D uSampler;
void main(void) {
vec3 light = vec3(1,1,1);
vec4 textureColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
gl_FragColor = vec4(textureColor.rgb*light,textureColor.a);
// gl_FragColor = vec4 (1,0,0,.8);
}
Vertex Shader:
attribute vec2 aTextureCoord;
attribute vec3 aVertexPosition;
// uniform mediump mat4 proj_inv;
uniform mediump mat4 modelViewMatrix;
uniform mediump mat4 projectMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
// the projectMatrix * modelViewMatrix multiplication must happen in the vertex shader, or the result will be wrong
gl_Position = projectMatrix*modelViewMatrix*vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
If I had to guess, your rendered result is different from the pictures you showed. What you see is a "flat" sphere in one uniform color, and you want a shaded sphere, is that correct?
If so, you need to go read some tutorials on how lighting works. Basically, the angle between the viewing vector and the fragment's normal is used to determine the brightness of each fragment. A fragment on the sphere that you are staring at directly has a very small angle between the view vector and its normal, and is therefore bright. A fragment on the barely visible edge of the sphere has a large angle between its normal and the view vector, and therefore appears dark.
In your sphere generation code, you need to calculate the normals as well and pass that information to the GPU along with the rest. Fortunately, for a sphere the normal is easy to calculate: normal = normalize(position - center); or just normalize(position) if the center is assumed to be at (0,0,0).
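Since the question's getSphere() already computes the unit-sphere position (x, y, z), the normals fall out almost for free. Below is a minimal sketch of what that could look like, under the assumption that shaderProgram is your linked program and aVertexNormal is the attribute name you choose (neither of those names is in the original code):
// Inside the inner lonNum loop of getSphere(), alongside the vertexData pushes:
// for a sphere centered at (0,0,0), (x, y, z) is already a unit vector, so it
// doubles as the vertex normal (normalize(position)).
normalData.push(x);
normalData.push(y);
normalData.push(z);

// When setting up buffers (buffer/attribute names here are assumptions, not from the question):
var sphere = getSphere(1.0, 30, 30);
var normalBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, normalBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(sphere.normalDatas), gl.STATIC_DRAW);
var aVertexNormal = gl.getAttribLocation(shaderProgram, "aVertexNormal"); // assumed attribute name
gl.vertexAttribPointer(aVertexNormal, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(aVertexNormal);

// On the shader side you would add `attribute vec3 aVertexNormal;` and a
// `varying vec3 vNormal;` to the vertex shader, then in the fragment shader
// replace the constant `vec3 light = vec3(1,1,1);` with something like
// `float diff = max(dot(normalize(vNormal), normalize(uLightDir)), 0.0);`
// (for a light direction of your choice), so brightness varies smoothly across
// the surface instead of per facet.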
I am trying to make a 3D box with a pattern on each side using the following code, but when viewed from certain angles the back faces disappear when looking through the transparent parts of the front faces. I was also wondering if it is possible to have a different pattern on each face? Many thanks in advance!
let r = 10
let a = 0
let c = 20
let angle = 0
let art
function setup() {
createCanvas(windowWidth, windowHeight, WEBGL);
art = createGraphics(800, 800)
}
function draw() {
background(0);
let x = r + c * cos(a)
let y = r + c * sin(a)
art.fill(r, a, c)
art.ellipse(x + 400, y + 400, 10, 10)
c += 0.2
a += 1.8
push()
texture(art)
rotateX(angle)
rotateY(angle)
rotateZ(angle)
box(400)
angle += 0.0003
pop()
orbitControl();
}
html, body { margin: 0; overflow: hidden; }
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.4.0/p5.js"></script>
This happens because in WebGL, once a pixel is drawn, regardless of that pixel's level of transparency, any other triangle that would draw to the same pixel at a greater depth is discarded (I think the alpha information from the original pixel(s) may no longer be available). In order for transparency to work properly in WebGL it is necessary to draw all triangles in depth order (furthest from the camera first). And even then, if two triangles intersect there will still be problems.
In your case, because you have many pixels that are completely transparent and others that are completely opaque, there is another solution: a custom fragment shader that discards pixels if the texture alpha is below some threshold.
const vert = `
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;
attribute vec3 aPosition;
attribute vec2 aTexCoord;
varying vec2 vTexCoord;
void main() {
vTexCoord = aTexCoord;
vec4 viewModelPosition = uModelViewMatrix * vec4(aPosition, 1.0);
gl_Position = uProjectionMatrix * viewModelPosition;
}`;
const frag = `
precision mediump float;
// ranges from 0..1
varying vec2 vTexCoord;
uniform sampler2D uSampler;
void main() {
vec4 tex = texture2D(uSampler, vTexCoord);
if (tex.a < 0.05) {
discard;
}
gl_FragColor = tex;
}`;
let r = 10
let a = 0
let c = 20
let angle = 0
let art
let discardShader;
function setup() {
createCanvas(windowWidth, windowHeight, WEBGL);
art = createGraphics(800, 800)
discardShader = createShader(vert, frag)
textureMode(NORMAL)
}
function draw() {
background(0);
let x = r + c * cos(a)
let y = r + c * sin(a)
art.fill(r, a, c)
art.ellipse(x + 400, y + 400, 10, 10)
c += 0.2
a += 1.8
push()
noStroke()
texture(art)
shader(discardShader)
rotateX(angle)
rotateY(angle)
rotateZ(angle)
box(400)
angle += 0.0003
pop()
orbitControl();
}
html,
body {
margin: 0;
overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.4.1/p5.js"></script>
Note #1: It is important that you use p5.js v1.4.1 for this to work because prior to that there was a bug that prevented user shaders from working with textures.
Note #2: If your texture had partial opacity then this would not work and instead you would want to render each plane of the box separately and in the correct order (farthest from the camera first).
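On the second part of the question (a different pattern on each face): box() uses a single texture, but the cube can be assembled from six plane() calls, each with its own texture. These six draws are also what you would sort back-to-front if the textures ever had partial opacity. A rough sketch, assuming a six-element array of p5.Graphics called faceTextures (not part of the original code):
// Draws a cube of the given size as six textured planes.
// The six rotations below cover every face of the cube; for partially
// transparent textures these draw calls would additionally need to be
// issued in back-to-front order relative to the camera.
function texturedBox(size, faceTextures) {
  const h = size / 2;
  const orientations = [
    { rx: 0, ry: 0 },            // facing +z
    { rx: 0, ry: PI },           // facing -z
    { rx: 0, ry: HALF_PI },      // one of the x faces
    { rx: 0, ry: -HALF_PI },     // the opposite x face
    { rx: HALF_PI, ry: 0 },      // one of the y faces
    { rx: -HALF_PI, ry: 0 },     // the other y face
  ];
  for (let i = 0; i < orientations.length; i++) {
    push();
    rotateY(orientations[i].ry);
    rotateX(orientations[i].rx);
    translate(0, 0, h);          // push the plane out to the cube surface
    texture(faceTextures[i]);
    plane(size, size);
    pop();
  }
}
Calling texturedBox(400, faceTextures) in draw() in place of box(400) should then show a different pattern on each side.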
I am first rotating a sprite that has a texture applied, then applying a filter with a fragment shader that distorts the sprite. However, when I add the filter to the sprite, it snaps back to its normal horizontal position instead of the angled position it had before.
I have tried applying a rotation function inside the shader to rotate the UVs. This rotates the image, but it also changes the image outside the rotated area. Here are some screenshots.
Initial look of the sprite after adding and changing the angle:
How it looks after applying the filter:
As you can see the rotation is removed.
I tried to add a rotation matrix inside the shader, here is the result:
The rotation is correct, but only the texture is rotated and not the actual container.
Applying the angle back to the sprite does nothing.
The desired result is the first and second images combined, so that the filter is applied to the rotated sprite.
Here is the code that adds the filter to the image:
const filter = new PIXI.Filter(null, getTransitionFragmentShader(transition, 2), uniforms);
filter.apply = function (filterManager, input, output, clear) {
var matrix = new PIXI.Matrix();
this.uniforms.mappedMatrix = filterManager.calculateNormalizedScreenSpaceMatrix(matrix);
PIXI.Filter.prototype.apply.call(this, filterManager, input, output, clear);
};
sprite.filters = [filter];
vec2 rotate(vec2 v, float a) {
float s = sin(a);
float c = cos(a);
mat2 m = mat2(c, -s, s, c);
return m * v;
}
vec4 transition (vec2 p) {
float dt = parabola(progress,1.);
float border = 1.;
vec2 newUV = rotate(p, angle);
vec4 color1 = vec4(0, 0, 0, 0);
if (fromNothing) {
color1 = vec4(0, 0, 0, 0);
} else {
color1 = texture2D(uTexture1, newUV);
}
vec4 color2 = texture2D(uTexture2, newUV);
vec4 d = texture2D(displacement,vec2(newUV.x*scaleX,newUV.y*scaleY));
float realnoise = 0.5*(cnoise(vec4(newUV.x*scaleX + 0.*time/3., newUV.y*scaleY,0.*time/3.,0.)) +1.);
float w = width*dt;
float maskvalue = smoothstep(1. - w,1.,p.x + mix(-w/2., 1. - w/2., progress));
float maskvalue0 = smoothstep(1.,1.,p.x + progress);
float mask = maskvalue + maskvalue*realnoise;
float final = smoothstep(border,border+0.01,mask);
return mix(color1, color2, final);
}
This is the shader code, with some functions omitted for brevity.
Thanks!
What I did instead was use a vertex shader to handle the rotation, as follows:
attribute vec2 aVertexPosition;
uniform mat3 projectionMatrix;
varying vec2 vTextureCoord;
uniform vec4 inputSize;
uniform vec4 outputFrame;
uniform vec2 rotation;
vec4 filterVertexPosition( void )
{
vec2 position = aVertexPosition * max(outputFrame.zw, vec2(0.)) + outputFrame.xy;
vec2 rotatedPosition = vec2(
position.x * rotation.y + position.y * rotation.x,
position.y * rotation.y - position.x * rotation.x
);
return vec4((projectionMatrix * vec3(rotatedPosition, 1.0)).xy, 0.0, 1.0);
}
vec2 filterTextureCoord( void )
{
return aVertexPosition * (outputFrame.zw * inputSize.zw);
}
void main(void)
{
gl_Position = filterVertexPosition();
vTextureCoord = filterTextureCoord();
}
The rotation is passed as a pair of the sine and cosine of the angle: [sin(radians), cos(radians)].
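For completeness, a rough sketch (not from the question) of keeping that rotation uniform in sync with the sprite each frame; app is assumed to be the PIXI.Application, and the two shader source strings stand in for the shaders shown above:
// Create the filter with the custom rotation vertex shader and an initial rotation of 0.
const filter = new PIXI.Filter(rotationVertexSrc, transitionFragSrc, {
  rotation: [0, 1],   // [sin(0), cos(0)]
});
sprite.filters = [filter];

// Update the uniform from the sprite's current rotation on every tick.
app.ticker.add(() => {
  filter.uniforms.rotation = [Math.sin(sprite.rotation), Math.cos(sprite.rotation)];
});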
I just switched to a new Shopify theme. I've run into some issues with code that works fine on my other themes but not on this new one, due to how the theme is structured.
Their documentation says to use the following event listener instead of document.onload and $(document).ready():
document.addEventListener('page:loaded', function() {
console.log('page:loaded');
});
I'm not skilled in JavaScript and I'm having trouble getting it to work with the following two scripts. Can anyone assist?
<script type="text/javascript">
jQuery(document).ready(function($) {
$('a[data-rel^=lightcase]').lightcase();
});
</script>
<script>
window.addEventListener("load", function () {
var curtains = new Curtains({
container: "planes-canvas"
});
var planeEls = document.getElementsByClassName("planes");
var vs = `#ifdef GL_ES
precision mediump float;
#endif
// default mandatory attributes
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
// those projection and model view matrices are generated by the library
// it will position and size our plane based on its HTML element CSS values
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
// texture coord varying that will be passed to our fragment shader
varying vec2 vTextureCoord;
void main() {
// apply our vertex position based on the projection and model view matrices
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
// varying
// use texture matrix and original texture coords to generate accurate texture coords
vTextureCoord = aTextureCoord;
}`;
var fs = `
#ifdef GL_ES
precision mediump float;
#endif
// get our varyings
varying vec3 vVertexPosition;
varying vec2 vTextureCoord;
// the uniform we declared inside our javascript
uniform float uTime;
// our texture sampler (default name, to use a different name please refer to the documentation)
uniform sampler2D planeTexture;
vec3 hueRotate(vec3 col, float hue) {
vec3 k = vec3(0.57735, 0.57735, 0.57735);
float cosAngle = cos(hue);
return col * cosAngle + cross(k, col) * sin(hue) + k * dot(k, col) * (1.0 - cosAngle);
}
vec3 saturate(vec3 rgb, float adjustment) {
vec3 W = vec3(0.2125, 0.7154, 0.0721);
vec3 intensity = vec3(dot(rgb, W));
return mix(intensity, rgb, adjustment);
}
void main() {
// get our texture coords
vec2 textureCoord = vTextureCoord;
// displace our pixels along both axis based on our time uniform and texture UVs
// this will create a kind of water surface effect
// try to comment a line or change the constants to see how it changes the effect
// reminder : textures coords are ranging from 0.0 to 1.0 on both axis
const float PI = 3.141592;
textureCoord.x += (
sin(textureCoord.x * 12.0 + ((uTime * (PI / 15.0)) * 0.031))
+ sin(textureCoord.y * 12.0 + ((uTime * (PI / 12.489)) * 0.047))
) * 0.0050;
textureCoord.y += (
sin(textureCoord.y * 8.0 + ((uTime * (PI / 12.023)) * 0.023))
+ sin(textureCoord.x * 8.0 + ((uTime * (PI / 15.1254)) * 0.067))
) * 0.0100;
vec4 color = texture2D(planeTexture, textureCoord);
// hue rotation from 0 to PI in 10 seconds
float hueRotation = cos(uTime / 600.0) * PI;
color.rgb = hueRotate(color.rgb, hueRotation);
// saturate
color.rgb = saturate(color.rgb, 2.0);
gl_FragColor = color;
}
`;
var planes = [];
function handlePlane(index) {
var plane = planes[index];
plane
.onReady(function () {
// our texture has been loaded, resize our plane!
plane.planeResize();
})
.onRender(function () {
plane.uniforms.time.value++;
});
}
for (var i = 0; i < planeEls.length; i++) {
var params = {
vertexShader: vs,
fragmentShader: fs,
uniforms: {
time: {
name: "uTime",
type: "1f",
value: 0
}
}
};
var plane = curtains.addPlane(planeEls[i], params);
if (plane) {
planes.push(plane);
handlePlane(i);
}
}
});
</script>
For this code to work with the new theme, you need to listen for the theme's custom page:loaded event instead of the standard window.onload or $(document).ready().
Below you will find your old code snippets adapted to the new event:
First script tag:
<script type="text/javascript">
document.addEventListener('page:loaded', function() {
$('a[data-rel^=lightcase]').lightcase();
});
</script>
Second one:
<script>
document.addEventListener('page:loaded', function() {
var curtains = new Curtains({
container: "planes-canvas"
});
var planeEls = document.getElementsByClassName("planes");
var vs = `#ifdef GL_ES
precision mediump float;
#endif
// default mandatory attributes
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
// those projection and model view matrices are generated by the library
// it will position and size our plane based on its HTML element CSS values
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
// texture coord varying that will be passed to our fragment shader
varying vec2 vTextureCoord;
void main() {
// apply our vertex position based on the projection and model view matrices
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
// varying
// use texture matrix and original texture coords to generate accurate texture coords
vTextureCoord = aTextureCoord;
}`;
var fs = `
#ifdef GL_ES
precision mediump float;
#endif
// get our varyings
varying vec3 vVertexPosition;
varying vec2 vTextureCoord;
// the uniform we declared inside our javascript
uniform float uTime;
// our texture sampler (default name, to use a different name please refer to the documentation)
uniform sampler2D planeTexture;
vec3 hueRotate(vec3 col, float hue) {
vec3 k = vec3(0.57735, 0.57735, 0.57735);
float cosAngle = cos(hue);
return col * cosAngle + cross(k, col) * sin(hue) + k * dot(k, col) * (1.0 - cosAngle);
}
vec3 saturate(vec3 rgb, float adjustment) {
vec3 W = vec3(0.2125, 0.7154, 0.0721);
vec3 intensity = vec3(dot(rgb, W));
return mix(intensity, rgb, adjustment);
}
void main() {
// get our texture coords
vec2 textureCoord = vTextureCoord;
// displace our pixels along both axis based on our time uniform and texture UVs
// this will create a kind of water surface effect
// try to comment a line or change the constants to see how it changes the effect
// reminder : textures coords are ranging from 0.0 to 1.0 on both axis
const float PI = 3.141592;
textureCoord.x += (
sin(textureCoord.x * 12.0 + ((uTime * (PI / 15.0)) * 0.031))
+ sin(textureCoord.y * 12.0 + ((uTime * (PI / 12.489)) * 0.047))
) * 0.0050;
textureCoord.y += (
sin(textureCoord.y * 8.0 + ((uTime * (PI / 12.023)) * 0.023))
+ sin(textureCoord.x * 8.0 + ((uTime * (PI / 15.1254)) * 0.067))
) * 0.0100;
vec4 color = texture2D(planeTexture, textureCoord);
// hue rotation from 0 to PI in 10 seconds
float hueRotation = cos(uTime / 600.0) * PI;
color.rgb = hueRotate(color.rgb, hueRotation);
// saturate
color.rgb = saturate(color.rgb, 2.0);
gl_FragColor = color;
}
`;
var planes = [];
function handlePlane(index) {
var plane = planes[index];
plane
.onReady(function () {
// our texture has been loaded, resize our plane!
plane.planeResize();
})
.onRender(function () {
plane.uniforms.time.value++;
});
}
for (var i = 0; i < planeEls.length; i++) {
var params = {
vertexShader: vs,
fragmentShader: fs,
uniforms: {
time: {
name: "uTime",
type: "1f",
value: 0
}
}
};
var plane = curtains.addPlane(planeEls[i], params);
if (plane) {
planes.push(plane);
handlePlane(i);
}
}
});
</script>
Bloom affects transparency
For the renderer I have this setup:
renderer = new THREE.WebGLRenderer( { antialias: true, preserveDrawingBuffer:true, alpha:true } );
and for the bloom pass (post-processing):
var renderPass = new RenderPass( scene, camera );
var bloomPass = new UnrealBloomPass( new THREE.Vector2( window.innerWidth, window.innerHeight ), 1.5, 0.4, 0.85 );
bloomPass.exposure =0.2;
bloomPass.threshold =0;
bloomPass.strength = 0.2;
bloomPass.radius = 0.1;
composer.addPass( renderPass );
composer.addPass( bloomPass );
and while rendering I'm using
composer.render()
but this affects the transparency of the canvas by darkening it (the scene).
I had the same issue. Your code for creating the UnrealBloomPass is right, but the problem is in the UnrealBloomPass shader, in the getSeperableBlurMaterial method.
You need to replace its fragmentShader with the code below, and the pass's background will then respect the alpha channel:
fragmentShader:
"#include <common>\
varying vec2 vUv;\n\
uniform sampler2D colorTexture;\n\
uniform vec2 texSize;\
uniform vec2 direction;\
\
float gaussianPdf(in float x, in float sigma) {\
return 0.39894 * exp( -0.5 * x * x/( sigma * sigma))/sigma;\
}\
void main() {\n\
vec2 invSize = 1.0 / texSize;\
float fSigma = float(SIGMA);\
float weightSum = gaussianPdf(0.0, fSigma);\
float alphaSum = 0.0;\
vec3 diffuseSum = texture2D( colorTexture, vUv).rgb * weightSum;\
for( int i = 1; i < KERNEL_RADIUS; i ++ ) {\
float x = float(i);\
float w = gaussianPdf(x, fSigma);\
vec2 uvOffset = direction * invSize * x;\
vec4 sample1 = texture2D( colorTexture, vUv + uvOffset);\
vec4 sample2 = texture2D( colorTexture, vUv - uvOffset);\
diffuseSum += (sample1.rgb + sample2.rgb) * w;\
alphaSum += (sample1.a + sample2.a) * w;\
weightSum += 2.0 * w;\
}\
gl_FragColor = vec4(diffuseSum/weightSum, alphaSum/weightSum);\n\
}"
A bloom pass mixes the image with a blurred version of itself, which changes the colors. You should consider setting the WebGLRenderer's tone mapping property to get a good color dynamic range.
Tone mapping definition (Wikipedia)
Tone mapping is a technique used in image processing and computer graphics to map one set of colors to another to approximate the appearance of high dynamic range images in a medium that has a more limited dynamic range.
Add this line in your init routine
renderer.toneMapping = THREE.ReinhardToneMapping
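If you go the tone mapping route, here is a minimal sketch of how that line fits into the setup from the question; scene and camera are the question's existing objects, and the import paths and exposure value are assumptions to adjust for your three.js version:
import * as THREE from 'three';
import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer.js';
import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass.js';
import { UnrealBloomPass } from 'three/examples/jsm/postprocessing/UnrealBloomPass.js';

// Renderer setup from the question, plus tone mapping for a wider usable dynamic range.
const renderer = new THREE.WebGLRenderer({ antialias: true, preserveDrawingBuffer: true, alpha: true });
renderer.toneMapping = THREE.ReinhardToneMapping;
renderer.toneMappingExposure = 1.0;   // starting point only; tune to your scene

// Same post-processing chain as in the question.
const composer = new EffectComposer(renderer);
composer.addPass(new RenderPass(scene, camera));
composer.addPass(new UnrealBloomPass(new THREE.Vector2(window.innerWidth, window.innerHeight), 1.5, 0.4, 0.85));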
I'm attempting to animate a projectile's trajectory (in the form of a cannon ball) given an angle and an initial velocity. I've built the "cannon" as a line and the target I'm aiming for as a box, which I know is elementary, but I just want to get the projectile motion down for now. Currently I'm messing around with hardcoded angles and velocity, but eventually I would like to input the angle and velocity and have the cannon shoot accordingly. The target is level with the launch point, so I know the ball's x position will be initialVelocity * cos(angle) * time, and its y position will be initialVelocity * sin(angle) * time - (g * t^2)/2, where g is the gravitational acceleration. Currently what I have is a cannon ball moving linearly across the screen, and it doesn't even start in the right spot.
I'm not asking for code to be written for me; I'd just like a starting point for how to get the cannon ball to start in the right spot and move correctly, and to know where I'm going completely wrong. I'm confident I can get it to hit the target once I'm taught how to manipulate the shaders correctly.
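To make the formulas above concrete, here is a small sketch that evaluates them directly; the launch point (-0.8, -0.3) and the clip-space units are taken from the question's initBall() code below, and the value used for g is only a placeholder:
// Position of the ball t time units after launch, per the question's formulas:
//   x(t) = x0 + v0 * cos(angle) * t
//   y(t) = y0 + v0 * sin(angle) * t - 0.5 * g * t * t
function ballPosition(t, v0, angleDeg, g) {
  var rad = angleDeg * Math.PI / 180;
  return {
    x: -0.8 + v0 * Math.cos(rad) * t,
    y: -0.3 + v0 * Math.sin(rad) * t - 0.5 * g * t * t
  };
}

// e.g. the center passed to filled_circle() each frame could come from:
// var p = ballPosition(Time, initialVel, angle, 0.98);  // 0.98 is a placeholder g in clip-space units
// filled_circle(vec2(p.x, p.y), 0.05, 4);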
Shaders:
<script id="vertex-shader" type="x-shader/x-vertex">
precision mediump float;
attribute vec4 vPosition;
attribute vec4 vColor;
varying vec4 fColor;
uniform float time;
void main()
{
/*old code from manipulating clock hands*/
/* fColor = vColor;
float length = sqrt(vPosition.x*vPosition.x + vPosition.y * vPosition.y);
gl_Position.x = length*cos(theta);
gl_Position.y = length*sin(theta);
gl_Position.z = 0.0;
gl_Position.w = 1.0; */
fColor = vColor;
gl_Position = vPosition;
}
</script>
<script id="background-vertex-shader" type="x-shader/x-vertex">
precision mediump float;
attribute vec4 vPosition;
attribute vec4 vColor;
varying vec4 fColor;
void main()
{
fColor = vColor;
gl_Position = vPosition;
}
</script>
<script id="fragment-shader" type="x-shader/x-fragment">
precision mediump float;
varying vec4 fColor;
void main()
{
gl_FragColor = fColor;
}
</script>
WebGL code:
var gl;
var points = [];
var colors = [];
var cannonpoints = [];
var circlepoints;
var squarepoints;
var baseColors = [
vec3(1.0,0.0,0.0),
vec3(0.0,1.0,0.0),
vec3(0.0,0.0,1.0),
vec3(1.0,1.0,1.0),
vec3(0.0,0.0,0.0)
];
var program;
var backgroundprogram;
var Time;
var thetaLoc;
var angle;
var initialVel;
var vx;
var vy;
var ballX = -0.5;
var ballY = -0.5;
window.onload = function init(){
var canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if(!gl) {
alert("webGL isn't available");
}
// configuring WebGL
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.0, 1.0, 1.0); // set background color to blue
// load the shaders and initialize the attribute buffers
program = initShaders(gl, "vertex-shader", "fragment-shader");
backgroundprogram = initShaders(gl, "background-vertex-shader", "fragment-shader");
document.getElementById("shoot").onclick = function() {
velocity = document.getElementById("velocity").value;
angle = document.getElementById("angle").value;
console.log("angle="+angle);
vx = (Math.cos(angle*(Math.PI/180))*velocity);
console.log("vx="+vx);
vy = (Math.sin(angle*(Math.PI/180))*velocity);
console.log("vy="+vy);
}
Time = 0.0;
thetaLoc = gl.getUniformLocation(program,"time");
initBackground();
/******************
initBall(Time,1);
*******************/
initBall(Time);
//render();
setInterval(render, 100);
};
function render(){
gl.clear(gl.COLOR_BUFFER_BIT);
/* draw the circle */
gl.drawArrays(gl.TRIANGLE_FAN,0,circlepoints);
/* draw the square(s) */
gl.drawArrays(gl.TRIANGLES,circlepoints,squarepoints);
//draw the cannon
gl.drawArrays(gl.LINES,circlepoints+squarepoints,2);
//draw the cannon ball
//starting index is the amount of points already drawn
//amount of points for circle + amount of points for square + amount of points for line
var start = circlepoints + squarepoints + 2;
Time += 0.01;
initBall(Time); //,1);
gl.uniform1f(thetaLoc,Time);
//amount of points to draw is length of points array minus the start index
gl.drawArrays(gl.TRIANGLE_FAN,start,points.length-start);
}
function initBall(Time) { //,r){
gl.useProgram(program);
/*******************************************************
filled_circle(vec2(r*Math.cos(Time),r*Math.sin(Time)),0.05,4);*/
vx= (Math.cos(60*(Math.PI/180))*1);
vy= (Math.sin(60*(Math.PI/180))*1);
filled_circle(vec2(-0.8+(vx*Time),-0.3+(vy*Time)),0.05,4);
// Load the data into the GPU
var bufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, bufferId);
gl.bufferData(gl.ARRAY_BUFFER, flatten(points), gl.STATIC_DRAW);
// Associate our shader variables with
// the data buffer.
var vPosition = gl.getAttribLocation(program,"vPosition");
gl.vertexAttribPointer(vPosition,2,gl.FLOAT,false,0,0);
gl.enableVertexAttribArray(vPosition);
// load color data to the gpu
var cBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, cBuffer);
gl.bufferData(gl.ARRAY_BUFFER, flatten(colors), gl.STATIC_DRAW);
var vColor = gl.getAttribLocation(program, "vColor");
gl.vertexAttribPointer(vColor, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vColor);
}
I think the easiest way to do it is to give your projectile a starting position, velocity and acceleration. Then the position of the projectile at any time is position + velocity * time + 0.5 * acceleration * time * time. The angle of the projectile is just the angle of its current velocity.
If you eventually want to add other things like collisions, then it's probably a good idea to have the projectile track its current velocity and acceleration, and on each frame update the position and velocity based on the elapsed time since the previous frame. Like so:
Projectile.prototype.update = function (dt) {
    // semi-implicit Euler: integrate velocity first, then position, component-wise
    this.vx += this.ax * dt;
    this.vy += this.ay * dt;
    this.x += this.vx * dt;
    this.y += this.vy * dt;
    // the projectile points along its current velocity
    this.angle = Math.atan2(this.vy, this.vx);
};
And on each frame, call projectile.update(dt) where dt = currentFrameTime - lastFrameTime.
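As a rough sketch of how that could be wired up (the constructor, the field names vx/vy/ax/ay, and the gravity value are assumptions, not code from the question):
// Hypothetical constructor matching the update() above; positions and velocities
// are in clip-space units, the launch angle is in degrees.
function Projectile(x, y, speed, angleDeg) {
  var rad = angleDeg * Math.PI / 180;
  this.x = x;
  this.y = y;
  this.vx = speed * Math.cos(rad);
  this.vy = speed * Math.sin(rad);
  this.ax = 0;
  this.ay = -0.5;                 // gravity pulling toward -y; the value is a placeholder
  this.angle = rad;
}

var ball = new Projectile(-0.8, -0.3, 1.0, 60);   // launch point and speed taken from initBall()
var last = performance.now();

function tick(now) {
  var dt = (now - last) / 1000;   // elapsed seconds since the previous frame
  last = now;
  ball.update(dt);
  // ...rebuild the ball's vertices from ball.x / ball.y and draw as in render()...
  requestAnimationFrame(tick);
}
requestAnimationFrame(tick);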