How to change javascript event listener? - javascript

I just switched to a new shopify theme. I've run into some issues with code that works fine on my other themes, but not on this new one, due to how their theme is structured.
Their documentation says to use the following event listener instead of document.onload and $(document).ready():
document.addEventListener('page:loaded', function() {
console.log('page:loaded');
});
I'm not skilled in javascript and I'm having trouble getting it to work with the following 2 scripts. Can anyone assist?
<script type="text/javascript">
jQuery(document).ready(function($) {
$('a[data-rel^=lightcase]').lightcase();
});
</script>
<script>
window.addEventListener("load", function () {
var curtains = new Curtains({
container: "planes-canvas"
});
var planeEls = document.getElementsByClassName("planes");
var vs = `#ifdef GL_ES
precision mediump float;
#endif
// default mandatory attributes
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
// those projection and model view matrices are generated by the library
// it will position and size our plane based on its HTML element CSS values
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
// texture coord varying that will be passed to our fragment shader
varying vec2 vTextureCoord;
void main() {
// apply our vertex position based on the projection and model view matrices
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
// varying
// use texture matrix and original texture coords to generate accurate texture coords
vTextureCoord = aTextureCoord;
}`;
var fs = `
#ifdef GL_ES
precision mediump float;
#endif
// get our varyings
varying vec3 vVertexPosition;
varying vec2 vTextureCoord;
// the uniform we declared inside our javascript
uniform float uTime;
// our texture sampler (default name, to use a different name please refer to the documentation)
uniform sampler2D planeTexture;
vec3 hueRotate(vec3 col, float hue) {
vec3 k = vec3(0.57735, 0.57735, 0.57735);
float cosAngle = cos(hue);
return col * cosAngle + cross(k, col) * sin(hue) + k * dot(k, col) * (1.0 - cosAngle);
}
vec3 saturate(vec3 rgb, float adjustment) {
vec3 W = vec3(0.2125, 0.7154, 0.0721);
vec3 intensity = vec3(dot(rgb, W));
return mix(intensity, rgb, adjustment);
}
void main() {
// get our texture coords
vec2 textureCoord = vTextureCoord;
// displace our pixels along both axis based on our time uniform and texture UVs
// this will create a kind of water surface effect
// try to comment a line or change the constants to see how it changes the effect
// reminder : textures coords are ranging from 0.0 to 1.0 on both axis
const float PI = 3.141592;
textureCoord.x += (
sin(textureCoord.x * 12.0 + ((uTime * (PI / 15.0)) * 0.031))
+ sin(textureCoord.y * 12.0 + ((uTime * (PI / 12.489)) * 0.047))
) * 0.0050;
textureCoord.y += (
sin(textureCoord.y * 8.0 + ((uTime * (PI / 12.023)) * 0.023))
+ sin(textureCoord.x * 8.0 + ((uTime * (PI / 15.1254)) * 0.067))
) * 0.0100;
vec4 color = texture2D(planeTexture, textureCoord);
// hue rotation from 0 to PI in 10 seconds
float hueRotation = cos(uTime / 600.0) * PI;
color.rgb = hueRotate(color.rgb, hueRotation);
// saturate
color.rgb = saturate(color.rgb, 2.0);
gl_FragColor = color;
}
`;
var planes = [];
function handlePlane(index) {
var plane = planes[index];
plane
.onReady(function () {
// our texture has been loaded, resize our plane!
plane.planeResize();
})
.onRender(function () {
plane.uniforms.time.value++;
});
}
for (var i = 0; i < planeEls.length; i++) {
var params = {
vertexShader: vs,
fragmentShader: fs,
uniforms: {
time: {
name: "uTime",
type: "1f",
value: 0
}
}
};
var plane = curtains.addPlane(planeEls[i], params);
if (plane) {
planes.push(plane);
handlePlane(i);
}
}
});
</script>

For this code to work with the new theme, you need to listen to the custom event page:loaded specific to this new theme, instead of the standard events window.onload or $(document).ready().
Below you will find your old code snippets adapted to the new event:
First script tag :
<script type="text/javascript">
document.addEventListener('page:loaded', function() {
$('a[data-rel^=lightcase]').lightcase();
});
</script>
Second one :
<script>
document.addEventListener('page:loaded', function() {
var curtains = new Curtains({
container: "planes-canvas"
});
var planeEls = document.getElementsByClassName("planes");
var vs = `#ifdef GL_ES
precision mediump float;
#endif
// default mandatory attributes
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
// those projection and model view matrices are generated by the library
// it will position and size our plane based on its HTML element CSS values
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
// texture coord varying that will be passed to our fragment shader
varying vec2 vTextureCoord;
void main() {
// apply our vertex position based on the projection and model view matrices
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
// varying
// use texture matrix and original texture coords to generate accurate texture coords
vTextureCoord = aTextureCoord;
}`;
var fs = `
#ifdef GL_ES
precision mediump float;
#endif
// get our varyings
varying vec3 vVertexPosition;
varying vec2 vTextureCoord;
// the uniform we declared inside our javascript
uniform float uTime;
// our texture sampler (default name, to use a different name please refer to the documentation)
uniform sampler2D planeTexture;
vec3 hueRotate(vec3 col, float hue) {
vec3 k = vec3(0.57735, 0.57735, 0.57735);
float cosAngle = cos(hue);
return col * cosAngle + cross(k, col) * sin(hue) + k * dot(k, col) * (1.0 - cosAngle);
}
vec3 saturate(vec3 rgb, float adjustment) {
vec3 W = vec3(0.2125, 0.7154, 0.0721);
vec3 intensity = vec3(dot(rgb, W));
return mix(intensity, rgb, adjustment);
}
void main() {
// get our texture coords
vec2 textureCoord = vTextureCoord;
// displace our pixels along both axis based on our time uniform and texture UVs
// this will create a kind of water surface effect
// try to comment a line or change the constants to see how it changes the effect
// reminder : textures coords are ranging from 0.0 to 1.0 on both axis
const float PI = 3.141592;
textureCoord.x += (
sin(textureCoord.x * 12.0 + ((uTime * (PI / 15.0)) * 0.031))
+ sin(textureCoord.y * 12.0 + ((uTime * (PI / 12.489)) * 0.047))
) * 0.0050;
textureCoord.y += (
sin(textureCoord.y * 8.0 + ((uTime * (PI / 12.023)) * 0.023))
+ sin(textureCoord.x * 8.0 + ((uTime * (PI / 15.1254)) * 0.067))
) * 0.0100;
vec4 color = texture2D(planeTexture, textureCoord);
// hue rotation from 0 to PI in 10 seconds
float hueRotation = cos(uTime / 600.0) * PI;
color.rgb = hueRotate(color.rgb, hueRotation);
// saturate
color.rgb = saturate(color.rgb, 2.0);
gl_FragColor = color;
}
`;
var planes = [];
function handlePlane(index) {
var plane = planes[index];
plane
.onReady(function () {
// our texture has been loaded, resize our plane!
plane.planeResize();
})
.onRender(function () {
plane.uniforms.time.value++;
});
}
for (var i = 0; i < planeEls.length; i++) {
var params = {
vertexShader: vs,
fragmentShader: fs,
uniforms: {
time: {
name: "uTime",
type: "1f",
value: 0
}
}
};
var plane = curtains.addPlane(planeEls[i], params);
if (plane) {
planes.push(plane);
handlePlane(i);
}
}
});
</script>

Related

PIXI.js sprite loses rotation after applying filter

I am first rotating a sprite which has a texture applied, then applying a filter with a fragment shader which causes distortion on the sprite. However, when I add the filter to the sprite, it rotates to normal horizontal position instead of the angled position it had before.
I have tried to apply a rotating function inside the shader to rotate the uv. This rotates the image but changes the image outside the parts that are rotated. Here are some screenshots.
Initial look of the sprite after adding and changing the angle:
How it looks after applying the filter:
As you can see the rotation is removed.
I tried to add a rotation matrix inside the shader, here is the result:
The rotation is correct, but only the texture is rotated and not the actual container.
Applying angle back to sprite does nothing.
The actual result should be first + second image, so that the filter applies on the rotated sprite.
Here is the code that adds the filter to the image:
const filter = new PIXI.Filter(null, getTransitionFragmentShader(transition, 2), uniforms);
filter.apply = function (filterManager, input, output, clear) {
var matrix = new PIXI.Matrix();
this.uniforms.mappedMatrix = filterManager.calculateNormalizedScreenSpaceMatrix(matrix);
PIXI.Filter.prototype.apply.call(this, filterManager, input, output, clear);
};
sprite.filters = [filter];
vec2 rotate(vec2 v, float a) {
float s = sin(a);
float c = cos(a);
mat2 m = mat2(c, -s, s, c);
return m * v;
}
vec4 transition (vec2 p) {
float dt = parabola(progress,1.);
float border = 1.;
vec2 newUV = rotate(p, angle);
vec4 color1 = vec4(0, 0, 0, 0);
if (fromNothing) {
color1 = vec4(0, 0, 0, 0);
} else {
color1 = texture2D(uTexture1, newUV);
}
vec4 color2 = texture2D(uTexture2, newUV);
vec4 d = texture2D(displacement,vec2(newUV.x*scaleX,newUV.y*scaleY));
float realnoise = 0.5*(cnoise(vec4(newUV.x*scaleX + 0.*time/3., newUV.y*scaleY,0.*time/3.,0.)) +1.);
float w = width*dt;
float maskvalue = smoothstep(1. - w,1.,p.x + mix(-w/2., 1. - w/2., progress));
float maskvalue0 = smoothstep(1.,1.,p.x + progress);
float mask = maskvalue + maskvalue*realnoise;
float final = smoothstep(border,border+0.01,mask);
return mix(color1, color2, final);
}
This is the shader code with omitted functions for brevity.
Thanks!
What I did, was instead use a vertex shader for rotation as follows:
attribute vec2 aVertexPosition;
uniform mat3 projectionMatrix;
varying vec2 vTextureCoord;
uniform vec4 inputSize;
uniform vec4 outputFrame;
uniform vec2 rotation;
vec4 filterVertexPosition( void )
{
vec2 position = aVertexPosition * max(outputFrame.zw, vec2(0.)) + outputFrame.xy;
vec2 rotatedPosition = vec2(
position.x * rotation.y + position.y * rotation.x,
position.y * rotation.y - position.x * rotation.x
);
return vec4((projectionMatrix * vec3(rotatedPosition, 1.0)).xy, 0.0, 1.0);
}
vec2 filterTextureCoord( void )
{
return aVertexPosition * (outputFrame.zw * inputSize.zw);
}
void main(void)
{
gl_Position = filterVertexPosition();
vTextureCoord = filterTextureCoord();
}
Rotation is passed as a pair of the sine and cosine of the angle: [sin(radians), cos(radians)].

Shader linking fails without error message only on mobile devices

I'm working on a graphics engine written in C++. We currently can either build for desktop or for web (using emscripten). Both platforms utilize OpenGL ES 2.0.
I have a vertex and a fragment shader:
Vertex Shader
#version 100
attribute vec4 a_position;
attribute vec2 a_texcoord;
attribute vec3 a_normal;
attribute vec3 a_tangent;
attribute vec3 a_bitangent;
uniform mat4 u_model;
uniform mat4 u_view;
uniform mat4 u_projection;
uniform mat3 u_normalMatrix;
varying vec2 v_texcoord;
varying vec3 v_normal;
varying vec3 v_fragPos;
varying mat3 v_TBN;
void main()
{
vec3 T = normalize(vec3(u_model * vec4(a_tangent, 0.0)));
vec3 B = normalize(vec3(u_model * vec4(a_bitangent, 0.0)));
vec3 N = normalize(vec3(u_model * vec4(a_normal, 0.0)));
v_TBN = mat3(T, B, N);
gl_Position = u_projection * u_view * u_model * a_position;
v_texcoord = a_texcoord;
v_normal = u_normalMatrix * a_normal;
v_fragPos = vec3(u_model * a_position);
};
Fragment Shader
#version 100
precision mediump float;
varying vec2 v_texcoord;
varying vec3 v_normal;
varying vec3 v_fragPos;
varying mat3 v_TBN;
struct Material {
sampler2D diffuse;
sampler2D specular;
sampler2D normal;
float shininess;
};
struct Light {
int type;
vec3 position;
vec3 direction;
vec3 ambient;
vec3 diffuse;
vec3 specular;
float constant;
float linear;
float quadratic;
float cutOff;
float outerCutOff;
};
const int MAX_LIGHTS = 16;
uniform vec3 u_viewPos;
uniform Material u_material;
uniform Light u_lights[MAX_LIGHTS];
uniform int u_lightCount;
uniform bool normalMapping;
vec3 calcDirLight(Light light, vec3 normal, vec3 viewDir);
vec3 calcPointLight(Light light, vec3 normal, vec3 viewDir);
vec3 calcSpotLight(Light light, vec3 normal, vec3 viewDir);
void main()
{
vec3 lightOutput = vec3(0.0);
vec3 norm = normalize(v_normal);
if (normalMapping)
{
norm = texture2D(u_material.normal, v_texcoord).rgb;
norm = normalize(norm * 2.0 - 1.0);
norm = normalize(v_TBN * norm);
}
vec3 viewDir = normalize(u_viewPos - v_fragPos);
for(int i = 0; i < MAX_LIGHTS; i++)
{
if (i >= u_lightCount){break;}
if (u_lights[i].type == 0)
{
lightOutput += calcDirLight(u_lights[i], norm, viewDir)
}
else if (u_lights[i].type == 1)
{
lightOutput += calcPointLight(u_lights[i], norm, viewDir);
}
else if (u_lights[i].type == 2)
{
lightOutput += calcSpotLight(u_lights[i], norm, viewDir);
}
}
gl_FragColor = vec4(lightOutput, 1.0);
}
vec3 calcDirLight(Light light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(-light.direction);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
return (ambient + diffuse + specular);
}
vec3 calcPointLight(Light light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(light.position - v_fragPos);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
float distance = length(light.position - v_fragPos);
float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
return (ambient + diffuse + specular);
}
vec3 calcSpotLight(Light light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(light.position - v_fragPos);
float diff = max(dot(normal, lightDir), 0.0);
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), u_material.shininess);
float distance = length(light.position - v_fragPos);
float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
float theta = dot(lightDir, normalize(light.direction));
float epsilon = light.cutOff - light.outerCutOff;
float intensity = clamp((theta - light.outerCutOff) / epsilon, 0.0, 1.0);
vec3 ambient = light.ambient * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 diffuse = light.diffuse * diff * vec3(texture2D(u_material.diffuse, v_texcoord));
vec3 specular = light.specular * spec * vec3(texture2D(u_material.specular, v_texcoord));
ambient *= attenuation * intensity;
diffuse *= attenuation * intensity;
specular *= attenuation * intensity;
return (ambient + diffuse + specular);
};
They both compile without errors, but the linking stage fails. The info log (accessed using glGetProgramInfoLog) is empty.
The biggest problem is that it compiles and links perfectly fine on desktop native and desktop browsers (emscripten). It only fails linking on mobile devices.
I've spent hours trying to figure this out, without success. Is there anything obvious I might have missed?
Edit 1: This is the code that builds the shader program:
GLuint GLES2Shader::buildProgram(GLuint vertexShader, GLuint fragmentShader)
{
GL( GLuint programObject = glCreateProgram() );
GL( glAttachShader(programObject, vertexShader) );
GL( glAttachShader(programObject, fragmentShader) );
GL( glLinkProgram(programObject) );
//check if the program linked successfully
GLint linked;
GL( glGetProgramiv(programObject, GL_LINK_STATUS, &linked) );
if (!linked)
{
DEBUG( GLchar infoLog[512] );
DEBUG( glGetProgramInfoLog(programObject, 512, NULL, infoLog) );
DEBUG( LOGE("ERROR::SHADER::LINKING_FAILED %s", infoLog) );
GL( glDeleteProgram(programObject) );
return 0;
}
return programObject;
}

How to pass and use a lookup table in a shader with three.js

I have written a simple three.js of using a height map. This is the relevant code that creates the shader material:
function loadHeightMap() {
// fake a lookup table
var lut = [];
for ( var n=0; n<256; n++ ) {
lut.push(new THREE.Vector3(0.5, 0.4, 0.3));
}
var loader = new THREE.TextureLoader();
var zScale = 10;
var mapLoc = "https://s22.postimg.org/8n93ehmep/Terrain128.png";
loader.load(mapLoc, function ( texture ) {
// use "this." to create global object
this.customUniforms = {
zTexture: { type: "t", value: texture },
zScale: { type: "f", value: zScale },
zLut: { type: "v3v", value: lut }
};
var customMaterial = new THREE.ShaderMaterial({
uniforms: customUniforms,
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent,
side: THREE.DoubleSide
});
var planeGeo = new THREE.PlaneGeometry( 20, 20, 129, 129 );
var plane = new THREE.Mesh( planeGeo, customMaterial );
plane.rotation.x = -Math.PI / 2;
plane.position.y = 0;
scene.add(plane);
});
}
And here are the shaders:
<script id="vertexShader" type="x-shader/x-vertex">
uniform sampler2D zTexture;
uniform float zScale;
uniform vec3 zLut[ 256 ];
varying float vAmount;
void main() {
vec4 heightData = texture2D( zTexture, uv );
vAmount = heightData.r;
// move the position along the normal
vec3 newPosition = position + normal * zScale * vAmount;
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-vertex">
uniform vec3 zLut[ 256 ];
varying float vAmount;
void main() {
int index = int(vAmount) * 255;
vec3 vColor = vec3(vAmount, vAmount, vAmount);
//gl_FragColor = vec4(zLut[index], 1.0);
gl_FragColor = vec4(vColor, 1.0);
}
The shaders and the height map part works fine. But I want to pass the lookup table (zLut). The above code works fine if I don't try to use the lookup table. A working example is here. I created a fiddle as well here but it fails because of CORS issues.
Any suggestions are welcome.
OK, solved this (mostly). The trick was to fetch the lookup color in the vertex shader, where one CAN index into an array with a non-const value. Then pass the resulting color to the fragment shader as a varying. So the two shaders end up being:
<script id="vertexShader" type="x-shader/x-vertex">
uniform sampler2D vTexture;
uniform float vScale;
uniform vec3 vLut[ 256 ];
varying vec3 vColor;
void main() {
vec4 heightData = texture2D( vTexture, uv );
// assuming map is grayscale it doesn't matter if you use r, g, or b.
float vAmount = heightData.r;
// fetch the color from the lookup table so it gets passed to the fragshader
int index = int(heightData.r * 255.0);
vColor = vLut[index];
// move the position along the normal
vec3 newPosition = position + normal * vScale * vAmount;
gl_Position = projectionMatrix * modelViewMatrix * vec4( newPosition, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-vertex">
varying vec3 vColor;
void main() {
gl_FragColor = vec4(vColor, 1.0);
}
</script>
The remaining problem I have is that when rendered the colors are all flat. I tried forcing an update on the vertices in the animate function, but it didn't work. Still researching, but the question here is solved (AFAIK).
You can see the result here

smooth edge in WebGL Programming

When you create a sphere (which is actually also a polyhedron) or another polyhedron with the native WebGL API, you get a flat-shaded polyhedron, and when you assign a texture to it, it looks ugly because of the visible angles between adjacent small faces on the polyhedron's surface. You can subdivide the surface to make it smoother, but is there any other method to smooth the surface of the polyhedron? It should look like the two pictures below. (The two pictures are captured from the Blender software.)
Here is my code for generating the sphere
function getSphere(r,segment_lat,segment_lon){
var normalData = [];
var vertexData = [];
var textureCoord = [];
var vertexIndex = [],
for (var latNum = 0; latNum <= segment_lat; latNum++) {
var theta = latNum * Math.PI / segment_lat;
var sinTheta = Math.sin(theta);
var cosTheta = Math.cos(theta);
for (var lonNum = 0; lonNum <= segment_lon; lonNum++) {
var phi = lonNum * 2 * Math.PI / segment_lon;
var sinPhi = Math.sin(phi);
var cosPhi = Math.cos(phi);
var x = cosPhi * sinTheta;
var y = cosTheta;
var z = sinPhi * sinTheta;
var u = 1 - (lonNum / segment_lon);
var v = 1 - (latNum / segment_lat);
textureCoord.push(u);
textureCoord.push(v);
vertexData.push(r * x);
vertexData.push(r * y);
vertexData.push(r * z);
}
}
for (var latNum=0; latNum < segment_lat;latNum++) {
for (var lonNum=0; lonNum < segment_lon; lonNum++) {
var first = (latNum * (segment_lon + 1)) + lonNum;
var second = first + segment_lon + 1;
vertexIndex .push(first);
vertexIndex .push(second);
vertexIndex .push(first + 1);
vertexIndex .push(second);
vertexIndex .push(second + 1);
vertexIndex .push(first + 1);
}
}
return {'vertexData':vertexData,'vertexIndex':vertexIndex,'textureCoord':textureCoord,'normalDatas':normalData};
},
Fragment Shader:
precision mediump float;
varying vec2 vTextureCoord;
uniform sampler2D uSampler;
void main(void) {
vec3 light = vec3(1,1,1);
vec4 textureColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
gl_FragColor = vec4(textureColor.rgb*light,textureColor.a);
// gl_FragColor = vec4 (1,0,0,.8);
}
Vertex Shader:
attribute vec2 aTextureCoord;
attribute vec3 aVertexPosition;
// uniform mediump mat4 proj_inv;
uniform mediump mat4 modelViewMatrix;
uniform mediump mat4 projectMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
//projectMatrix multi modelViewMatrix must be in vertex shader,or it will be wrong;
gl_Position = projectMatrix*modelViewMatrix*vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
If I have to guess, your rendered result is different from the picture you showed. What you see is a "flat" sphere in one uniform color and you want a shaded sphere, is that correct?
If so, you need to go read tutorials on how lighting works. Basically, the angle between the viewing vector and the fragment's normal is used to determine the brightness of each fragment. A fragment on the sphere that you are staring at directly has a very small angle between the view vector and its normal, and thus it is bright. A fragment on the barely visible edge of the sphere has a large angle between the normal and the view vector, and thus it appears dark.
In your sphere generation code, you need to calculate the normals as well and pass that information to the gpu along with the rest. Fortunately for a sphere, normal is easy to calculate: normal = normalize(position - center); or just normalize(position) if center is assumed to be at (0,0,0).

Export from Shadertoy to Three.js

I am making my first steps coding. I made some courses on internet, then I played with some three.js experiments, and now I would like to continue learning experimenting with Shaders.
I found Shadertoy.com and it's really amazing! There are a lot of difference experiments with incredible effects. I am trying to use one of these shaders in Three.js but is not so easy.
The Shaders are already written, it's true. But I don't know what to do with that, I don't know how I can use it.
Because it's not only copying and pasting the code. There is some connecting code that I have to write to be able to apply some of these amazing effects to a Three.js geometry. I have to use uniforms, and I don't know how to find out which uniforms I can use, or how to use them.
I started watching the tutorials on Shadertoy and reading some articles on the Internet, and it looks like something really abstract. I think that I should study a lot of maths before starting to understand that language.
Do you have some recommendation to start?
Maybe it is something simpler than I think, and I can just copy, paste, and experiment with the code in my HTML document?
Shadertoy is a relatively complex program. It's got audio input into shaders, video input into shaders, audio data generation from shaders, various kinds of textures including both 2d and cubemaps. Supporting all those features is not a small amount of work.
That said a basic shader can be used pretty easily, see example below. But shadertoy shaders are not really designed to be used as materials on meshes in three.js.
If you want to understand why and how WebGL works see http://webglfundamentals.org
const vs = `
attribute vec4 position;
void main() {
gl_Position = position;
}
`;
const userShader = `
// FROM: https://www.shadertoy.com/view/4sdXDl
//spikey
#define SHAPE length(z.yz)
//normal
//#define SHAPE length(z.xyz)
//bizarro
//#define SHAPE length(z.yz-z.xx)
//etc...
#define HIGH_QUAL
#ifdef HIGH_QUAL
#define MARCH_STEPS 199
#else
#define MARCH_STEPS 99
#endif
float k=7.0+3.0*sin(iGlobalTime*0.15);
vec3 mcol=vec3(0.0);
void AbsBox(inout vec4 z){//abs box by kali
z.xyz=abs(z.xyz+1.0)-1.0;
z*=1.5/clamp(dot(z.xyz,z.xyz),0.25,1.0);
}
void Bulb(inout vec4 z, in vec4 c){//mandelBulb by twinbee
float r = length(z.xyz);
float zo = asin(z.z / r) * k + iGlobalTime*0.15;
float zi = atan(z.y, z.x) * 7.0;
z=pow(r, k-1.0)*vec4(r*vec3(cos(zo)*vec2(cos(zi),sin(zi)),sin(zo)),z.w*k)+c;
}
float DE(vec3 p){
vec4 c = vec4(p,1.0),z = c;
Bulb(z,c);
float r0=(length(z.xyz)-1.15)/z.w;
z.xyz-=1.0;
for(int i=0;i<7;i++)AbsBox(z);
float r=SHAPE;
mcol.rgb=vec3(1.0,0.5,0.2)+abs(sin(0.2*r+100.0*z.yxz/z.w));
return 0.5 * max((r-1.0) / z.w,-r0);
}
vec3 sky(vec3 rd, vec3 L){//modified bananaft's & public_int_i's code
float d=0.4*dot(rd,L)+0.6;
//return vec3(d);
rd.y+=sin(sqrt(clamp(-rd.y,0.0,0.9))*90.0)*0.45*max(-0.1,rd.y);
rd=abs(rd);
float y=max(0.,L.y),sun=max(1.-(1.+10.*y+rd.y)*length(rd-L),0.)
+.3*pow(1.-rd.y,12.)*(1.6-y);
return d*mix(vec3(0.3984,0.5117,0.7305),vec3(0.7031,0.4687,0.1055),sun)
*((.5+pow(y,.4))*(1.5-abs(L.y))+pow(sun,5.2)*y*(5.+15.0*y));
}
float rnd;
void randomize(in vec2 p){rnd=fract(float(iFrame)+sin(dot(p,vec2(13.3145,117.7391)))*42317.7654321);}
float ShadAO(in vec3 ro, in vec3 rd){
float t=0.0,s=1.0,d,mn=0.01;
for(int i=0;i<12;i++){
d=max(DE(ro+rd*t)*1.5,mn);
s=min(s,d/t+t*0.5);
t+=d;
}
return s;
}
vec3 scene(vec3 ro, vec3 rd){
vec3 L=normalize(vec3(0.4,0.025,0.5));
vec3 bcol=sky(rd,L);
vec4 col=vec4(0.0);//color accumulator
float t=DE(ro)*rnd,d,od=1.0,px=1.0/iResolution.x;
for(int i=0;i<MARCH_STEPS;i++){
d=DE(ro);
if(d<px*t){
float dif=clamp(1.0-d/od,0.2,1.0);
vec3 scol=mcol*dif*(1.3-0.3*t);
#ifdef HIGH_QUAL
vec2 s=vec2(DE(ro+d*4.0*L),DE(ro+d*16.0*L));
scol*=clamp(0.5*s.x/d+(s.y/d)/8.0,0.0,1.0);
#endif
float alpha=(1.0-col.w)*clamp(1.0-d/(px*t),0.0,1.0);
col+=vec4(clamp(scol,0.0,1.0),1.0)*alpha;
if(col.w>0.9)break;
}
t+=d;ro+=rd*d;od=d;
if(t>6.0)break;
}
col.rgb+=bcol*(1.0-clamp(col.w,0.0,1.0));
return col.rgb;
}
mat3 lookat(vec3 fw){
fw=normalize(fw);vec3 rt=normalize(cross(fw,vec3(0.0,1.0,0.0)));return mat3(rt,cross(rt,fw),fw);
}
void mainImage(out vec4 fragColor, in vec2 fragCoord) {
randomize(fragCoord);
float tim=iGlobalTime*0.3,r=2.0+cos(tim*0.7);
vec2 uv=(fragCoord-0.5*iResolution.xy)/iResolution.x;
vec3 ro=vec3(sin(tim)*r,sin(tim*0.4),cos(tim)*r);
vec3 rd=lookat(-ro)*normalize(vec3(uv,1.0));
//rd+=2.0*cross(qrt.xyz,cross(qrt.xyz,rd)+qrt.w*rd);
fragColor=vec4(scene(ro,rd)*2.0,1.0);
}
`;
// FROM shadertoy.com
const shadertoyBoilerplate = `
#extension GL_OES_standard_derivatives : enable
//#extension GL_EXT_shader_texture_lod : enable
#ifdef GL_ES
precision highp float;
#endif
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform float iChannelTime[4];
uniform vec4 iMouse;
uniform vec4 iDate;
uniform float iSampleRate;
uniform vec3 iChannelResolution[4];
uniform int iFrame;
uniform float iTimeDelta;
uniform float iFrameRate;
struct Channel
{
vec3 resolution;
float time;
};
uniform Channel iChannel[4];
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
uniform sampler2D iChannel2;
uniform sampler2D iChannel3;
void mainImage( out vec4 c, in vec2 f );
${userShader}
void main( void ){
vec4 color = vec4(0.0,0.0,0.0,1.0);
mainImage( color, gl_FragCoord.xy );
color.w = 1.0;
gl_FragColor = color;
}
`;
const $ = document.querySelector.bind(document);
const camera = new THREE.Camera();
camera.position.z = 1;
const scene = new THREE.Scene();
const geometry = new THREE.BufferGeometry();
const vertices = new Float32Array([
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
]);
geometry.addAttribute( 'position', new THREE.BufferAttribute( vertices, 2 ) );
const uniforms = {
iGlobalTime: { type: "f", value: 1.0 },
iResolution: { type: "v3", value: new THREE.Vector3() },
};
const material = new THREE.RawShaderMaterial({
uniforms: uniforms,
vertexShader: vs,
fragmentShader: shadertoyBoilerplate,
});
var mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);
var renderer = new THREE.WebGLRenderer();
document.body.appendChild(renderer.domElement);
resize(true);
render(0);
function resize(force) {
var canvas = renderer.domElement;
var dpr = 1; //window.devicePixelRatio; // make 1 or less if too slow
var width = canvas.clientWidth * dpr;
var height = canvas.clientHeight * dpr;
if (force || width != canvas.width || height != canvas.height) {
renderer.setSize( width, height, false );
uniforms.iResolution.value.x = renderer.domElement.width;
uniforms.iResolution.value.y = renderer.domElement.height;
}
}
function render(time) {
resize();
uniforms.iGlobalTime.value = time * 0.001;
renderer.render(scene, camera);
requestAnimationFrame(render);
}
canvas {
border: 1px solid black;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r74/three.min.js"></script>
The code above from shadertoy passes gl_FragCoord as input to the user's shader which is the pixel coordinate of the pixel being drawn in the canvas.
For a model we can pass in UV coordinates instead, we just have to choose a resolution to multiply them by since UV coordinates usually go from 0 to 1 and the shadertoy shaders are expecting 0 to canvas.width and 0 to canvas.height
Example:
const vs = `
varying vec2 vUv;
void main()
{
vUv = uv;
vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );
gl_Position = projectionMatrix * mvPosition;
}
`;
// Fragment-shader body copied verbatim from Shadertoy
// (https://www.shadertoy.com/view/4sdXDl — a raymarched Mandelbulb/abs-box
// fractal). It defines mainImage(out vec4, in vec2) and relies on
// Shadertoy-style uniforms (iGlobalTime, iResolution, iFrame) that are
// declared by the boilerplate wrapper string. Do not edit the GLSL text:
// it is the contents of a template literal, i.e. runtime data.
const userShader = `
// FROM: https://www.shadertoy.com/view/4sdXDl
//spikey
#define SHAPE length(z.yz)
//normal
//#define SHAPE length(z.xyz)
//bizarro
//#define SHAPE length(z.yz-z.xx)
//etc...
#define HIGH_QUAL
#ifdef HIGH_QUAL
#define MARCH_STEPS 199
#else
#define MARCH_STEPS 99
#endif
float k=7.0+3.0*sin(iGlobalTime*0.15);
vec3 mcol=vec3(0.0);
void AbsBox(inout vec4 z){//abs box by kali
z.xyz=abs(z.xyz+1.0)-1.0;
z*=1.5/clamp(dot(z.xyz,z.xyz),0.25,1.0);
}
void Bulb(inout vec4 z, in vec4 c){//mandelBulb by twinbee
float r = length(z.xyz);
float zo = asin(z.z / r) * k + iGlobalTime*0.15;
float zi = atan(z.y, z.x) * 7.0;
z=pow(r, k-1.0)*vec4(r*vec3(cos(zo)*vec2(cos(zi),sin(zi)),sin(zo)),z.w*k)+c;
}
float DE(vec3 p){
vec4 c = vec4(p,1.0),z = c;
Bulb(z,c);
float r0=(length(z.xyz)-1.15)/z.w;
z.xyz-=1.0;
for(int i=0;i<7;i++)AbsBox(z);
float r=SHAPE;
mcol.rgb=vec3(1.0,0.5,0.2)+abs(sin(0.2*r+100.0*z.yxz/z.w));
return 0.5 * max((r-1.0) / z.w,-r0);
}
vec3 sky(vec3 rd, vec3 L){//modified bananaft's & public_int_i's code
float d=0.4*dot(rd,L)+0.6;
//return vec3(d);
rd.y+=sin(sqrt(clamp(-rd.y,0.0,0.9))*90.0)*0.45*max(-0.1,rd.y);
rd=abs(rd);
float y=max(0.,L.y),sun=max(1.-(1.+10.*y+rd.y)*length(rd-L),0.)
+.3*pow(1.-rd.y,12.)*(1.6-y);
return d*mix(vec3(0.3984,0.5117,0.7305),vec3(0.7031,0.4687,0.1055),sun)
*((.5+pow(y,.4))*(1.5-abs(L.y))+pow(sun,5.2)*y*(5.+15.0*y));
}
float rnd;
void randomize(in vec2 p){rnd=fract(float(iFrame)+sin(dot(p,vec2(13.3145,117.7391)))*42317.7654321);}
float ShadAO(in vec3 ro, in vec3 rd){
float t=0.0,s=1.0,d,mn=0.01;
for(int i=0;i<12;i++){
d=max(DE(ro+rd*t)*1.5,mn);
s=min(s,d/t+t*0.5);
t+=d;
}
return s;
}
vec3 scene(vec3 ro, vec3 rd){
vec3 L=normalize(vec3(0.4,0.025,0.5));
vec3 bcol=sky(rd,L);
vec4 col=vec4(0.0);//color accumulator
float t=DE(ro)*rnd,d,od=1.0,px=1.0/iResolution.x;
for(int i=0;i<MARCH_STEPS;i++){
d=DE(ro);
if(d<px*t){
float dif=clamp(1.0-d/od,0.2,1.0);
vec3 scol=mcol*dif*(1.3-0.3*t);
#ifdef HIGH_QUAL
vec2 s=vec2(DE(ro+d*4.0*L),DE(ro+d*16.0*L));
scol*=clamp(0.5*s.x/d+(s.y/d)/8.0,0.0,1.0);
#endif
float alpha=(1.0-col.w)*clamp(1.0-d/(px*t),0.0,1.0);
col+=vec4(clamp(scol,0.0,1.0),1.0)*alpha;
if(col.w>0.9)break;
}
t+=d;ro+=rd*d;od=d;
if(t>6.0)break;
}
col.rgb+=bcol*(1.0-clamp(col.w,0.0,1.0));
return col.rgb;
}
mat3 lookat(vec3 fw){
fw=normalize(fw);vec3 rt=normalize(cross(fw,vec3(0.0,1.0,0.0)));return mat3(rt,cross(rt,fw),fw);
}
void mainImage(out vec4 fragColor, in vec2 fragCoord) {
randomize(fragCoord);
float tim=iGlobalTime*0.3,r=2.0+cos(tim*0.7);
vec2 uv=(fragCoord-0.5*iResolution.xy)/iResolution.x;
vec3 ro=vec3(sin(tim)*r,sin(tim*0.4),cos(tim)*r);
vec3 rd=lookat(-ro)*normalize(vec3(uv,1.0));
//rd+=2.0*cross(qrt.xyz,cross(qrt.xyz,rd)+qrt.w*rd);
fragColor=vec4(scene(ro,rd)*2.0,1.0);
}
`;
// FROM shadertoy.com
// Wrapper that adapts a Shadertoy shader to a three.js ShaderMaterial:
// it declares every Shadertoy-provided uniform, splices the user's shader
// in via ${userShader}, then calls mainImage() from main() with the UV
// varying scaled by iResolution so the shader sees Shadertoy-style pixel
// coordinates instead of 0..1 UVs. (Only iGlobalTime and iResolution are
// actually fed by the JS below; the other uniforms stay at GL defaults.)
const shadertoyBoilerplate = `
#extension GL_OES_standard_derivatives : enable
//#extension GL_EXT_shader_texture_lod : enable
#ifdef GL_ES
precision highp float;
#endif
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform float iChannelTime[4];
uniform vec4 iMouse;
uniform vec4 iDate;
uniform float iSampleRate;
uniform vec3 iChannelResolution[4];
uniform int iFrame;
uniform float iTimeDelta;
uniform float iFrameRate;
struct Channel
{
vec3 resolution;
float time;
};
uniform Channel iChannel[4];
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
uniform sampler2D iChannel2;
uniform sampler2D iChannel3;
varying vec2 vUv;
void mainImage( out vec4 c, in vec2 f );
${userShader}
void main( void ){
vec4 color = vec4(0.0,0.0,0.0,1.0);
mainImage( color, vUv * iResolution.xy );
color.w = 1.0;
gl_FragColor = color;
}
`;
// --- three.js scene setup ------------------------------------------------
// NOTE(review): `$` is never used in this visible snippet — presumably a
// leftover from the original example; confirm before deleting.
const $ = document.querySelector.bind(document);

// Camera: the aspect passed here (1) is provisional; resize() corrects it
// to match the actual canvas dimensions.
const fieldOfView = 45;
const zNear = .1;
const zFar = 100;
const camera = new THREE.PerspectiveCamera(fieldOfView, 1, zNear, zFar);
camera.position.z = 3;

const scene = new THREE.Scene();
const geometry = new THREE.BoxGeometry(1, 1, 1);

// Shadertoy-style uniforms updated every frame by render().
// (The `type` hints are the old pre-r72 uniform style; harmless with the
// three.js r86 build this example loads.)
const uniforms = {
  iGlobalTime: { type: "f", value: 1.0 },
  iResolution: { type: "v3", value: new THREE.Vector3() },
};
// choose a resolution to pass to the shader
uniforms.iResolution.value.x = 100;
uniforms.iResolution.value.y = 100;

const material = new THREE.ShaderMaterial({
  uniforms: uniforms,
  vertexShader: vs,
  fragmentShader: shadertoyBoilerplate,
});
const mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);

// `const` instead of `var`: the renderer is never reassigned.
const renderer = new THREE.WebGLRenderer();
document.body.appendChild(renderer.domElement);

// Size the drawing buffer once up front, then start the animation loop.
resize(true);
render(0);
// Match the renderer's drawing-buffer size to the canvas's CSS size and keep
// the camera's aspect ratio in sync. Does nothing unless the size actually
// changed, or `force` is truthy (used for the initial call before any CSS
// layout comparison is meaningful).
function resize(force) {
  const canvas = renderer.domElement;
  const pixelRatio = 1; //window.devicePixelRatio; // make 1 or less if too slow
  const displayWidth = canvas.clientWidth * pixelRatio;
  const displayHeight = canvas.clientHeight * pixelRatio;
  const unchanged = displayWidth == canvas.width && displayHeight == canvas.height;
  if (unchanged && !force) {
    return;
  }
  renderer.setSize( displayWidth, displayHeight, false );
  camera.aspect = displayWidth / displayHeight;
  camera.updateProjectionMatrix();
}
// Per-frame loop: advance the shader clock, spin the cube, draw, and
// schedule the next frame. `time` arrives in milliseconds from
// requestAnimationFrame.
function render(time) {
  const seconds = time * 0.001;
  resize();
  uniforms.iGlobalTime.value = seconds;
  mesh.rotation.x = seconds * 0.5;
  mesh.rotation.y = seconds * 0.6;
  renderer.render(scene, camera);
  requestAnimationFrame(render);
}
/* Make the canvas fill the viewport: strip the default body margin and
   size the canvas as a block element covering 100% of both dimensions. */
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/86/three.min.js"></script>
Note that Shadertoy shaders are generally not designed to be used as materials. They are not efficient; rather, they are more like a fun exercise in "how cool an image can I make using only time and pixel location as input". Because of that, while the results can be amazing, they are often 10x, 100x, or even 1000x slower than traditional techniques for materials (using textures).
Compare, for example, an amazing Shadertoy shader that draws an entire city yet — at least on my machine — runs at 10–18fps in a small window and 1fps when fullscreen, versus a game like Grand Theft Auto 5, which also shows an entire city yet manages to run at 30–60fps fullscreen on the same machine.
There is a lot of fun to be had and lots of interesting techniques to learn on shadertoy.com that might be useful in your own shaders, but don't mistake what's there for "production" techniques. It's called shaderTOY for a reason 😉

Categories