Edgar SIMO-SERRA
シモセラ エドガー
(x,y,z)
, however, surface normals and other information can also be used(u,v)
space(u,v)
-coordinates per vertex(u,v)
values and don’t have issues[0,1]
range:[−1,+1]
range from the [0,1]
texture valueuniform sampler
in the fragment shader (sampler2D
for 2D images)gl.createTexture();
gl.bindTexture(...)
and set the data with gl.texImage2D(...)
// Fragment shader: look up the bound texture at the interpolated
// texture coordinates and output that colour unchanged.
uniform sampler2D my_texture; // Texture bound by the application.
in vec2 texcoords;            // Interpolated (u,v), set in the vertex shader.
out vec4 colour_out;          // Final fragment colour.
void main () {
    // Fetch the texel at this fragment's (u,v) position.
    vec4 sampled = texture( my_texture, texcoords );
    colour_out = sampled;
}
// Create a new texture and make it the current TEXTURE_2D binding.
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Fill the texture with a 1x1 red pixel as a placeholder, so it can be
// sampled immediately while the real image is still downloading.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
new Uint8Array([255, 0, 0, 255]));
// Returns true when value is a power of two; WebGL1 can only generate
// mipmaps (and use REPEAT wrapping) for power-of-two textures.
function isPowerOf2(value) {
return (value & (value - 1)) === 0;
}
// Asynchronously load an image
var image = new Image();
image.src = "path/to/image.png";
image.addEventListener('load', function() {
// Copy the image to the texture (re-bind in case another texture
// was bound in the meantime).
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
// Without mipmaps, the default min filter (NEAREST_MIPMAP_LINEAR) leaves
// the texture incomplete and it samples as black. Either generate the
// mipmaps (power-of-two only in WebGL1) or fall back to non-mipmap
// filtering with edge clamping.
if (isPowerOf2(image.width) && isPowerOf2(image.height)) {
gl.generateMipmap(gl.TEXTURE_2D);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
}
});
in vec2 tex_vertex; // Per-vertex texture coordinate (vertex attribute).
out vec2 texcoords; // Interpolated by the rasterizer, read by the fragment shader.
... // Vertex position stuff (position attribute/uniform declarations elided).
void main () {
texcoords = tex_vertex; // Pass through unchanged; you could transform the coordinates here if needed.
... // Vertex position stuff (gl_Position computation elided).
}
// Query the shader program for the texture-coordinate attribute's location.
var uvAttribLocation = gl.getAttribLocation(program, "tex_vertex");
// Allocate a GPU buffer for the (u,v) data and bind it as ARRAY_BUFFER.
var uvBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, uvBuffer);
// Set buffer data here with gl.bufferData(...)
// Turn the attribute on and describe its layout: two floats per vertex,
// not normalized, tightly packed (stride 0), starting at offset 0.
gl.enableVertexAttribArray( uvAttribLocation );
gl.vertexAttribPointer( uvAttribLocation, 2, gl.FLOAT, false, 0, 0 );
(0,0)
at the bottom left of the image(0.5,0.5)
to be the center of a pixel2m×2n
texels with m,n∈Z+
)16,384×16,384
texels. The ideal reconstruction filter is the sinc function: \mathrm{sinc}(x) = \frac{\sin(\pi x)}{\pi x}
x *= 2.0; /* For visualization purposes. */ y = sin( M_PI * x) / (M_PI * x); /* Sinc filter. */
α
α>1
minification (downsampling) takes placeα<1
magnification (upsampling) takes placesinc(x/α)
applied before resampling
\bm{p}(x,y) = \sum_{i=1}^n w_i \bm{c}(i,x,y)
n
is the number of samples for a pixel\bm{c}(i,x,y)
is sample functionw_i \in [0,1]
is a weightz
-depthN
-rook based patterns64 \times 64
texel image on 256 \times 256
pixels4 \times 4
or 5 \times 5
array of texelsx \in [0,1]
, we want to estimate y \in [f(0), f(1)]
with f(\cdot)
only defined at 0
and 1
y = \begin{cases}
f(0), & \text{if}\;x<0.5 \\
f(1), & \text{otherwise}
\end{cases}
y = f(0) + \left(f(1) - f(0)\right) \frac{x-0}{1-0} = x\,f(1) + (1-x)\,f(0)
x=x_{-1}
or x=x_{+1}
f(-1), f(2)
adjacent to the points for interpolationf(0)
and f(1)
and derivative at points x=0
and x=1
y = f(0) + x^2 (3-2x) \left( f(1) - f(0) \right)
f'(0) = f'(1) = 0
smoothstep()
m(x) = ax^3 + bx^2 + cx + d
m(0)=0, m(1)=1, m'(0)=m'(1)=0
m(0)=0 \implies d=0
m'(x) = 3ax^2 + 2bx + c
m'(0)=0 \implies c=0
m(1)=1 \implies a+b=1 \implies a=1-b
m'(1)=0 \implies 3a+2b=0 \implies (3-3b)+2b=0 \implies b=3
a=1-b \implies a=-2
m(x) = -2x^3 + 3x^2 = x^2(3-2x)
m''(0)=m''(1)=0
\partial u / \partial x
, \partial v / \partial x
, \partial u / \partial y
, and \partial v / \partial y
z
buffer depth, and alpha are associated with each pixelz
-buffer which only has one value per pixel
\bm{c}_o = \alpha_s \bm{c}_s + (1-\alpha_s) \bm{c}_d
\bm{c}_s
is the colour of the transparent source object\alpha_s
is the alpha of the source object, and \bm{c}_d
is the colour of the destination
\bm{c}_o = \alpha_s \bm{c}_s + \bm{c}_d
z
-buffer replacement is disabled when drawing transparency
\begin{align}
\bm{c}_o &= \alpha_d \bm{c}_d + (1-\alpha_d) \bm{c}_s \\
\alpha_o &= \alpha_s(1-\alpha_d) + \alpha_d = \alpha_s - \alpha_s\alpha_d + \alpha_d \\
\end{align}
z
-buffers and multiple passesz
-bufferz
-depth of an object matches value in the first z-buffer
, we know this is the closest transparent object and save RGBA to a colour bufferz
-depth in another buffer (peeling)z
-buffer)
\bm{c}_o = \sum_{i=1}^n (\alpha_i \bm{c}_i) + \bm{c}_d (1-\sum_{i=1}^n \alpha_i)
n
is number of transparent surfaces, \bm{c}_i
and \alpha_i
are colour and transparency values, and \bm{c}_d
is the opaque portion of the scene
\begin{align}
\bm{c}_\text{sum} &= \sum_{i=1}^n \alpha_i \bm{c}_i, \quad
\alpha_\text{sum} = \sum_{i=1}^n \alpha_i \\
u &= (1-\alpha_\text{sum}/n)^n \\
\bm{c}_o &= (1 - u) \frac{\bm{c}_\text{sum}}{\alpha_\text{sum}} + u \bm{c}_d
\end{align}
\bm{c}_\text{sum}
and \alpha_\text{sum}
are created by transparency renderingu
is estimated visibility of the destination\bm{c}'_s = \alpha_s \bm{c}_s
\bm{c}, \alpha
\bm{c}' = \bm{c}\alpha, \alpha' = \alpha
\bm{c}_o = \bm{c}'\alpha' = \bm{c}\alpha^2
gl.blendFunc()
float d = compute_sdf(); /* Signed distance to the shape at this fragment. */
vec4 col = vec4(1.0); /* White. */
col.a = smoothstep( -1.0, 0.0, -d ); /* Antialias over one pixel (assuming the SDF is in pixel units). */
/* If not using transparency, col.a is alternatively the value you want to use for blending. */
void main () { vec2 p = (2.0 * gl_FragCoord.xy - u_resolution.xy) / u_resolution.y; /* Try playing with values of m. */ float m = 1.0 / u_resolution.x; /* Compute one pixel in normalized coordinates. */ float d = sdStar( p, 0.8, 10, 7.0 ); /* Try making this abs(). */ float b = smoothstep( -m, 0.0, -d ); /* Blend factor. */ colour_out = vec4( mix( vec3(0.0), vec3(1.0), b ), 1.0 ); }
void main () { vec2 p = (2.0 * gl_FragCoord.xy - u_resolution.xy) / u_resolution.y; float m = 1.0 / u_resolution.x; float d1 = sdCircle( p+vec2(sin(u_time),0.0), 0.5 ); float d2 = sdStar( p, 0.8, 10, 7.0 ); float b1 = smoothstep( -m, 0.0, -d1 ) * 0.5; float b2 = smoothstep( -m, 0.0, -d2 ); colour_out.rgb = mix( vec3(0.0), vec3(0.0,0.0,1.0), b2 ); colour_out.rgb = mix( colour_out.rgb, vec3(1.0,0.0,0.0), b1 ); colour_out.a = 1.0; }