Skip to main content

Images and Textures

Learn how to load and process external images, and techniques for texture-based expressions.

Loading Images

Basic Image Loading

Live Editor
const fragment = () => {
      // Texture uniform created from an external image URL
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Normalize the fragment position into 0-1 UV space
      const texCoord = position.xy.div(iResolution)
      // Look up and return the texel at this coordinate
      return iTexture.texture(texCoord)
}

Understanding UV Coordinates

UV coordinates are a coordinate system that represents positions on an image:

(0,1) -------- (1,1)
  |              |
  |    Image     |
  |              |
(0,0) -------- (1,0)
Live Editor
const fragment = () => {
      // Visualize UV space directly: red = horizontal, green = vertical
      return vec4(uv.x, uv.y, 0, 1)
}

Image Filters

Blur Effect

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Average an arbitrary number of sampled vectors.
      // Dividing by args.length (rather than a hard-coded 9) keeps the
      // helper correct if the kernel size ever changes.
      const average = Fn((args) => {
              return args.reduce((sum, v) => sum.add(v)).div(args.length)
      })
      // One texel step in UV space
      const step = vec2(1).div(iResolution)
      // Sample the texture at an (x, y) texel offset from uv
      const s = Fn(([uv, x, y]) => {
              return iTexture.texture(vec2(x, y).mul(step).add(uv))
      })
      // Box blur: average all 9 samples of the 3x3 neighborhood
      return average(
              s(uv, -1, -1), s(uv, 0, -1), s(uv, 1, -1),
              s(uv, -1,  0), s(uv, 0,  0), s(uv, 1,  0),
              s(uv, -1,  1), s(uv, 0,  1), s(uv, 1,  1)
      )
}

Edge Detection

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // One texel step in UV space
      const step = vec2(1).div(iResolution)
      // Sample the red channel at an (x, y) texel offset from uv
      const s = Fn(([uv, x, y]) => {
              const offset = vec2(x, y).mul(step).add(uv)
              return iTexture.texture(offset).r
      })
      // Frobenius inner product of two mat3 values:
      // the sum of dot products of corresponding columns
      const matDot = Fn(([a, b]) => {
              return a[0].dot(b[0]).add(a[1].dot(b[1])).add(a[2].dot(b[2]))
      })
      // 3x3 neighborhood of red-channel samples packed into a mat3
      const sample = mat3(
              s(uv, -1, -1), s(uv, 0, -1), s(uv, 1, -1),
              s(uv, -1,  0), s(uv, 0,  0), s(uv, 1,  0),
              s(uv, -1,  1), s(uv, 0,  1), s(uv, 1,  1)
      )
      // Sobel X kernel (horizontal gradient), marked via .constant()
      const sobelX = mat3(
              -1, 0, 1,
              -2, 0, 2,
              -1, 0, 1
      ).constant()
      // Sobel Y kernel (vertical gradient), marked via .constant()
      const sobelY = mat3(
              -1, -2, -1,
               0,  0,  0,
               1,  2,  1
      ).constant()
      // Convolve the neighborhood with each kernel via the Frobenius product
      const gx = matDot(sample, sobelX)
      const gy = matDot(sample, sobelY)
      // Gradient magnitude = edge strength, rendered as grayscale
      const edge = vec2(gx, gy).length()
      return vec4(vec3(edge), 1)
}

Sepia Tone

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Original pixel color at this fragment
      const srcColor = iTexture.texture(uv)
      // Sepia tone conversion matrix: each output channel is a
      // weighted blend of the input RGB channels
      const toneMatrix = mat3(
              0.4, 0.3, 0.3,  // for R
              0.8, 0.7, 0.5,  // for G
              0.2, 0.2, 0.1   // for B
      )
      // Apply the tone matrix to the RGB part and restore full alpha
      const toned = toneMatrix.mul(srcColor.rgb)
      return vec4(toned, 1)
}

Texture Coordinates

Texture Tiling

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Repeat the image in a 3x3 grid: scale UVs up, then keep only
      // the fractional part so coordinates wrap back into 0-1
      const tiledUV = uv.mul(3).fract()
      return iTexture.texture(tiledUV)
}

Texture Rotation

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Shift the UV origin to the image center (-0.5 to 0.5)
      const center = uv.sub(0.5)
      // Rotation angle advances with time
      const cosA = iTime.cos()
      const sinA = iTime.sin()
      // 2x2 rotation matrix — assuming GLSL-style column-major ordering,
      // mat2(c, s, -s, c) is the standard counter-clockwise rotation
      const rotateMat = mat2(cosA, sinA, sinA.negate(), cosA)
      // Rotate around the center, then shift back into 0-1 UV space
      const rotatePos = rotateMat.mul(center).add(0.5)
      return iTexture.texture(rotatePos)
}

Fisheye Lens

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Center UV coordinates around (0, 0), range -1..1
      const center = uv.sub(0.5).mul(2)
      const radius = center.length()
      // Barrel-distortion scale: 1 - 0.5 * r^2.
      // Computed directly instead of ((1 - 0.5*r^2) * r) / r, which
      // divides 0 by 0 at the exact center pixel and yields NaN there.
      const factor = radius.pow(2).mul(0.5).oneMinus()
      // Scale the centered offset and map back into 0-1 UV space
      const newUV = center.mul(factor).div(2).add(0.5)
      // Sample texture with the distorted coordinates
      return iTexture.texture(newUV)
}

Dynamic Texture

Texture Scrolling

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Per-axis scroll velocity (x scrolls faster than y)
      const velocity = vec2(0.1, 0.05)
      // Shift UVs over time and wrap into 0-1 so the image loops
      const scrolledUV = iTime.mul(velocity).add(uv).fract()
      return iTexture.texture(scrolledUV)
}

Wavy Texture

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Returns a vec2 sine displacement for the given coordinates
      const wavy = Fn(([st, amp, freq, spd]) => {
              const t = iTime.mul(spd)
              // Swizzle yx so the horizontal wave follows y and vice versa
              const phase = st.yx.mul(freq).add(t)
              return phase.sin().mul(amp)
      })
      // Displace the UVs: amplitude 0.1, frequency 16, speed 4
      const wavyUV = uv.add(wavy(uv, 0.1, 16, 4))
      return iTexture.texture(wavyUV)
}

Glass Texture

Live Editor
const fragment = () => {
      // ref: https://www.shadertoy.com/view/33yXzd
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Circle radius pulses with time, staying within roughly 0.2..0.45
      const circleRadius = iTime.sin().add(1).div(8).add(0.2)
      // Circle center follows the mouse, scaled and shifted into UV space
      const center = iMouse.div(4).add(0.5)
      const distance = uv.distance(center)
      // Refraction strength grows sharply toward the circle's edge (pow 6)
      const refraction = distance.div(circleRadius).pow(6).mul(0.1)
      // Push the sample point toward the circle center by that strength
      const offset = center.sub(uv).normalize().mul(refraction)
      // Small extra per-channel shift for chromatic aberration
      const dispersion = offset.mul(0.1)
      // Sample each color channel at slightly different coordinates
      const r = iTexture.texture(uv.add(offset).sub(dispersion)).r
      const g = iTexture.texture(uv.add(offset)).g
      const b = iTexture.texture(uv.add(offset).add(dispersion)).b
      // Anti-aliased circle mask: edge width from the screen-space derivative
      // NOTE(review): the chained smoothstep argument order is DSL-specific —
      // presumably smoothstep(radius - edge, radius + edge, distance); confirm
      const edge = fwidth(distance)
      const alpha = circleRadius.sub(edge).smoothstep(circleRadius.add(edge), distance)
      // Blend the refracted color with the undistorted background
      const backgroundColor = iTexture.texture(uv)
      return vec4(r, g, b, 1).mix(backgroundColor, alpha)
}

Advanced Texture

Pattern Blending

Combine procedural patterns with texture sampling for complex effects:

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Procedural 8x8 checkerboard mask: 0 or 1 per cell
      const grid = uv.mul(8).floor()
      const checker = mod(grid.x.add(grid.y), 2)
      // Blend a tan-warped sample with a normal sample, switched by the mask
      // NOTE(review): uv.tan() produces a deliberately extreme warp; the mix
      // argument order (target, factor) follows this DSL's chained convention
      return iTexture.texture(uv.tan()).mix(iTexture.texture(uv), checker)
}

Pattern Control

Use texture data to control procedural pattern parameters:

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Sample a zoomed-out copy of the texture for generative parameters.
      // Uses the method-call form iTexture.texture(...) for consistency
      // with every other example on this page.
      const smallUV = uv.mul(0.2)
      const texData = iTexture.texture(smallUV)
      const timeScale = iTime.mul(0.2)
      // Red channel drives flow (phase over time), green drives frequency
      const flow = texData.r.mul(timeScale)
      const freq = texData.g.mul(15).add(5)
      // Generative pattern driven by the texture data
      const patternPhase = uv.mul(freq).add(flow)
      const patterns = vec2(patternPhase.x.sin(), patternPhase.y.cos())
      // Animated concentric rings around the image center
      const center = uv.sub(0.5)
      const radialPattern = center.length().mul(10).sub(iTime).sin()
      const pattern = patterns.x.mul(patterns.y).add(radialPattern)
      // Blend the original texture with a subtle pattern overlay
      const originalColor = iTexture.texture(uv)
      const patternOverlay = pattern.mul(0.3).add(0.7)
      const color = originalColor.rgb.mul(patternOverlay)
      return vec4(color, 1)
}

Dynamic Distortion

Use texture data to drive animation parameters and create dynamic effects:

Live Editor
const fragment = () => {
      const iTexture = uniform('https://avatars.githubusercontent.com/u/40712342')
      // Procedural noise built from sines, mapped into roughly the 0..1 range
      const noise = Fn(([uv]) => {
              const n1Pattern = uv.mul(10).sin()
              const n1 = n1Pattern.x.mul(n1Pattern.y)
              const n2 = uv.x.mul(20).add(uv.y.mul(15)).sin().mul(0.5)
              return n1.add(n2).mul(0.5).add(0.5)
      })
      // Sum of the RGB channels via a dot product with vec3(1)
      // NOTE(review): this spans 0..3, not 0..1 — a 1/3 scale may be
      // intended; confirm against the desired effect
      const baseColor = iTexture.texture(uv)
      const textureNoise = vec3(1).dot(baseColor.rgb)
      // Blend texture-derived and procedural noise equally
      const smallerUV = uv.mul(0.5)
      const proceduralNoise = noise(smallerUV)
      const combinedNoise = mix(textureNoise, proceduralNoise, 0.5)
      // The combined noise modulates the phase of a time-based wobble
      const animOffset = iTime.mul(combinedNoise).sin().mul(0.08)
      // Convert noise to an angle (~2*pi) and displace UVs along it
      const phase = combinedNoise.mul(6.28)
      const distortedUV = vec2(phase.cos(), phase.sin()).mul(animOffset).add(uv)
      // Sample texture with distorted coordinates
      return iTexture.texture(distortedUV)
}

Next Steps

Once you've mastered image and texture manipulation, next learn about Interactive Effects to create effects that respond to user interactions.

What you learned in this tutorial:

  • ✅ Image loading and UV coordinates
  • ✅ Image filters (blur, edge detection, sepia)
  • ✅ Texture coordinate manipulation
  • ✅ Dynamic texture effects
  • ✅ Procedural texture creation

In the next chapter, you'll learn about interactive visual expressions that respond to mouse and keyboard operations.