// NOTE(review): removed non-code artifact lines ("Spaces:" / "Running" / "Running")
// that appear to be residue from a deployment-dashboard page scrape, not source.
| /** | |
| * WebGPU (wgsl) implementation of https://codepen.io/ksenia-k/pen/vYwgrWv | |
| */ | |
| import { useRef, useEffect } from "react"; | |
// WGSL shader: a fullscreen 4-vertex triangle strip (vs) plus the "neuro"
// layered-sine noise fragment (fs), ported from the referenced CodePen.
//
// Uniform layout contract — must stay in sync with the Float32Array writes
// in NeuroBackground's render loop:
//   time@0, ratio@4, pointer@8..15 (vec2f, 8-byte aligned), scroll_progress@16.
// 24 bytes used; the buffer is padded to UNIFORM_SIZE (32).
const SHADER_SOURCE = /* wgsl */ `
struct Uniforms {
  time: f32,
  ratio: f32,
  pointer: vec2f,
  scroll_progress: f32,
}
@group(0) @binding(0) var<uniform> u: Uniforms;
struct VsOut {
  @builtin(position) pos: vec4f,
  @location(0) uv: vec2f,
}
@vertex
fn vs(@builtin(vertex_index) i: u32) -> VsOut {
  let x = f32(i32(i & 1u) * 2 - 1);
  let y = f32(i32(i >> 1u) * 2 - 1);
  var o: VsOut;
  o.pos = vec4f(x, y, 0.0, 1.0);
  o.uv = vec2f(x, y) * 0.5 + 0.5;
  return o;
}
fn rotate2(v: vec2f, th: f32) -> vec2f {
  let c = cos(th);
  let s = sin(th);
  return vec2f(c * v.x + s * v.y, -s * v.x + c * v.y);
}
fn neuro_shape(uv_in: vec2f, t: f32, p: f32) -> f32 {
  var uv = uv_in;
  var sine_acc = vec2f(0.0);
  var res = vec2f(0.0);
  var scale: f32 = 8.0;
  for (var j = 0; j < 15; j++) {
    uv = rotate2(uv, 1.0);
    sine_acc = rotate2(sine_acc, 1.0);
    let layer = uv * scale + f32(j) + sine_acc - t;
    sine_acc += sin(layer) + 2.4 * p;
    res += (0.5 + 0.5 * cos(layer)) / scale;
    scale *= 1.2;
  }
  return res.x + res.y;
}
@fragment
fn fs(@location(0) vUv: vec2f) -> @location(0) vec4f {
  var uv = 0.5 * vUv;
  uv.x *= u.ratio;
  var pointer = vUv - u.pointer;
  pointer.x *= u.ratio;
  let p = 0.5 * pow(1.0 - clamp(length(pointer), 0.0, 1.0), 2.0);
  let t = 0.001 * u.time;
  var noise = neuro_shape(uv, t, p);
  noise = 1.2 * pow(noise, 3.0);
  noise += pow(noise, 10.0);
  noise = max(0.0, noise - 0.5);
  noise *= (1.0 - length(vUv - 0.5));
  let sp = u.scroll_progress;
  var color = normalize(vec3f(0.2, 0.5 + 0.4 * cos(3.0 * sp), 0.5 + 0.5 * sin(3.0 * sp)));
  color = color * noise;
  return vec4f(color, noise);
}
`;
// Uniform buffer byte size. The WGSL struct occupies 24 bytes
// (f32, f32, vec2f @ 8-byte alignment, f32); 32 gives padded headroom
// and keeps the Float32Array an even 8 floats.
const UNIFORM_SIZE = 32;

interface NeuroBackgroundProps {
  /** Invoked once, right after the first frame has been submitted to the GPU. */
  onReady?: () => void;
}
| export function NeuroBackground({ onReady }: NeuroBackgroundProps) { | |
| const canvasRef = useRef<HTMLCanvasElement>(null); | |
| const pointerRef = useRef({ x: 0, y: 0, tX: 0, tY: 0 }); | |
| const onReadyRef = useRef(onReady); | |
| onReadyRef.current = onReady; | |
| const rafRef = useRef<number>(0); | |
| useEffect(() => { | |
| const canvas = canvasRef.current; | |
| if (!canvas) return; | |
| let destroyed = false; | |
| async function init() { | |
| if (!navigator.gpu) return; | |
| const adapter = await navigator.gpu.requestAdapter(); | |
| if (!adapter || destroyed) return; | |
| const device = await adapter.requestDevice(); | |
| if (destroyed) { | |
| device.destroy(); | |
| return; | |
| } | |
| const ctx = canvas!.getContext("webgpu") as GPUCanvasContext | null; | |
| if (!ctx) return; | |
| const format = navigator.gpu.getPreferredCanvasFormat(); | |
| ctx.configure({ device, format, alphaMode: "premultiplied" }); | |
| const module = device.createShaderModule({ code: SHADER_SOURCE }); | |
| const pipeline = device.createRenderPipeline({ | |
| layout: "auto", | |
| vertex: { module, entryPoint: "vs" }, | |
| fragment: { | |
| module, | |
| entryPoint: "fs", | |
| targets: [ | |
| { | |
| format, | |
| blend: { | |
| color: { | |
| srcFactor: "src-alpha", | |
| dstFactor: "one-minus-src-alpha", | |
| operation: "add", | |
| }, | |
| alpha: { | |
| srcFactor: "one", | |
| dstFactor: "one-minus-src-alpha", | |
| operation: "add", | |
| }, | |
| }, | |
| }, | |
| ], | |
| }, | |
| primitive: { topology: "triangle-strip", stripIndexFormat: "uint32" }, | |
| }); | |
| const uniformBuf = device.createBuffer({ | |
| size: UNIFORM_SIZE, | |
| usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, | |
| }); | |
| const bindGroup = device.createBindGroup({ | |
| layout: pipeline.getBindGroupLayout(0), | |
| entries: [{ binding: 0, resource: { buffer: uniformBuf } }], | |
| }); | |
| const uniformData = new Float32Array(UNIFORM_SIZE / 4); | |
| const dpr = Math.min(window.devicePixelRatio, 2); | |
| function resize() { | |
| if (!canvas || destroyed) return; | |
| canvas.width = window.innerWidth * dpr; | |
| canvas.height = window.innerHeight * dpr; | |
| } | |
| resize(); | |
| window.addEventListener("resize", resize); | |
| let firstFrame = true; | |
| function render() { | |
| if (destroyed) return; | |
| const ptr = pointerRef.current; | |
| ptr.x += (ptr.tX - ptr.x) * 0.2; | |
| ptr.y += (ptr.tY - ptr.y) * 0.2; | |
| uniformData[0] = performance.now(); | |
| uniformData[1] = canvas!.width / canvas!.height; | |
| uniformData[2] = ptr.x / window.innerWidth; | |
| uniformData[3] = 1 - ptr.y / window.innerHeight; | |
| uniformData[4] = 0; | |
| device.queue.writeBuffer(uniformBuf, 0, uniformData); | |
| const encoder = device.createCommandEncoder(); | |
| const pass = encoder.beginRenderPass({ | |
| colorAttachments: [ | |
| { | |
| view: ctx!.getCurrentTexture().createView(), | |
| loadOp: "clear", | |
| storeOp: "store", | |
| clearValue: { r: 0, g: 0, b: 0, a: 0 }, | |
| }, | |
| ], | |
| }); | |
| pass.setPipeline(pipeline); | |
| pass.setBindGroup(0, bindGroup); | |
| pass.draw(4); | |
| pass.end(); | |
| device.queue.submit([encoder.finish()]); | |
| if (firstFrame) { | |
| firstFrame = false; | |
| onReadyRef.current?.(); | |
| } | |
| rafRef.current = requestAnimationFrame(render); | |
| } | |
| rafRef.current = requestAnimationFrame(render); | |
| return { device, resize }; | |
| } | |
| let cleanup: (() => void) | undefined; | |
| init().then((resources) => { | |
| if (destroyed) { | |
| resources?.device.destroy(); | |
| return; | |
| } | |
| function onPointerMove(e: PointerEvent) { | |
| pointerRef.current.tX = e.clientX; | |
| pointerRef.current.tY = e.clientY; | |
| } | |
| function onTouchMove(e: TouchEvent) { | |
| pointerRef.current.tX = e.targetTouches[0].clientX; | |
| pointerRef.current.tY = e.targetTouches[0].clientY; | |
| } | |
| function onClick(e: MouseEvent) { | |
| pointerRef.current.tX = e.clientX; | |
| pointerRef.current.tY = e.clientY; | |
| } | |
| window.addEventListener("pointermove", onPointerMove); | |
| window.addEventListener("touchmove", onTouchMove); | |
| window.addEventListener("click", onClick); | |
| cleanup = () => { | |
| window.removeEventListener("resize", resources!.resize); | |
| window.removeEventListener("pointermove", onPointerMove); | |
| window.removeEventListener("touchmove", onTouchMove); | |
| window.removeEventListener("click", onClick); | |
| resources?.device.destroy(); | |
| }; | |
| }); | |
| return () => { | |
| destroyed = true; | |
| cancelAnimationFrame(rafRef.current); | |
| cleanup?.(); | |
| }; | |
| }, []); | |
| return <canvas ref={canvasRef} className="fixed inset-0 w-full h-full pointer-events-none opacity-[0.95]" />; | |
| } | |