import { mat4, vec3 } from 'gl-matrix';
import { Camera } from './camera';
import { createGPUBuffer, imagedataToImage, loadObj } from './utils';
import { Plane } from './plane';
import { Teapot } from './teapot';
import { SpotLight } from './spot-light';

/**
 * Top-level WebGPU renderer: owns the canvas swap chain, a shadow-map
 * ("light") pass rendered into offscreen 1024x1024 textures, and the main
 * forward pass. Drives an animation loop via requestAnimationFrame and can
 * optionally dump the light-pass depth buffer to an image for debugging.
 */
export class Renderer {
  canvas: HTMLCanvasElement;
  device: GPUDevice;
  context: GPUCanvasContext;
  // Comparison sampler ('less') — intended for shadow-map depth comparisons.
  sampler: GPUSampler;

  camera: Camera;
  plane: Plane;
  spotlight: SpotLight;
  // Loaded asynchronously in init(); undefined until the OBJ file arrives.
  teapot?: Teapot;

  // Offscreen color target for the light (shadow) pass.
  lightColorTexture: GPUTexture;
  lightColorTextureView: GPUTextureView;

  // Depth target of the light pass; sampled later as the shadow map and
  // copyable to a buffer for debugging.
  lightDepthTexture: GPUTexture;
  lightDepthTextureView: GPUTextureView;

  // Depth buffer for the main forward pass; tracks the canvas size.
  depthTexture: GPUTexture;
  depthTextureView: GPUTextureView;

  // CPU-readable staging buffer receiving the light-pass depth texture
  // (1024 * 1024 texels * 4 bytes/float).
  copiedBuffer: GPUBuffer;

  requestId: number;
  angle: number;
  // Guard so captureDepth() dumps at most once; initialized to true, which
  // disables the dump entirely until a caller resets it (debug switch).
  hasDumped: boolean;
  spotLightId: number;

  /**
   * Configures the canvas context, creates all GPU resources and scene
   * objects, and starts the render loop. Model loading continues
   * asynchronously in init().
   */
  constructor(canvas: HTMLCanvasElement, device: GPUDevice) {
    this.canvas = canvas;
    this.device = device;
    this.context = canvas.getContext('webgpu') as GPUCanvasContext;

    this.angle = 0.0;
    this.hasDumped = true;

    this.context.configure({
      device: device,
      format: navigator.gpu.getPreferredCanvasFormat(),
      usage: GPUTextureUsage.RENDER_ATTACHMENT,
      alphaMode: 'opaque',
    });

    this.sampler = device.createSampler({
      addressModeU: 'clamp-to-edge',
      addressModeV: 'clamp-to-edge',
      magFilter: 'linear',
      minFilter: 'linear',
      mipmapFilter: 'linear',
      // Presence of 'compare' makes this a comparison sampler (PCF shadows).
      compare: 'less',
    });

    this.lightColorTexture = this.device.createTexture({
      size: [1024, 1024, 1],
      dimension: '2d',
      format: 'bgra8unorm',
      usage: GPUTextureUsage.RENDER_ATTACHMENT,
    });
    this.lightColorTextureView = this.lightColorTexture.createView();

    this.lightDepthTexture = device.createTexture({
      size: [1024, 1024, 1],
      dimension: '2d',
      format: 'depth32float',
      usage:
        GPUTextureUsage.RENDER_ATTACHMENT |
        GPUTextureUsage.COPY_SRC |
        GPUTextureUsage.TEXTURE_BINDING,
    });
    this.lightDepthTextureView = this.lightDepthTexture.createView();

    this.depthTexture = this.device.createTexture({
      size: [this.canvas.width, this.canvas.height, 1],
      dimension: '2d',
      format: 'depth32float',
      usage: GPUTextureUsage.RENDER_ATTACHMENT,
    });

    this.depthTextureView = this.depthTexture.createView();

    this.copiedBuffer = createGPUBuffer(
      this.device,
      new Float32Array(1024 * 1024),
      GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ
    );

    this.camera = new Camera(this);
    this.plane = new Plane(this);
    this.spotlight = new SpotLight(this);

    // Initial light: position, direction (toward origin), up vector.
    this.spotLightId = this.spotlight.upsertSpotLight(
      undefined,
      vec3.fromValues(3.0, 3.0, 2.0) as Float32Array,
      vec3.fromValues(1.0, 1.0, 1.0) as Float32Array,
      vec3.fromValues(0.0, 1.0, 0.0) as Float32Array
    );

    this.requestId = window.requestAnimationFrame(this.render);

    // Fire-and-forget: the loop renders without the teapot until it loads.
    this.init();
  }

  /** Loads the teapot OBJ model asynchronously and attaches it to the scene. */
  async init() {
    const info = await loadObj('../models/teapot-h.obj');
    this.teapot = new Teapot(this, info);
  }

  /**
   * Resizes the backing store to match the CSS size * devicePixelRatio,
   * recreating the depth buffer and the projection matrix accordingly.
   */
  resize() {
    const { clientWidth, clientHeight, width, height } = this.canvas;
    const devicePixelRatio = window.devicePixelRatio || 1;
    const currentCanvasWidth = clientWidth * devicePixelRatio;
    const currentCanvasHeight = clientHeight * devicePixelRatio;

    if (currentCanvasWidth !== width || currentCanvasHeight !== height) {
      this.canvas.width = currentCanvasWidth;
      this.canvas.height = currentCanvasHeight;

      this.depthTexture.destroy();

      // FIX: use the NEW size here. The previous code used the stale
      // destructured `width`/`height` (the pre-resize size), which made the
      // depth attachment mismatch the swap-chain texture size — a WebGPU
      // validation error on the next beginRenderPass — and lagged the
      // projection aspect ratio one resize behind.
      this.depthTexture = this.device.createTexture({
        size: [currentCanvasWidth, currentCanvasHeight, 1],
        dimension: '2d',
        format: 'depth32float',
        usage: GPUTextureUsage.RENDER_ATTACHMENT,
      });

      this.depthTextureView = this.depthTexture.createView();

      const projectionMatrix = mat4.perspective(
        mat4.create(),
        1.4,
        currentCanvasWidth / currentCanvasHeight,
        0.1,
        1000.0
      );

      this.device.queue.writeBuffer(
        this.camera.projectionMatrixUniformBuffer,
        0,
        projectionMatrix as Float32Array
      );
    }
  }

  /**
   * Per-frame callback: updates the camera, orbits the spot light around the
   * scene, runs the shadow pass then the main pass, and schedules the next
   * frame. Defined as an arrow function so `this` stays bound when passed to
   * requestAnimationFrame.
   */
  render = async () => {
    // TODO: update the camera model-view matrix from arcball movement.
    this.camera.update();

    // Orbit the light on a circle of radius 8 at height 10; the direction is
    // the negated position, i.e. the light always aims at the origin.
    const lightDir = vec3.fromValues(Math.cos(this.angle) * 8.0, 10, Math.sin(this.angle) * 8.0);

    this.spotlight.upsertSpotLight(
      this.spotLightId,
      lightDir as Float32Array,
      vec3.fromValues(
        -Math.cos(this.angle) * 8.0,
        -10,
        -Math.sin(this.angle) * 8.0
      ) as Float32Array,
      vec3.fromValues(0.0, 1.0, 0.0) as Float32Array
    );
    this.spotlight.refreshBuffer(this.device);

    const commandEncoder = this.device.createCommandEncoder();

    // TODO: move this pass setup into the light object.
    const lightPassEncoder = commandEncoder.beginRenderPass({
      colorAttachments: [
        {
          view: this.lightColorTextureView,
          clearValue: { r: 1, g: 0, b: 0, a: 1 },
          loadOp: 'load',
          storeOp: 'store',
        },
      ],
      depthStencilAttachment: {
        view: this.lightDepthTextureView,
        depthClearValue: 1,
        depthLoadOp: 'clear',
        depthStoreOp: 'store',
      },
    });

    lightPassEncoder.setViewport(0, 0, 1024, 1024, 0, 1);
    this.teapot?.renderForLight(lightPassEncoder);
    lightPassEncoder.end();

    // Stage the shadow-map depth into a mappable buffer (4 bytes per
    // depth32float texel) so captureDepth() can read it on the CPU.
    commandEncoder.copyTextureToBuffer(
      { texture: this.lightDepthTexture, origin: { x: 0, y: 0 } },
      { buffer: this.copiedBuffer, bytesPerRow: 1024 * 4 },
      { width: 1024, height: 1024 }
    );

    const colorTexture = this.context.getCurrentTexture();
    const colorTextureView = colorTexture.createView();

    const passEncoder = commandEncoder.beginRenderPass({
      colorAttachments: [
        {
          view: colorTextureView,
          clearValue: { r: 1, g: 0, b: 0, a: 1 },
          loadOp: 'clear',
          storeOp: 'store',
        },
      ],
      depthStencilAttachment: {
        view: this.depthTextureView,
        depthClearValue: 1,
        depthLoadOp: 'clear',
        depthStoreOp: 'store',
      },
    });
    passEncoder.setViewport(0, 0, this.canvas.width, this.canvas.height, 0, 1);
    this.teapot?.render(passEncoder);
    this.plane.render(passEncoder);
    // arrow.encode(passEncoder);
    // dot.encode(passEncoder);
    this.spotlight.render(passEncoder);
    passEncoder.end();

    // const passEncoder2 = commandEncoder.beginRenderPass({
    //   colorAttachments: [
    //     {
    //       view: colorTextureView,
    //       clearValue: { r: 1, g: 0, b: 0, a: 1 },
    //       loadOp: 'load',
    //       storeOp: 'store',
    //     },
    //   ],
    // });
    // // axis.encode(passEncoder2);
    // passEncoder2.end();

    this.device.queue.submit([commandEncoder.finish()]);

    // NOTE(review): awaiting onSubmittedWorkDone every frame serializes CPU
    // and GPU; mapAsync in captureDepth already waits for the copy, so this
    // could likely be dropped for throughput — kept to preserve behavior.
    await this.device.queue.onSubmittedWorkDone();

    await this.captureDepth();

    this.angle += 0.01;

    this.requestId = window.requestAnimationFrame(this.render);
  };

  /**
   * One-shot debug dump of the light-pass depth buffer: maps the staging
   * buffer, converts depth [0,1] to an 8-bit grayscale ImageData, and logs
   * the observed min/max depth. No-op until `hasDumped` is set to false and
   * the teapot has loaded.
   */
  async captureDepth() {
    if (this.hasDumped || !this.teapot) return;

    this.hasDumped = true;

    await this.copiedBuffer.mapAsync(GPUMapMode.READ, 0, 1024 * 1024 * 4);

    const d = new Float32Array(this.copiedBuffer.getMappedRange());
    const x = new Uint8ClampedArray(1024 * 1024 * 4);
    let maxv = -900;
    let minv = 900;
    for (let i = 0; i < 1024 * 1024; ++i) {
      const v = d[i];

      if (maxv < v) {
        maxv = v;
      }
      if (minv > v) {
        minv = v;
      }
      x[i * 4] = v * 255.0;
      x[i * 4 + 1] = v * 255.0;
      x[i * 4 + 2] = v * 255.0;
      // NOTE(review): alpha is also depth-scaled, so near-plane pixels come
      // out transparent; 255 would likely be intended — confirm before changing.
      x[i * 4 + 3] = v * 255.0;
    }
    this.copiedBuffer.unmap();
    const imageData = new ImageData(x, 1024, 1024);
    imagedataToImage(imageData);
    console.log('max min: ', maxv, minv);
  }

  /** Stops the animation loop and releases the GPU device. */
  dispose() {
    window.cancelAnimationFrame(this.requestId);

    this.device.destroy();
  }
}
