import { Engine } from "../../Engine";
import { Register } from "../../register/index";
import { ComponentType } from "../../component/index";
import { mat4, vec3 } from "../../core/math/wgpu-matrix.module";
import { Pass } from "../../component/post/Pass";
import { guid } from "../../core/utils";

class LineUtile {
  /**
   * GPU line-densification utility.
   *
   * Renders the scene from a synthetic top-down camera into the engine's
   * G-buffer position texture, then runs a compute pass that walks each
   * segment of a polyline and inserts interpolated points whose heights are
   * sampled from that texture (IDW-style densification).
   *
   * Requires `Engine.instance.enableGbuffer === true` — the compute shader
   * binds the G-buffer position texture as its height map.
   *
   * @param {Object} options
   * @param {Array<number[]>} options.rawPosition - polylines as flat
   *   [x0,y0,z0, x1,y1,z1, ...] arrays. Only `rawPosition[0]` is processed.
   *   NOTE(review): confirm whether multi-polyline support is intended.
   */
  constructor(options) {
    this.rawPosition = options.rawPosition;
    this.maxPointsPerSegment = 100; // worst-case interpolated points kept per segment
    this.threshold = 5; // interpolate when adjacent points are farther apart than 5 m
    this.init();
  }

  /**
   * Restore the scene camera's uniform buffer from the snapshot taken in
   * update() before the top-down camera overwrote it.
   */
  restore() {
    const camera = Engine.instance.scene.getCamera();
    const cameraParam = camera.getParam("camera");
    cameraParam.buffer(this.rowCameraData);
  }

  /**
   * Create a render-target texture sized to the engine surface, caching its
   * default view on the texture object (this file's convention).
   *
   * @param {string} label - GPU debug label.
   * @param {GPUTextureFormat} format
   * @param {number} sampleCount - 1 or 4 (MSAA).
   * @returns {GPUTexture} texture with an extra `.view` property attached.
   */
  createRenderTexture(label, format, sampleCount) {
    const texture = Engine.instance.device.createTexture({
      label,
      size: Engine.instance.size,
      format,
      mipLevelCount: 1,
      sampleCount,
      usage:
        GPUTextureUsage.RENDER_ATTACHMENT |
        GPUTextureUsage.TEXTURE_BINDING |
        GPUTextureUsage.COPY_SRC,
    });
    texture.view = texture.createView();
    return texture;
  }

  /**
   * Lazily (re)build the render-pass descriptor and its attachment textures.
   * The descriptor is rebuilt when the engine surface size changes; stale
   * textures are destroyed first so resizes do not leak GPU memory.
   *
   * With multisampling the resolved result lives on the resolveTarget
   * texture; with a single sample it lives on the view texture itself.
   *
   * @returns {GPURenderPassDescriptor} descriptor with an extra `.size` tag.
   */
  getDescript() {
    if (
      this._descript &&
      (this._descript.size[0] !== Engine.instance.size[0] ||
        this._descript.size[1] !== Engine.instance.size[1])
    ) {
      // Surface was resized: release the old attachments before recreating
      // them (previously they were dropped without destroy(), leaking VRAM).
      for (const texture of [
        this.colorTexture,
        this.depthTexture,
        this.positionTexture,
        this.NomalTexture,
        this.positionTextureMulti,
        this.NomalTextureMulti,
        this.pickTexture,
        this.pickTextureMulti,
      ]) {
        texture?.destroy();
      }
      this._descript = null;
    }
    if (!this._descript) {
      const sampleCount = Engine.instance.multisample ? 4 : 1;
      this.colorTexture = this.createRenderTexture(
        "colorTexture",
        Engine.instance.format,
        sampleCount,
      );
      this.depthTexture = this.createRenderTexture(
        "depthTexture",
        "depth24plus-stencil8",
        sampleCount,
      );
      this._descript = {
        colorAttachments: [
          {
            view: this.colorTexture.view,
            loadOp: "clear",
            clearValue: { r: 1.0, g: 1.0, b: 1.0, a: 1.0 },
            storeOp: "store",
          },
        ],
        depthStencilAttachment: {
          view: this.depthTexture.view,
          depthClearValue: 1.0,
          depthLoadOp: "clear",
          depthStoreOp: "store",
        },
      };
      if (Engine.instance.enableGbuffer) {
        this.positionTexture = this.createRenderTexture(
          "position",
          "rgba16float",
          1,
        );
        // Label fixed: this texture was previously mislabelled "position".
        this.NomalTexture = this.createRenderTexture("normal", "rgba16float", 1);
        if (Engine.instance.multisample) {
          this.positionTextureMulti = this.createRenderTexture(
            "position",
            "rgba16float",
            4,
          );
          this.NomalTextureMulti = this.createRenderTexture(
            "normal",
            "rgba16float",
            4,
          );
        }
        this._descript.colorAttachments.push(
          {
            label: "position",
            view: Engine.instance.multisample
              ? this.positionTextureMulti.view
              : this.positionTexture.view,
            resolveTarget: Engine.instance.multisample
              ? this.positionTexture.view
              : undefined,
            loadOp: "clear",
            clearValue: { r: 1.0, g: 1.0, b: 1.0, a: 0 },
            storeOp: "store",
          },
          {
            label: "normal",
            view: Engine.instance.multisample
              ? this.NomalTextureMulti.view
              : this.NomalTexture.view,
            resolveTarget: Engine.instance.multisample
              ? this.NomalTexture.view
              : undefined,
            loadOp: "clear",
            clearValue: { r: 1.0, g: 1.0, b: 1.0, a: 0 },
            storeOp: "store",
          },
        );
      }
      if (Engine.instance.enablePick) {
        this.pickTexture = this.createRenderTexture(
          "pick",
          Engine.instance.format,
          1,
        );
        if (Engine.instance.multisample) {
          this.pickTextureMulti = this.createRenderTexture(
            "pick",
            Engine.instance.format,
            4,
          );
        }
        this._descript.colorAttachments.push({
          label: "pick",
          view: Engine.instance.multisample
            ? this.pickTextureMulti.view
            : this.pickTexture.view,
          resolveTarget: Engine.instance.multisample
            ? this.pickTexture.view
            : undefined,
          loadOp: "clear",
          clearValue: { r: 1.0, g: 1.0, b: 1.0, a: 0 },
          storeOp: "store",
        });
      }
      // Tag the descriptor with the size it was built for (resize detection).
      this._descript.size = Engine.instance.size;
    }
    return this._descript;
  }

  /**
   * Build all GPU resources: render targets, input/output storage buffers,
   * the Params / top-camera uniform buffers, and the IDW compute pipeline.
   *
   * @throws {Error} if the engine's G-buffer is disabled (the compute pass
   *   samples the G-buffer position texture as its height map).
   */
  init() {
    this.twoPoints = this.createLineSegment();
    this.getDescript();
    if (!this.positionTexture) {
      throw new Error(
        "LineUtile requires Engine.instance.enableGbuffer to be true",
      );
    }
    // Uniform buffer matching the WGSL `Params` struct layout:
    //   threshold: f32 @0, texSize: vec2<f32> @8 (align 8), maxPoints: u32 @16
    // -> struct size 24 bytes.
    this.paramsBuffer = Engine.instance.device.createBuffer({
      label: "parameters",
      size: 24,
      usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
    });
    // 1. Input storage buffer: one vec4<f32> per raw point.
    const inputBuffer = Engine.instance.device.createBuffer({
      label: "inputBuffer",
      size: this.twoPoints.byteLength,
      usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST,
    });
    // 2. Output storage buffer: worst case maxPointsPerSegment points per segment.
    this.pointsCount = this.rawPosition[0].length / 3;
    const maxOutputPoints = (this.pointsCount - 1) * this.maxPointsPerSegment;
    this.outputBuffer = Engine.instance.device.createBuffer({
      label: "outputBuffer",
      size: maxOutputPoints * 16, // vec4<f32> per point
      usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC,
    });
    // 3. Upload Params with explicit offsets. The previous flat
    // Float32Array([threshold, w, h, maxPoints]) write ignored the vec2
    // alignment (texSize starts at byte 8, not 4) and wrote the u32
    // maxPoints field as a float bit pattern.
    const paramsData = new ArrayBuffer(24);
    const paramsView = new DataView(paramsData);
    paramsView.setFloat32(0, this.threshold, true);
    paramsView.setFloat32(8, this.positionTexture.width, true);
    paramsView.setFloat32(12, this.positionTexture.height, true);
    paramsView.setUint32(16, maxOutputPoints, true);
    Engine.instance.device.queue.writeBuffer(this.paramsBuffer, 0, paramsData);
    // 4. Upload the input points.
    Engine.instance.device.queue.writeBuffer(inputBuffer, 0, this.twoPoints);
    // 5. Top-view camera uniform: mat4x4 (16 floats) + vec2 texSize, padded.
    this.topCamera = Engine.instance.device.createBuffer({
      label: "topCamera",
      size: (16 + 4) * 4,
      usage:
        GPUBufferUsage.UNIFORM |
        GPUBufferUsage.COPY_SRC |
        GPUBufferUsage.COPY_DST,
    });
    const IDWShader = `
       struct Point {
          pos: vec4<f32>  // xyz is the position, w can be used as a flag
      }
       struct Camera {
          viewProj: mat4x4<f32>,    // top-view view-projection matrix
          texSize: vec2<f32>,       // height-map size
      }
      @group(0) @binding(0) var heightMap: texture_2d<f32>;
      @group(0) @binding(1) var<uniform> topCamera: Camera;
      @group(0) @binding(2) var<storage, read> inputPoints: array<Point>;
      @group(0) @binding(3) var<storage, read_write> outputPoints: array<Point>;
      @group(0) @binding(4) var<uniform> params: Params;
      
      struct Params {
          threshold: f32,      // interpolation threshold
          texSize: vec2<f32>,  // height-map size
          maxPoints: u32       // capacity of the output buffer
      }
      
      // Sample the height of a world-space point from the height map.
      fn sampleHeight(worldPos: vec2<f32>) -> f32 {
          // Project into clip space with the top-view camera.
          let clipPos = topCamera.viewProj * vec4<f32>(worldPos.x, worldPos.y, 0.0, 1.0);
          // Perspective divide into NDC space.
          let ndcPos = clipPos.xy / clipPos.w;
          // Convert NDC to texel coordinates (Y flipped).
          let uv = vec2<f32>(
            (ndcPos.x * params.texSize.x + params.texSize.x) / 2,
             ( params.texSize.y - ndcPos.y * params.texSize.y) / 2
          ) ;
          
          return textureLoad(heightMap, vec2<u32>(uv), 0).z;
      }
      
      // Emit p1 plus interpolated points between p1 and p2 into outputPoints.
      fn interpolatePoints(p1: Point, p2: Point, outputIndex: ptr<function, u32>) {
          let distance = distance(p1.pos.xy, p2.pos.xy);  // xy-plane distance only
          let numSegments = ceil(distance / params.threshold);
          
          // Safety check: never run past the end of the output buffer.
          if (*outputIndex >= params.maxPoints - u32(numSegments)) {
              return;
          }
          
          // Store the segment's start point.
          outputPoints[*outputIndex] = p1;
          *outputIndex = *outputIndex + 1u;
          
          // Interpolate when the segment is longer than the threshold.
          if (numSegments > 1.0) {
              for (var i = 1.0; i < numSegments; i += 1.0) {
                  let t = i / numSegments;
                  
                  // Interpolate in the xy plane.
                  let interpolatedXY = mix(p1.pos.xy, p2.pos.xy, t);
                  
                  // Sample the height at the interpolated position.
                  let height = sampleHeight(interpolatedXY);
                  
                  // Store the interpolated point.
                  outputPoints[*outputIndex] = Point(
                      vec4<f32>(interpolatedXY.x, interpolatedXY.y, height, 1.0)
                  );
                  *outputIndex = *outputIndex + 1u;
              }
          }
      }
      @compute @workgroup_size(256)
      fn main(@builtin(global_invocation_id) global_id: vec3<u32>) {
          let pointIndex = global_id.x;
          
          // Skip the last point: segments are (i, i+1) pairs.
          if (pointIndex >= arrayLength(&inputPoints) - 1) {
              return;
          }
          var outputIndex = pointIndex * 100u;  // assume at most 100 points per segment
          let currentPoint = inputPoints[pointIndex];
          let nextPoint = inputPoints[pointIndex + 1];
          interpolatePoints(currentPoint, nextPoint, &outputIndex);
          // NOTE(review): everything below looks like debugging leftovers —
          // it overwrites outputPoints[pointIndex] (clobbering the result of
          // interpolatePoints for index 0) with a single height-map sample of
          // the current point. Kept as-is to preserve behavior; confirm intent.
          outputPoints[pointIndex] = Point(
                      vec4<f32>(currentPoint.pos.x, currentPoint.pos.y, currentPoint.pos.z, 1.0)
                  );
          let clipPos = topCamera.viewProj * vec4<f32>(currentPoint.pos.x, currentPoint.pos.y,0,1.0);
          // Perspective divide into NDC space.
          let ndcPos = clipPos.xy / clipPos.w;
          // NDC [-1,1] -> UV [0,1], with the Y axis flipped.
          let uv = vec2<f32>(
              (ndcPos.x + 1.0) * 0.5,
              (1.0 - (ndcPos.y + 1.0) * 0.5)
          );
          
          // UV -> texel coordinates.
          let texCoord = vec2<u32>(
              u32(uv.x * f32(params.texSize.x)),
              u32(uv.y * f32(params.texSize.y))
          );
          let outp= textureLoad(heightMap, texCoord, 0);
          var xyz=outp.xyz*outp.w;
          outputPoints[pointIndex] = Point(
                     vec4<f32>(xyz,1.0),
                  );        
      }
      `;
    this.computePipelineIDW = Engine.instance.device.createComputePipeline({
      label: "ComputePipelineIDW",
      layout: "auto",
      compute: {
        module: Engine.instance.device.createShaderModule({
          code: IDWShader,
        }),
        entryPoint: "main",
      },
    });
    this.IDWBindGroup = Engine.instance.device.createBindGroup({
      layout: this.computePipelineIDW.getBindGroupLayout(0),
      entries: [
        { binding: 0, resource: this.positionTexture.view },
        { binding: 1, resource: { buffer: this.topCamera } },
        { binding: 2, resource: { buffer: inputBuffer } },
        { binding: 3, resource: { buffer: this.outputBuffer } },
        { binding: 4, resource: { buffer: this.paramsBuffer } },
      ],
    });
  }

  /**
   * Pack rawPosition[0] (flat x,y,z triplets) into a Float32Array of
   * homogeneous vec4 points (w = 1.0) for the input storage buffer.
   *
   * @returns {Float32Array} length = (pointCount) * 4.
   */
  createLineSegment() {
    const source = this.rawPosition[0];
    const packed = new Float32Array((source.length / 3) * 4);
    for (let i = 0, o = 0; i < source.length; i += 3, o += 4) {
      packed[o] = source[i];
      packed[o + 1] = source[i + 1];
      packed[o + 2] = source[i + 2];
      packed[o + 3] = 1.0;
    }
    return packed;
  }

  /**
   * Render the top-down position pass, dispatch the IDW compute pass, and
   * read the densified points back to the CPU.
   *
   * Temporarily swaps the scene camera's uniforms for a top-down camera,
   * then restores them via restore().
   *
   * @returns {Promise<Float32Array>} densified points as packed vec4 floats.
   */
  async update() {
    // Build a top-down camera looking straight down at the scene center.
    const camera = Engine.instance.scene.getCamera();
    const eye = vec3.fromValues(camera.at.x, camera.at.y, camera.distance);
    const up = vec3.fromValues(0, 1, 0);
    const target = vec3.fromValues(camera.at.x, camera.at.y, camera.at.z);
    this.viewMatrix = mat4.lookAt(eye, target, up);
    this.projMatrix = camera.project.elements;
    this.viewProjMatrix = mat4.multiply(this.projMatrix, this.viewMatrix);
    const cameraParam = camera.getParam("camera");
    // Snapshot the live camera uniforms so restore() can put them back.
    this.rowCameraData = new Float32Array([
      ...camera.vp.elements,
      ...camera.vpInvert.elements,
      ...camera.view.elements,
      ...camera.project.elements,
      ...camera.rotation.elements,
      ...camera.trans.matrixWorld.elements,
      camera.trans.position.x,
      camera.trans.position.y,
      camera.trans.position.z,
      camera.distance,
      ...camera.size,
      camera.near,
      camera.far,
      ...camera.at,
    ]);
    this.newCameraData = new Float32Array([
      ...this.viewProjMatrix,
      ...camera.vpInvert.elements,
      ...this.viewMatrix,
      ...this.projMatrix,
      ...camera.rotation.elements,
      ...camera.trans.matrixWorld.elements,
      ...eye,
      camera.distance,
      ...camera.size,
      camera.near,
      camera.far,
      ...target,
    ]);
    // Upload the top-down camera and render the depth/position pass.
    cameraParam.buffer(this.newCameraData);
    const meshRenderCom = Register.instance
      .manager(ComponentType.MeshRender)
      .get("x");
    const bundles = meshRenderCom.bundles;
    const commandEncoder = Engine.instance.device.createCommandEncoder();
    this.renderPassDescriptor = this.getDescript();
    const renderPass = commandEncoder.beginRenderPass(
      this.renderPassDescriptor,
    );
    renderPass.executeBundles(bundles);
    renderPass.end();
    Engine.instance.queue.submit([commandEncoder.finish()]);
    this.restore();

    // Update the compute pass's top-view camera uniform.
    const topCameraData = new Float32Array([
      ...this.viewProjMatrix,
      ...Engine.instance.size,
    ]);
    Engine.instance.queue.writeBuffer(this.topCamera, 0, topCameraData);
    const idwEncoder = Engine.instance.device.createCommandEncoder();
    const computePass = idwEncoder.beginComputePass();
    computePass.setPipeline(this.computePipelineIDW);
    computePass.setBindGroup(0, this.IDWBindGroup);
    // One invocation per segment; workgroup size 256 matches the shader.
    const workgroupCount = Math.ceil((this.pointsCount - 1) / 256);
    computePass.dispatchWorkgroups(workgroupCount);
    computePass.end();
    // Staging buffer for CPU read-back.
    // (Fix: was `engine.device`, an undefined global -> ReferenceError.)
    const stagingBuffer = Engine.instance.device.createBuffer({
      label: "outputStaging",
      size: this.outputBuffer.size,
      usage: GPUBufferUsage.MAP_READ | GPUBufferUsage.COPY_DST,
    });
    idwEncoder.copyBufferToBuffer(
      this.outputBuffer,
      0,
      stagingBuffer,
      0,
      this.outputBuffer.size,
    );
    Engine.instance.device.queue.submit([idwEncoder.finish()]);
    await stagingBuffer.mapAsync(GPUMapMode.READ);
    // Copy the data out before unmapping — the mapped range is detached on
    // unmap. Previously the buffer was never unmapped or destroyed (leak).
    const out = new Float32Array(stagingBuffer.getMappedRange().slice(0));
    stagingBuffer.unmap();
    stagingBuffer.destroy();
    console.log(out);
    return out;
  }

  // Placeholder: adaptive per-segment sample-count pass (not implemented).
  updateIDWCount() {}
}
export { LineUtile };
