/* eslint-disable @typescript-eslint/no-explicit-any */
import { RandomNormal } from "./Utils/RandomNormal";
import { Tensor, TensorShape } from "../../Core/Tensor";
import { ITensorAllocator } from "../../Core/Backends/ITensorAllocator";

/**
 * Element kind carried by a TensorProxy. Each member is mapped to the
 * backend dtype string ("int32" / "float32") via the TensorDataType table.
 */
export enum TensorType {
    Integer,
    FloatingPoint,
}


// Maps a TensorType to the backend's dtype string.
// Typed as Record<TensorType, string> so the compiler flags a missing or
// misspelled enum key (the untyped literal silently accepted either).
const TensorDataType: Record<TensorType, string> = {
    [TensorType.Integer]: "int32",
    [TensorType.FloatingPoint]: "float32",
};

/**
 * Lightweight descriptor pairing a name, value type, and dimension array
 * with an optionally-allocated backing Tensor.
 */
export class TensorProxy {

    // Field initializers reproduce the defaults of a freshly-created,
    // unallocated proxy (previously assigned in the constructor).
    private _name = "";
    private _valueType = TensorType.FloatingPoint;
    private _dataType: string = TensorDataType[TensorType.FloatingPoint];
    private _shape: number[] = [];
    // Starts unallocated; assigned through the `data` setter.
    private _data: Tensor = null!;

    get name(): string {
        return this._name;
    }

    set name(value: string) {
        this._name = value;
    }

    get valueType(): TensorType {
        return this._valueType;
    }

    // Changing the value type also refreshes the derived dtype string.
    set valueType(value: TensorType) {
        this._valueType = value;
        this._dataType = TensorDataType[value];
    }

    get dataType(): string {
        return this._dataType;
    }

    get shape(): number[] {
        return this._shape;
    }

    set shape(value: number[]) {
        this._shape = value;
    }

    get data(): Tensor {
        return this._data;
    }

    set data(value: Tensor) {
        this._data = value;
    }

    // Rank-4 shapes are read as NHWC; any other rank is read at the
    // rank-8 positions (h=5, w=6, c=7) — presumably the Barracuda 8-D
    // layout, matching the rank-8 branch in TensorUtils.resizeTensor.
    private spatialDim(rank4Axis: number, rank8Axis: number): number {
        return this._shape[this._shape.length === 4 ? rank4Axis : rank8Axis];
    }

    get height(): number {
        return this.spatialDim(1, 5);
    }

    get width(): number {
        return this.spatialDim(2, 6);
    }

    get channels(): number {
        return this.spatialDim(3, 7);
    }
}

export class TensorUtils {
    /**
     * Ensures `tensor.data` is allocated for `batch` rows, reallocating
     * through `allocator` when the current shape or allocation is stale.
     *
     * Mutates `tensor.shape[0]` to the new batch size. Rank-4 and rank-8
     * shapes are allocated as [batch, height, width, channels] (the
     * height/width/channels getters pick the right axes per rank); any
     * other rank collapses to a flat [batch, lastDim] allocation.
     */
    public static resizeTensor(tensor: TensorProxy, batch: number, allocator: ITensorAllocator): void {
        // Fast path: shape and backing allocation already match the batch.
        if (tensor.shape[0] === batch && tensor.data !== null && tensor.data.batch === batch) {
            return;
        }

        // Drop the stale allocation before requesting a new one.
        if (tensor.data !== null) {
            tensor.data = null!;
        }

        tensor.shape[0] = batch;

        if (tensor.shape.length === 4 || tensor.shape.length === 8) {
            const tensorShape = TensorShape.createWithArray([batch, tensor.height, tensor.width, tensor.channels]);
            tensor.data = allocator.alloc(tensorShape);
        } else {
            // Flat layout: [batch, features].
            const tensorShape = TensorShape.createWithArray([batch, tensor.shape[tensor.shape.length - 1]]);
            tensor.data = allocator.alloc(tensorShape);
        }
    }

    /**
     * Converts a Barracuda TensorShape to a plain dimension array:
     * [batch, channels] when height and width are both 1, otherwise
     * [batch, height, width, channels].
     */
    public static tensorShapeFromBarracuda(src: TensorShape): number[] {
        if (src.height === 1 && src.width === 1) {
            return [src.batch, src.channels];
        }

        return [src.batch, src.height, src.width, src.channels];
    }

    /**
     * Wraps an existing Barracuda Tensor in a float32 TensorProxy.
     *
     * @param src Tensor to wrap; becomes the proxy's backing data.
     * @param nameOverride Optional proxy name; defaults to `src.name`.
     */
    public static tensorProxyFromBarracuda(src: Tensor, nameOverride: string | null = null): TensorProxy {
        const shape = TensorUtils.tensorShapeFromBarracuda(src.shape);

        const tensorProxy = new TensorProxy();
        tensorProxy.name = nameOverride ?? src.name;
        tensorProxy.valueType = TensorType.FloatingPoint;
        tensorProxy.shape = shape;
        tensorProxy.data = src;

        return tensorProxy;
    }

    /**
     * Fills every (h, w, c) cell of one batch slice with `fillValue`.
     * Note: `batch` is not range-checked here — presumably callers pass a
     * valid slice index; verify against call sites.
     *
     * @throws Error when the proxy has no allocated tensor. (Guard added
     *         for consistency with fillTensorWithRandomNormal; previously
     *         an unallocated proxy crashed with an opaque TypeError.)
     */
    public static fillTensorBatch(tensorProxy: TensorProxy, batch: number, fillValue: number): void {
        if (tensorProxy.data === null) {
            throw new Error("The Tensor is not allocated");
        }

        const height = tensorProxy.data.height;
        const width = tensorProxy.data.width;
        const channels = tensorProxy.data.channels;

        for (let h = 0; h < height; h++) {
            for (let w = 0; w < width; w++) {
                for (let c = 0; c < channels; c++) {
                    tensorProxy.data.setCache([batch, h, w, c], fillValue);
                }
            }
        }
    }

    /**
     * Overwrites every element of the allocated tensor with samples drawn
     * from `randomNormal`. Only float-typed proxies are supported.
     *
     * @throws Error for integer-typed proxies or unallocated tensors.
     */
    public static fillTensorWithRandomNormal(tensorProxy: TensorProxy, randomNormal: RandomNormal): void {
        if (tensorProxy.dataType === TensorDataType[TensorType.Integer]) {
            throw new Error("Only float data types are currently supported");
        }

        if (tensorProxy.data === null) {
            throw new Error("The Tensor is not allocated");
        }

        for (let i = 0; i < tensorProxy.data.length; i++) {
            tensorProxy.data.setCache(i, randomNormal.nextFloat());
        }
    }
}