

import { BinaryReader } from "../Runtime/Utils/BinaryReader";
import NNModel from "./Internals/NNModel";
import { ActivationType, Layer, LayerType, Model } from "./Model";
import { TensorShape } from "./Tensor";
import { TensorExtension } from "./Backends/TensorExtension";
import { log } from "cc";

export class ModelLoader {

    // Reused for every readString call; constructing a TextDecoder per
    // string is wasteful inside the parse loop.
    private static readonly textDecoder = new TextDecoder();

    /**
     * Deserializes a model from an NNModel asset's raw bytes.
     *
     * @param model   Asset wrapper exposing the serialized bytes.
     * @param verbose When true, logs per-layer details while parsing.
     * @returns The parsed Model (possibly partial if parsing failed — see loadAsync).
     */
    static load(model: NNModel, verbose = false): Model {
        return ModelLoader.loadAsync(model.bytes, verbose);
    }

    /**
     * Parses the binary model format.
     *
     * NOTE: despite the name this method is fully synchronous; the name is
     * kept for backward compatibility with existing callers.
     *
     * Binary layout (in read order): version (int64), inputs, outputs,
     * memories, layers (each with pad/stride/pool/axis/alpha/beta, input
     * names and dataset descriptors), then the raw weights blob which is
     * skipped here (only dataset offsets/lengths are recorded).
     *
     * On any parse error the exception is logged and the partially
     * populated Model is returned — original behavior, deliberately
     * preserved so callers are not broken by a throw.
     *
     * @param bytes   Serialized model buffer.
     * @param verbose When true, logs per-layer details while parsing.
     * @returns The parsed (or partially parsed) Model.
     */
    static loadAsync(bytes: ArrayBuffer, verbose: boolean = true): Model {
        const file = new BinaryReader(bytes);
        const model = new Model();
        try {

            // --- version ---
            // readInt64 yields a bigint; compare against bigint-converted
            // known versions (BigInt() already returns a primitive).
            const version = file.readInt64();
            if (version !== BigInt(Model.Version) &&
                version !== BigInt(Model.LastVersionWithout8DSupport) &&
                version !== BigInt(Model.LastVersionWithoutWeightsAlignmentSupport)) {
                throw new Error(`Model version mismatch. Expected: ${Model.Version}, but got: ${version}`);
            }

            // --- inputs ---
            let count = file.readInt32();
            model.inputs = [];
            for (let i = 0; i < count; i++) {
                const name = ModelLoader.readString(file);
                const shape = ModelLoader.readInt32Array(file);
                model.inputs.push({
                    name: name,
                    shape: shape
                });
            }

            // --- outputs ---
            model.outputs = ModelLoader.readStringArray(file);

            // --- memories (recurrent state: input name, shape, output name) ---
            count = file.readInt32();
            model.memories = [];
            for (let i = 0; i < count; i++) {
                const input = ModelLoader.readString(file);
                const shape = ModelLoader.readInt32Array(file);
                const output = ModelLoader.readString(file);
                model.memories.push({
                    input: input,
                    shape: TensorShape.createWithArray(shape),
                    output: output
                });
            }

            // --- layers ---
            count = file.readInt32();
            model.layers = new Array(count);
            for (let i = 0; i < count; i++) {
                const name = ModelLoader.readString(file);
                const layerType = file.readInt32() as LayerType;
                const activation = file.readInt32() as ActivationType;
                const layer: Layer = new Layer(
                    name,
                    layerType,
                    activation
                );
                // Two reserved int32 arrays in the format; read and discard
                // to keep the stream position correct.
                ModelLoader.readInt32Array(file); //dummy
                ModelLoader.readInt32Array(file); //dummy
                layer.pad = ModelLoader.readInt32Array(file);
                layer.stride = ModelLoader.readInt32Array(file);
                layer.pool = ModelLoader.readInt32Array(file);
                // Older (4D) formats store axes that must be remapped to
                // the 8D layout for axis-sensitive layer types.
                layer.axis = ModelLoader.convertLayerAxisFor8DShapeSupportIfNeeded(file.readInt32(), Number(version), layerType);
                layer.alpha = file.readFloat32();
                layer.beta = file.readFloat32();
                ModelLoader.readInt32Array(file); //dummy
                layer.inputs = ModelLoader.readStringArray(file);

                // Dataset descriptors: only metadata (offset/length into the
                // trailing weights blob) is read here, not the weights.
                const datasetsLength = file.readInt32();
                layer.datasets = new Array(datasetsLength);
                for (let j = 0; j < datasetsLength; j++) {
                    const datasetName = ModelLoader.readString(file);
                    const datasetShape = TensorShape.createWithArray(ModelLoader.readInt32Array(file));
                    const offset = file.readInt64();
                    const itemSizeInBytes = file.readInt32();
                    const length = file.readInt32();
                    layer.datasets[j] = {
                        name: datasetName,
                        shape: datasetShape,
                        // int64 offset narrowed to number; safe while blobs
                        // stay below Number.MAX_SAFE_INTEGER bytes.
                        offset: Number(offset),
                        itemSizeInBytes: itemSizeInBytes,
                        length: length
                    };
                }

                model.layers[i] = layer;

                if (verbose) {
                    log(`layer ${i}, ${layer.name} type: ${layer.type} ${layer.activation !== ActivationType.None ? `activation ${layer.activation} ` : ''}tensors: ${layer.datasets.length} inputs: ${layer.inputs.join(',')}`);
                }

                if (verbose) {
                    layer.datasets.forEach((dataset) => {
                        log(`      Tensor:${dataset.shape} offset: ${dataset.offset} len: ${dataset.length} `);
                    });
                }
            }

            // --- skip the weights blob ---
            // Total number of weight items across all layer datasets.
            let numWeightsToRead = 0;
            for (const layer of model.layers) {
                for (const dataset of layer.datasets) {
                    numWeightsToRead += dataset.length;
                }
            }

            // From version 20 the per-item byte size is stored explicitly;
            // earlier versions implicitly used 4-byte items.
            let skipOffset = 4;
            if (version >= 20) {
                skipOffset = file.readInt32();
            }

            if (version >= 19) {
                file.offset += skipOffset * numWeightsToRead;
            }

            // Trailing metadata strings (IrSource/IrVersion/ProducerName)
            // are present in the format but intentionally not parsed here.
            // model.IrSource = ModelLoader.readString(file);
            // model.IrVersion = ModelLoader.readString(file);
            // model.ProducerName = ModelLoader.readString(file);

            //todo warnings
        } catch (e) {
            // NOTE(review): errors are swallowed and a partially populated
            // model is returned; callers cannot distinguish failure from
            // success. Preserved for backward compatibility.
            log(e);
        }
        return model;
    }

    /**
     * Reads a length-prefixed UTF-8 string: int32 byte count, then bytes.
     */
    private static readString(file: BinaryReader): string {
        const length = file.readInt32();
        const buffer = new Uint8Array(file.readBytes(length));
        return ModelLoader.textDecoder.decode(buffer);
    }

    /**
     * Reads a length-prefixed int32 array: int32 count, then that many int32s.
     */
    private static readInt32Array(file: BinaryReader): number[] {
        const length = file.readInt32();
        const result = new Array<number>(length);
        for (let i = 0; i < length; i++) {
            result[i] = file.readInt32();
        }
        return result;
    }

    /**
     * Reads a length-prefixed string array: int32 count, then that many strings.
     */
    private static readStringArray(file: BinaryReader): string[] {
        const length = file.readInt32();
        const result = new Array<string>(length);
        for (let i = 0; i < length; i++) {
            result[i] = ModelLoader.readString(file);
        }
        return result;
    }

    /**
     * Remaps a 4D-layout axis to the 8D layout for axis-sensitive layer
     * types when the model predates 8D shape support.
     *
     * @param axis      Axis value as stored in the file.
     * @param version   Model file version (as a number).
     * @param layerType Layer the axis belongs to.
     * @returns The (possibly remapped) axis.
     */
    static convertLayerAxisFor8DShapeSupportIfNeeded(axis: number, version: number, layerType: LayerType): number {
        // Files written after 8D support already store 8D axes.
        if (version > Model.LastVersionWithout8DSupport) {
            return axis;
        }

        // Only these layer types interpret `axis`; others carry a
        // meaningless placeholder that must not be remapped.
        const axisSensitiveTypes: LayerType[] = [
            LayerType.ReduceL2,
            LayerType.ReduceLogSum,
            LayerType.ReduceLogSumExp,
            LayerType.ReduceMax,
            LayerType.ReduceMean,
            LayerType.ReduceMin,
            LayerType.ReduceProd,
            LayerType.ReduceSum,
            LayerType.ReduceSumSquare,
            LayerType.Gather,
            LayerType.Concat
        ];
        if (axisSensitiveTypes.includes(layerType)) {
            axis = TensorExtension.convert4DTo8DAxis(axis);
        }

        return axis;
    }
}