import { ITensorAllocator } from "../../Core/Backends/ITensorAllocator"
import { TensorCachingAllocator } from "../../Core/Backends/TensorCachingAllocator"
import { IWorker, WorkerFactory, WorkerType } from "../../Core/Backends/Worker"
import NNModel from "../../Core/Internals/NNModel"
import { Model } from "../../Core/Model"
import { Tensor } from "../../Core/Tensor"
import Macro from "../../Macro"
import { ActionSpec } from "../Actuators/ActionSpec"
import { ActionBuffers } from "../Actuators/IActionReceiver"
import { AgentInfo } from "../Agent"
import { InferenceDevice } from "../Policies/BarracudaPolicy"
import { ISensor } from "../Sensors/ISensor"
import { SensorShapeValidator } from "../Sensors/SensorShapeValidator"
import { ModelExtensions } from "./ModelExtensions"
import { TensorApplier } from "./TensorApplier"
import { TensorGenerator } from "./TensorGenerator"
import { TensorProxy, TensorUtils } from "./TensorProxy"

/**
 * Pairs an agent's decision-request info with the sensors that captured the
 * observations for that request. Queued by ModelRunner until the next batch
 * of inference runs.
 */
export type AgentInfoSensorsPair = {
	agentInfo: AgentInfo,
	sensors: ISensor[]
}


/**
 * Executes batched neural-network inference for agents.
 *
 * Agents queue their observations through {@link putObservations}; a single
 * call to {@link decideBatch} then generates the input tensors, executes the
 * model on the configured inference device, and applies the output tensors
 * back into per-agent action buffers retrieved via {@link getAction}.
 */
export class ModelRunner {
	/** Agent info / sensor pairs queued since the last decideBatch(). */
	private _infos: AgentInfoSensorsPair[] = [];
	/** Most recent actions produced for each agent, keyed by episode id. */
	private _lastActionsReceived: { [key: number]: ActionBuffers } = {};
	/** Episode ids in the order their decision requests arrived this batch. */
	private _orderedAgentsRequestingDecisions: number[] = [];

	private _tensorAllocator: ITensorAllocator;
	private _tensorGenerator: TensorGenerator;
	private _tensorApplier: TensorApplier;

	private _model: NNModel;
	private _modelName: string;
	private _inferenceDevice: InferenceDevice;
	private _engine: IWorker | null = null;
	private _verbose = false;
	private _deterministicInference: boolean;
	private _outputNames: string[];
	private _inferenceInputs: TensorProxy[];
	private _inferenceOutputs: TensorProxy[];
	private _inputsByName: { [key: string]: Tensor };
	// Recurrent memories per episode id; the same object is shared with the
	// tensor generator and applier so all three see updates.
	private _memories: { [key: number]: number[] } = {};

	private _sensorShapeValidator = new SensorShapeValidator();

	// True once the generator has been initialized from the first agent's sensors.
	private _observationsInitialized = false;

	/**
	 * @param model - Model to execute. May be null, in which case no inference
	 *     engine is created.
	 * @param actionSpec - Description of the action space the model outputs.
	 * @param inferenceDevice - Where to execute the model (CPU / GPU / Default).
	 * @param seed - Seed used for stochastic action sampling.
	 * @param deterministicInference - When true, select deterministic model
	 *     outputs instead of sampled ones.
	 */
	constructor(
		model: NNModel,
		actionSpec: ActionSpec,
		inferenceDevice: InferenceDevice,
		seed = 0,
		deterministicInference = false
	) {
		let barracudaModel: Model | null = null;
		this._model = model;
		this._modelName = model?.name;
		this._inferenceDevice = inferenceDevice;
		this._deterministicInference = deterministicInference;
		this._tensorAllocator = new TensorCachingAllocator();
		if (model != null) {
			// #ifdef BARRACUDA_VERBOSE
			// this._verbose = true;
			// #endif

			// D.logEnabled = this._verbose;

			// barracudaModel = ModelLoader.load(model);
			barracudaModel = new Model();

			// const failedCheck = BarracudaModelParamLoader.CheckModelVersion(barracudaModel);
			// if (failedCheck != null) {
			// 	if (failedCheck.CheckType == BarracudaModelParamLoader.FailedCheck.CheckTypeEnum.Error) {
			// 		throw new UnityAgentsException(failedCheck.Message);
			// 	}
			// }

			// Map the requested inference device onto a worker backend;
			// Default deliberately falls through to the compute (GPU) worker.
			let executionDevice: WorkerType
			switch (inferenceDevice) {
				case InferenceDevice.CPU:
					executionDevice = WorkerType.TS;
					break;
				case InferenceDevice.GPU:
					executionDevice = WorkerType.Compute;
					break;
				case InferenceDevice.Default:
				default:
					executionDevice = WorkerType.Compute;
					break;
			}
			this._engine = WorkerFactory.createWorker(executionDevice, barracudaModel, model.bytes, this._verbose);
		} else {
			barracudaModel = null;
			this._engine = null;
		}

		// NOTE(review): when model is null, barracudaModel is null here and the
		// `!` assertions below pass null through at runtime — the helpers are
		// assumed to tolerate a null model; verify against their implementations.
		this._inferenceInputs = ModelExtensions.getInputTensors(barracudaModel!);
		this._outputNames = ModelExtensions.getOutputNames(barracudaModel!, this._deterministicInference);
		this._tensorGenerator = new TensorGenerator(
			seed, this._tensorAllocator, this._memories, barracudaModel!, this._deterministicInference);
		// Use the same non-null assertion as the sibling calls above for consistency.
		this._tensorApplier = new TensorApplier(
			actionSpec, seed, this._tensorAllocator, this._memories, barracudaModel!, this._deterministicInference);
		this._inputsByName = {};
		this._inferenceOutputs = [];
	}

	/** The device this runner executes inference on. */
	get InferenceDevice(): InferenceDevice {
		return this._inferenceDevice;
	}

	/** The model this runner was constructed with. */
	get Model(): NNModel {
		return this._model;
	}

	/** Rebuilds the name → tensor-data map fed to the engine. */
	private prepareBarracudaInputs(infInputs: TensorProxy[]): void {
		this._inputsByName = {};
		for (let i = 0; i < infInputs.length; i++) {
			const inp = infInputs[i];
			this._inputsByName[inp.name] = inp.data;
		}
	}

	/** Releases the inference engine and cached tensors. Safe to call twice. */
	public dispose(): void {
		if (this._engine != null) {
			this._engine.dispose();
			// Drop the reference so a later decideBatch() or dispose() cannot
			// touch the already-disposed worker through `?.`.
			this._engine = null;
		}
		this._tensorAllocator?.reset(false);
	}

	/** Reads each named output tensor from the engine into _inferenceOutputs. */
	private fetchBarracudaOutputs(names: string[]): void {
		this._inferenceOutputs = [];
		for (const n of names) {
			const output = this._engine?.peekOutput(n);
			// Fail loudly instead of forwarding undefined (engine missing or
			// unknown output name) into tensorProxyFromBarracuda.
			if (output == null) {
				throw new Error(`ModelRunner: missing inference output '${n}'`);
			}
			this._inferenceOutputs.push(TensorUtils.tensorProxyFromBarracuda(output, n));
		}
	}


	/**
	 * Queues an agent's observations for the next decideBatch() call.
	 * @param info - The agent's info for this decision request.
	 * @param sensors - Sensors capturing the agent's observations.
	 */
	public putObservations(info: AgentInfo, sensors: ISensor[]): void {
		// Validate sensor shapes in development builds only.
		if (Macro.CCC_DEV) {
			this._sensorShapeValidator.validateSensors(sensors);
		}

		// Store the agent info and its sensors for the upcoming batch.
		this._infos.push({
			agentInfo: info,
			sensors: sensors
		});

		// Remember the request order so outputs map back to the right agents.
		this._orderedAgentsRequestingDecisions.push(info.episodeId);

		// First request for this episode: start from an empty action buffer.
		if (!(info.episodeId in this._lastActionsReceived)) {
			this._lastActionsReceived[info.episodeId] = ActionBuffers.empty();
		}

		// A finished agent no longer needs its cached actions.
		if (info.done) {
			delete this._lastActionsReceived[info.episodeId];
		}
	}


	/**
	 * Runs one inference pass over all queued observations: generates input
	 * tensors, executes the engine, fetches the output tensors, and applies
	 * them to the agents that requested decisions. No-op when nothing queued.
	 */
	public decideBatch(): void {
		const currentBatchSize = this._infos.length;
		if (currentBatchSize === 0) {
			return;
		}
		// Lazily size the tensor generator from the first agent's sensors.
		if (!this._observationsInitialized) {
			const firstInfo = this._infos[0];
			this._tensorGenerator.initializeObservations(firstInfo.sensors, this._tensorAllocator);
			this._observationsInitialized = true;
		}

		// Prepare the input tensors to be fed into the engine.
		this._tensorGenerator.generateTensors(this._inferenceInputs, currentBatchSize, this._infos);

		this.prepareBarracudaInputs(this._inferenceInputs);

		this._engine?.execute(this._inputsByName);

		this.fetchBarracudaOutputs(this._outputNames);

		// Apply the output tensors to the agents, in request order.
		this._tensorApplier.applyTensors(this._inferenceOutputs, this._orderedAgentsRequestingDecisions, this._lastActionsReceived);

		this._infos = [];
		this._orderedAgentsRequestingDecisions = [];
	}

	/**
	 * Whether this runner already serves the given model on the given device
	 * (reference equality on the model).
	 */
	public hasModel(other: NNModel, otherInferenceDevice: InferenceDevice): boolean {
		return this._model === other && this._inferenceDevice === otherInferenceDevice;
	}

	/**
	 * Returns the last actions computed for an agent, or an empty buffer when
	 * none exist (agent unknown or already done).
	 */
	public getAction(agentId: number): ActionBuffers {
		if (agentId in this._lastActionsReceived) {
			return this._lastActionsReceived[agentId];
		}
		return ActionBuffers.empty();
	}
}
