/* eslint-disable @typescript-eslint/no-unused-vars */
import { ITensorAllocator } from '../../Core/Backends/ITensorAllocator';
import { TensorShape } from '../../Core/Tensor';
import { ObservationWriter } from '../Sensors/ObservationWriter';
import { AgentInfoSensorsPair } from './ModelRunner';
import { IGenerator } from './TensorGenerator';
import { TensorProxy, TensorUtils } from './TensorProxy';
import { RandomNormal } from './Utils/RandomNormal';

/**
 * @en
 * Generates the Tensor corresponding to the BatchSize input : Will be a one dimensional
 * integer array of size 1 containing the batch size.
 *
 * @zh
 * 生成与 BatchSize 输入对应的 Tensor：将是一个大小为 1 的一维整数数组，包含批次大小。
 *
 */
export class BatchSizeGenerator implements IGenerator {
	private _allocator: ITensorAllocator;

	constructor(allocator: ITensorAllocator) {
		this._allocator = allocator;
	}

	/**
	 * Allocates a [1, 1] tensor and writes the current batch size into it.
	 */
	generate(tensorProxy: TensorProxy, batchSize: number, infos: AgentInfoSensorsPair[]): void {
		const shape = TensorShape.createWithArray([1, 1]);
		const tensor = this._allocator.alloc(shape);
		tensor.setCache(0, batchSize);
		tensorProxy.data = tensor;
	}
}

/**
 * @en
 * Reshapes a Tensor so that its first dimension becomes equal to the current batch size
 * and initializes its content to be zeros. Will only work on 2-dimensional tensors.
 * The second dimension of the Tensor will not be modified.
 *
 * @zh
 * 重新整形一个 Tensor，使其第一个维度等于当前批次大小，并将其内容初始化为零。仅适用于二维张量。
 * Tensor 的第二个维度将不会被修改。
 *
 */
export class BiDimensionalOutputGenerator implements IGenerator {
	constructor(private _allocator: ITensorAllocator) {}

	/**
	 * Resizes the tensor's first dimension to the current batch size;
	 * the second dimension is left untouched.
	 */
	generate(tensorProxy: TensorProxy, batchSize: number, infos: AgentInfoSensorsPair[]): void {
		TensorUtils.resizeTensor(tensorProxy, batchSize, this._allocator);
	}
}

/**
 * Generates the Tensor for the SequenceLength input: a scalar-shaped
 * tensor whose single cached value is the constant 1.
 */
export class SequenceLengthGenerator implements IGenerator {
	private _allocator: ITensorAllocator;

	constructor(allocator: ITensorAllocator) {
		this._allocator = allocator;
	}

	generate(tensorProxy: TensorProxy, batchSize: number, infos: AgentInfoSensorsPair[]): void {
		// Scalar: the proxy carries an empty shape, backed by a [1, 1] buffer.
		tensorProxy.shape = [];
		const tensor = this._allocator.alloc(TensorShape.createWithArray([1, 1]));
		tensorProxy.data = tensor;
		tensor.setCache(0, 1);
	}
}

/**
 * Generates the Tensor for the recurrent (memory) input: copies each
 * agent's stored memory vector, keyed by episode id, into the
 * corresponding batch row of the tensor.
 */
export class RecurrentInputGenerator implements IGenerator {
	private _allocator: ITensorAllocator;
	// Maps an episode id to the memory vector carried over from the previous step.
	private _memories: {[id:number]: number[]} = {};

	constructor(allocator: ITensorAllocator, memories: {[id:number]: number[]}) {
		this._allocator = allocator;
		this._memories = memories;
	}

	generate(tensorProxy: TensorProxy, batchSize: number, infos: AgentInfoSensorsPair[]): void {
		TensorUtils.resizeTensor(tensorProxy, batchSize, this._allocator);

		const memorySize = tensorProxy.data.width;

		let agentIndex = 0;
		for (let infoIndex = 0; infoIndex < infos.length; infoIndex++) {
			const info = infos[infoIndex].agentInfo;

			// A finished episode must not leak its memory into the next one.
			if (info.done) {
				delete this._memories[info.episodeId];
			}

			const memory = this._memories[info.episodeId];
			if (!memory) {
				// No stored memory (new or just-finished episode): zero the row.
				for (let j = 0; j < memorySize; j++) {
					tensorProxy.data.setCache([agentIndex, 0, j, 0], 0);
				}
				agentIndex++;
				continue;
			}

			// Copy the stored memory, clamped to the tensor's memory size.
			// (Fix: removed a redundant `j >= memory.length` break inside this
			// loop — the Math.min bound already guarantees it can never fire.)
			// NOTE(review): when memory.length < memorySize, trailing entries are
			// left as produced by resizeTensor — presumably zeroed; confirm
			// against TensorUtils.resizeTensor.
			const count = Math.min(memorySize, memory.length);
			for (let j = 0; j < count; j++) {
				tensorProxy.data.setCache([agentIndex, 0, j, 0], memory[j]);
			}
			agentIndex++;
		}
	}
}

/**
 * Generates the Tensor for the PreviousAction input: copies each agent's
 * stored discrete actions into the corresponding batch row.
 */
export class PreviousActionInputGenerator implements IGenerator {
	private _allocator: ITensorAllocator;

	constructor(allocator: ITensorAllocator) {
		this._allocator = allocator;
	}

	generate(tensorProxy: TensorProxy, batchSize: number, infos: AgentInfoSensorsPair[]): void {
		TensorUtils.resizeTensor(tensorProxy, batchSize, this._allocator);

		// The last shape dimension holds the number of discrete actions.
		const actionSize = tensorProxy.shape[tensorProxy.shape.length - 1];
		let agentIndex = 0;
		for (const pair of infos) {
			const storedActions = pair.agentInfo.storedActions.discreteActions;
			// An empty action buffer leaves the row as resizeTensor produced it.
			if (!storedActions.isEmpty()) {
				for (let actionIndex = 0; actionIndex < actionSize; actionIndex++) {
					tensorProxy.data.setCache([agentIndex, actionIndex], storedActions.getValue(actionIndex));
				}
			}
			agentIndex++;
		}
	}
}

/**
 * Generates the Tensor for the ActionMask input: writes 1.0 for every
 * action an agent may take and 0.0 for every masked (forbidden) action.
 */
export class ActionMaskInputGenerator implements IGenerator {
	private _allocator: ITensorAllocator;

	constructor(allocator: ITensorAllocator) {
		this._allocator = allocator;
	}

	generate(tensorProxy: TensorProxy, batchSize: number, infos: AgentInfoSensorsPair[]): void {
		TensorUtils.resizeTensor(tensorProxy, batchSize, this._allocator);

		// The last shape dimension holds the total number of discrete actions.
		const maskSize = tensorProxy.shape[tensorProxy.shape.length - 1];
		let agentIndex = 0;
		for (const pair of infos) {
			const masks = pair.agentInfo.discreteActionMasks;
			for (let actionIndex = 0; actionIndex < maskSize; actionIndex++) {
				// No mask list (or an unset entry) means the action is allowed.
				const masked = masks != null && masks[actionIndex];
				tensorProxy.data.setCache([agentIndex, actionIndex], masked ? 0.0 : 1.0);
			}
			agentIndex++;
		}
	}
}

/**
 * Generates a Tensor filled with values drawn from a seeded normal
 * distribution, resized to the current batch size.
 */
export class RandomNormalInputGenerator implements IGenerator {
	private readonly m_RandomNormal: RandomNormal;

	constructor(seed: number, private readonly m_Allocator: ITensorAllocator) {
		this.m_RandomNormal = new RandomNormal(seed);
	}

	generate(tensorProxy: TensorProxy, batchSize: number, infos: AgentInfoSensorsPair[]): void {
		TensorUtils.resizeTensor(tensorProxy, batchSize, this.m_Allocator);
		TensorUtils.fillTensorWithRandomNormal(tensorProxy, this.m_RandomNormal);
	}
}

/**
 * Generates the observation input Tensor: writes each registered sensor's
 * observations consecutively into the batch row of its agent.
 */
export class ObservationGenerator implements IGenerator {
	private readonly m_Allocator: ITensorAllocator;
	private m_SensorIndices: number[] = [];
	private m_ObservationWriter: ObservationWriter = new ObservationWriter();

	constructor(allocator: ITensorAllocator) {
		this.m_Allocator = allocator;
	}

	/**
	 * Registers a sensor (by its index in the agent's sensor list) whose
	 * output this generator should write.
	 */
	public addSensorIndex(sensorIndex: number): void {
		this.m_SensorIndices.push(sensorIndex);
	}

	public generate(tensorProxy: TensorProxy, batchSize: number, infos: AgentInfoSensorsPair[]): void {
		TensorUtils.resizeTensor(tensorProxy, batchSize, this.m_Allocator);
		let agentIndex = 0;
		for (const pair of infos) {
			if (pair.agentInfo.done) {
				// If the agent is done, we might have a stale reference to the sensors
				// e.g. a dependent object might have been disposed.
				// To avoid this, just fill observation with zeroes instead of calling sensor.Write.
				TensorUtils.fillTensorBatch(tensorProxy, agentIndex, 0.0);
			} else {
				// Write each registered sensor back-to-back into this agent's row.
				let tensorOffset = 0;
				for (const sensorIndex of this.m_SensorIndices) {
					const sensor = pair.sensors[sensorIndex];
					this.m_ObservationWriter.setTarget(tensorProxy, agentIndex, tensorOffset);
					tensorOffset += sensor.write(this.m_ObservationWriter);
				}
			}
			agentIndex++;
		}
	}
}
