using System.Collections;
using System.Collections.Generic;
using UnityEngine;

namespace JufGame
{
	/// <summary>
	/// Drives gradient-descent training of an <see cref="MLP"/> on the GPU:
	/// each frame it runs a forward/backward pass, then dispatches the
	/// "Update" compute-shader kernel to apply weight/bias updates.
	/// Once the mean loss drops to <see cref="targetError"/> (or training is
	/// disabled) it switches to inference and logs the network output.
	/// </summary>
	public class Optimizer : MonoBehaviour
	{
		public ComputeShader optimizer;      // shader containing the "Update" kernel
		public WeightBiasMemory memory;      // shared weight/bias storage for the network
		public bool isTraining;              // inspector toggle to start/stop training
		public float targetError = 0.001f;   // mean-loss threshold that ends training
		public float curError;               // most recent mean loss (MaxValue before first step)
		public int batch;                    // mini-batch size
		public float learningRate = 0.1f;
		public float[] input;                // flattened training input
		public float[] target;               // flattened training target
		private MLP trainNet;
		private LossFunc lastLayer;
		private int kernelUpdate;            // fixed typo: was "kernelUpadate"
		private int threadGroupsX, threadGroupsY;
		private float[] lossArr;             // CPU-side copy of the per-element loss buffer
		private bool isReleased;             // guards against double Release (OnDisable + OnDestroy)

		private void Start()
		{
			curError = float.MaxValue;
			trainNet = new MLP(memory, batch);
			lastLayer = new LossFunc(memory.lossFunc, batch, memory.WeiBiasArray[^1].outputCount);
			lossArr = new float[lastLayer.lossBuffer.count];
			kernelUpdate = optimizer.FindKernel("Update");
			optimizer.SetInt("batch", batch);
			optimizer.SetFloat("learningRate", learningRate);
			// NOTE(review): thread groups are sized from the LAST layer's output count
			// and the batch size, but are reused for EVERY layer's weight/bias dispatch
			// in UpdateWeightAndBias. Verify the "Update" kernel still covers all
			// elements of earlier layers whose buffers may be larger — TODO confirm
			// against the compute shader's numthreads/indexing.
			threadGroupsX = Mathf.CeilToInt(memory.WeiBiasArray[^1].outputCount / 8.0f);
			threadGroupsY = Mathf.CeilToInt(batch / 8.0f);
		}

		private void Update()
		{
			if(isTraining && curError > targetError)
			{
				// One optimization step: forward + backward pass, then read the
				// loss back to the CPU to track the mean error.
				ComputeGradient(input, target);
				lastLayer.lossBuffer.GetData(lossArr);
				curError = 0;
				foreach(var l in lossArr)
				{
					curError += l;
				}
				curError /= lossArr.Length;
				UpdateWeightAndBias();
			}
			else
			{
				// Training finished (or disabled): publish the weights and log a
				// prediction. This branch runs every frame, so the output is
				// logged repeatedly while the component stays enabled.
				memory.isFinishedWeightAndBias = true;
				trainNet.Predict(input);
				var res = trainNet.outputData;
				foreach(var v in res)
				{
					Debug.Log(v);
				}
			}
		}

		/// <summary>
		/// Applies the compute-shader update to every affine layer's weights and
		/// biases, then mirrors the updated GPU buffers back into CPU arrays.
		/// </summary>
		private void UpdateWeightAndBias()
		{
			foreach(var h in trainNet.hiddenLayers)
			{
				// Pattern match instead of "is" + "as" (single type test, idiomatic C#).
				if(h is Affine curLayer)
				{
					optimizer.SetInt("inputCount", curLayer.dWeights.Length);
					optimizer.SetBuffer(kernelUpdate, "gradsBuffer", curLayer.dWeightBuffer);
					optimizer.SetBuffer(kernelUpdate, "updatedBuffer", curLayer.weightBuffer);
					optimizer.Dispatch(kernelUpdate, threadGroupsX, threadGroupsY, 1);
					curLayer.weightBuffer.GetData(curLayer.weights);

					optimizer.SetInt("inputCount", curLayer.dBias.Length);
					optimizer.SetBuffer(kernelUpdate, "gradsBuffer", curLayer.dBiasBuffer);
					optimizer.SetBuffer(kernelUpdate, "updatedBuffer", curLayer.biasBuffer);
					optimizer.Dispatch(kernelUpdate, threadGroupsX, threadGroupsY, 1);
					curLayer.biasBuffer.GetData(curLayer.bias);
				}
			}
		}

		/// <summary>
		/// Runs a forward pass and evaluates the loss of the network's output
		/// against <paramref name="target"/>.
		/// </summary>
		private void ComputeLoss(float[] input, float[] target)
		{
			trainNet.Predict(input);
			lastLayer.SetTarget(target);
			lastLayer.Forward(trainNet.hiddenLayers[^1].outputBuffer);
		}

		/// <summary>
		/// Forward pass followed by back-propagation through every hidden layer,
		/// leaving the per-layer gradients in their dWeight/dBias buffers.
		/// </summary>
		private void ComputeGradient(float[] input, float[] target)
		{
			// ComputeLoss already calls lastLayer.SetTarget(target); the original
			// duplicate call here was redundant and has been removed.
			ComputeLoss(input, target);
			lastLayer.Backward(null);
			var dout = lastLayer.dInputBuffer;
			for(int i = trainNet.hiddenLayers.Length - 1; i >= 0; --i)
			{
				trainNet.hiddenLayers[i].Backward(dout);
				dout = trainNet.hiddenLayers[i].dInputBuffer;
			}
		}

		/// <summary>
		/// Releases GPU resources owned by the network and the loss layer.
		/// Idempotent: OnDisable and OnDestroy can both fire during teardown,
		/// and releasing a ComputeBuffer twice triggers Unity warnings.
		/// </summary>
		public void Release()
		{
			if(isReleased)
			{
				return;
			}
			isReleased = true;
			trainNet?.Release();
			lastLayer?.Release();
		}

		private void OnDestroy()
		{
			Release();
		}

		private void OnDisable()
		{
			Release();
		}
	}
}
