﻿

using System;

namespace FeedbackNetwork.network.layer
{
    /// <summary>
    /// Fully connected (dense) layer: output = input · weight, plus a row-added bias.
    /// Shapes follow the original author's notes: input is B x M (batch x input size),
    /// weight is M x N (input size x node count).
    /// </summary>
    public class FullConnectLayer : Layer
    {
        // Input cached by Forward (B x M); Backward reads it to build per-sample gradients.
        protected FloatTensor input_data;

        // RNG driving the occasional "random gradient" perturbation in Backward.
        private Random rn = new Random((int)DateTime.Now.Ticks);

        public FullConnectLayer(int input_size, int node_number) : base(input_size, node_number)
        {
        }

        /// <summary>
        /// Forward pass: caches the input for the backward pass, then computes
        /// input · weight with the bias row-added.
        /// </summary>
        /// <param name="input">Input activations, B x M.</param>
        /// <returns>Layer output, B x N.</returns>
        public override FloatTensor Forward(FloatTensor input)
        {
            this.input_data = input;
            return input.MatrixMul(this.weight).RowAdd(this.bias);
        }

        /// <summary>
        /// Backward pass: accumulates weight/bias gradients from the cached input and the
        /// incoming deltas, applies the lr-scaled (and occasionally perturbed) update to
        /// this layer's parameters, and returns the deltas handed to the previous layer.
        /// </summary>
        /// <param name="delta">
        /// Two B x N tensors: delta[0] drives the weight gradient and the transported
        /// delta; delta[1] drives the bias gradient and the propagated delta.
        /// </param>
        /// <returns>{ transported delta (B x M), delta[1] · Wᵀ (B x M) }.</returns>
        public override FloatTensor[] Backward(FloatTensor[] delta)
        {
            int batch = this.input_data.GetDimensionShape(0);
            int inDim = this.weight.GetDimensionShape(0);  // M
            int outDim = this.weight.GetDimensionShape(1); // N

            // Delta propagated through the current weights: delta[1] · Wᵀ (B x M).
            // Deliberately computed BEFORE the weight update further down.
            FloatTensor propagatedDelta = delta[1].MatrixMul(this.weight.Transpose());
            FloatTensor biasGrad = delta[1].ColSum();

            // Accumulate per-sample outer products into the weight gradient; each
            // sample's row-sum becomes the delta transported to the previous layer.
            FloatTensor transportedDelta = new FloatTensor(new int[] { batch, inDim }, 0f); // B x M
            FloatTensor weightGrad = new FloatTensor(new int[] { inDim, outDim }, 0f);
            for (int sample = 0; sample < batch; sample++)
            {
                FloatTensor inputCol = this.input_data.GetDimensionData(0, sample).Transpose();
                FloatTensor deltaRow = delta[0].GetDimensionData(0, sample);
                FloatTensor outer = inputCol.MatrixMul(deltaRow); // M x N
                weightGrad = weightGrad.Add(outer);
                transportedDelta.SetDimensionData(0, sample, outer.RowSum().Transpose());
            }

            weightGrad = weightGrad.Mul(this.lr);
            biasGrad = biasGrad.Mul(this.lr);

            // ~10% of updates receive a fixed "random gradient" offset.
            if (rn.Next(0, 100) < 10)
            {
                weightGrad = weightGrad.Add(0.1f);
                biasGrad = biasGrad.Add(-0.1f);
            }

            // Apply the parameter update.
            this.weight = this.weight.Add(weightGrad);
            this.bias = this.bias.Add(biasGrad);

            return new FloatTensor[] { transportedDelta, propagatedDelta };
        }

        /// <summary>Human-readable dump of the layer's shape, weights and bias.</summary>
        public override string ToString()
        {
            string header = "全连接层， 节点数： " + this.weight.GetDimensionShape(1) + ", 输入：Bx" + this.weight.GetDimensionShape(0) + "\n";
            return header
                + "权重数据: \n" + this.weight.ToString() + "\n"
                + "偏置数据: \n" + this.bias.ToString() + "\n";
        }
    }
}
