﻿using System;
using System.Collections.Generic;

namespace Conv
{
    /// <summary>
    /// A feature map of a convolutional network: a 2-D grid of neurons that all
    /// share one <see cref="Mask"/> of weights (and one bias). Provides
    /// constructors for both subsampling (pooling) maps and convolutional maps.
    /// </summary>
    public class FeatureMap : Layer
    {
        /// <summary>
        /// Builds a subsampling (pooling) feature map: every output neuron sums a
        /// <paramref name="resampleRatio"/> x <paramref name="resampleRatio"/> window of the
        /// source map through a single shared weight plus a shared bias
        /// (LeNet-style subsampling). May be better moved to a dedicated subsampling class.
        /// </summary>
        /// <param name="FM">Source feature map to subsample.</param>
        /// <param name="resampleRatio">Edge length AND stride of the pooling window.</param>
        /// <param name="activationFunc">Activation function assigned to each neuron.</param>
        /// <param name="activationFuncDerivative">Derivative of the activation function.</param>
        public FeatureMap(Layer FM, int resampleRatio, Func<double, double> activationFunc, Func<double, double> activationFuncDerivative)
        {
            Width = FM.Width / resampleRatio;
            Height = FM.Height / resampleRatio;

            // One shared trainable coefficient for the whole map; the mask's bias
            // weight is shared the same way.
            Msk = new Mask(1, 1);
            Msk.Weights[0, 0].Value = 1.0;

            Neurons = new Neuron[Width, Height];
            for (int i = 0; i < Width; i++)
            {
                for (int j = 0; j < Height; j++)
                {
                    Neurons[i, j] = new Neuron { ActivationFunc = activationFunc, ActivationFuncDerivative = activationFuncDerivative };
                    // Bug fix: pooling windows must not overlap. The source window of
                    // output neuron (i, j) starts at (i * resampleRatio, j * resampleRatio),
                    // not at (i, j) as before (which gave stride-1 overlapping windows
                    // covering only the top-left corner of the source map).
                    ConnectNeuronToFMap(FM, Neurons[i, j], resampleRatio, Msk.Weights[0, 0], Msk.Biass, i * resampleRatio, j * resampleRatio);
                }
            }
        }

        /// <summary>
        /// Wires a subsampling neuron to its square source window. Every window
        /// connection reuses the same shared <paramref name="weight"/>; a final
        /// connection from <c>BiasNeuron</c> carries the shared bias.
        /// </summary>
        /// <param name="fm">Source layer.</param>
        /// <param name="neuron">Neuron being connected.</param>
        /// <param name="resampleRatio">Edge length of the square source window.</param>
        /// <param name="weight">Weight shared by all window connections.</param>
        /// <param name="bias">Shared bias weight.</param>
        /// <param name="left">Column of the window's top-left corner in the source map.</param>
        /// <param name="up">Row of the window's top-left corner in the source map.</param>
        private void ConnectNeuronToFMap(Layer fm, Neuron neuron, int resampleRatio, Weight weight, Weight bias, int left, int up)
        {
            for (int i = 0; i < resampleRatio; i++)
            {
                for (int j = 0; j < resampleRatio; j++)
                {
                    neuron.Inputs.Add(new Connection { From = fm.Neurons[left + i, up + j], To = neuron, Weight = weight });
                }
            }

            // Bias connection (constant-output neuron, shared bias weight).
            neuron.Inputs.Add(new Connection { From = BiasNeuron, To = neuron, Weight = bias });
        }

        /// <summary>
        /// Builds a convolutional feature map over every source map in
        /// <paramref name="fmList"/>. "Valid" convolution: the output shrinks by
        /// (mask size - 1) in each dimension. All neurons share one mask of
        /// weights and one bias.
        /// </summary>
        /// <param name="fmList">Source feature maps; all are assumed to have the
        /// same dimensions as <c>fmList[0]</c>.</param>
        /// <param name="maskWidth">Kernel width.</param>
        /// <param name="maskHeight">Kernel height.</param>
        /// <param name="activationFunc">Activation function assigned to each neuron.</param>
        /// <param name="activationFuncDerivative">Derivative of the activation function.</param>
        public FeatureMap(List<Layer> fmList, int maskWidth, int maskHeight, Func<double, double> activationFunc, Func<double, double> activationFuncDerivative)
        {
            // Output size follows from "valid" convolution over the first source map.
            Width = fmList[0].Width - (maskWidth - 1);
            Height = fmList[0].Height - (maskHeight - 1);

            Neurons = new Neuron[Width, Height];
            Msk = new Mask(maskWidth, maskHeight);
            for (int i = 0; i < Width; i++)
            {
                for (int j = 0; j < Height; j++)
                {
                    Neurons[i, j] = new Neuron
                                        { ActivationFunc = activationFunc, ActivationFuncDerivative = activationFuncDerivative };
                }
            }

            // NOTE(review): every source map is convolved with the SAME mask —
            // confirm that per-source kernels are not required here.
            foreach (var featureMap in fmList)
            {
                for (int i = 0; i < Width; i++)
                    for (int j = 0; j < Height; j++)
                        ConnectNeuronToFMap(featureMap, Neurons[i, j], Msk, i, j);
            }
        }

        /// <summary>
        /// Wires a convolutional neuron to the mask-sized source window whose
        /// top-left corner is (<paramref name="left"/>, <paramref name="up"/>),
        /// sharing the mask's weights, plus a bias connection.
        /// </summary>
        /// <param name="fm">Source layer.</param>
        /// <param name="neuron">Neuron being connected.</param>
        /// <param name="weights">Shared mask of weights (and bias).</param>
        /// <param name="left">Column of the window's top-left corner in the source map.</param>
        /// <param name="up">Row of the window's top-left corner in the source map.</param>
        private void ConnectNeuronToFMap(Layer fm, Neuron neuron, Mask weights, int left, int up)
        {
            for (int i = 0; i < weights.Width; i++)
            {
                for (int j = 0; j < weights.Height; j++)
                {
                    neuron.Inputs.Add(new Connection { From = fm.Neurons[left + i, up + j], To = neuron, Weight = weights.Weights[i, j] });
                }
            }

            // Bias connection (constant-output neuron, mask's shared bias weight).
            neuron.Inputs.Add(new Connection { From = BiasNeuron, To = neuron, Weight = weights.Biass });
        }

        /// <summary>
        /// Legacy constructor for a convolutional feature map over a single source
        /// layer with an externally supplied mask and explicit output dimensions.
        /// </summary>
        /// <param name="previous">Source layer.</param>
        /// <param name="msk">Shared weight mask to use.</param>
        /// <param name="width">Output width. NOTE(review): must not exceed
        /// previous.Width - (msk.Width - 1), otherwise connection indexing walks
        /// off the source map — confirm against callers.</param>
        /// <param name="height">Output height; same constraint as width.</param>
        /// <param name="activationFunc">Activation function assigned to each neuron.</param>
        /// <param name="activationFuncDerivative">Derivative of the activation function.</param>
        public FeatureMap(Layer previous, Mask msk, int width, int height, Func<double, double> activationFunc, Func<double, double> activationFuncDerivative)
        {
            Width = width;
            Height = height;
            Msk = msk;
            Neurons = new Neuron[Width, Height];

            // (i, j) is the top-left corner of each neuron's source window.
            for (int i = 0; i < width; i++)
            {
                for (int j = 0; j < height; j++)
                {
                    Neurons[i, j] = new Neuron { ActivationFunc = activationFunc, ActivationFuncDerivative = activationFuncDerivative };
                    ConnectNeuronToFMap(previous, Neurons[i, j], Msk, i, j);
                }
            }
        }
    }
}