package classifier

import (
	"errors"
	"fmt"
	"math"
	"math/rand"
	"perceptron/perceptron"
	"perceptron/perceptron/functions"

	"gonum.org/v1/gonum/blas/blas64"
	"gonum.org/v1/gonum/floats"
)

// Classifier is a fully connected multi-layer perceptron trained with
// online (per-sample) backpropagation. Layer 0 holds the raw inputs; the
// last layer has one neuron per class.
type Classifier struct {
	layers         []perceptron.Layer // layer 0 = input layer, last = output layer
	inputParamSize int                // number of features expected per input sample
	iters          int                // total training iterations performed by Fit
	learningRate   float64            // step size applied to every weight update
	countClasses   int                // number of classes, i.e. output-layer neurons

	activation  func(float64) float64 // activation function
	dActivation func(float64) float64 // derivative of activation function
}

// Init validates the hyperparameters, selects the sigmoid activation pair,
// and builds the network topology input -> hidden layers -> output with
// weights initialized uniformly in [-1, 1).
//
// layers lists the hidden-layer sizes; iters is the number of training
// iterations; inputParamSize is the feature count per sample; countClasses
// is the number of output neurons; learningRate scales weight updates.
// It returns an error when any parameter is out of range.
func (c *Classifier) Init(layers []uint, iters int, inputParamSize int, countClasses int, learningRate float64) error {
	// len(nil) == 0 for slices, so no separate nil check is needed.
	// NOTE(review): learningRate == 0 and countClasses < 2 are accepted,
	// matching previous behavior — confirm whether they should be rejected.
	if iters <= 0 || inputParamSize <= 0 || len(layers) == 0 || learningRate < 0 {
		return errors.New("invalid params")
	}
	c.inputParamSize = inputParamSize
	c.iters = iters
	c.learningRate = learningRate
	c.countClasses = countClasses

	c.activation = functions.Sigmoid
	c.dActivation = functions.SigmoidDerivative

	// Full topology: input layer, the requested hidden layers, output layer.
	topology := make([]uint, 0, len(layers)+2)
	topology = append(topology, uint(inputParamSize))
	topology = append(topology, layers...)
	topology = append(topology, uint(countClasses))

	// Build each layer; every non-input neuron gets one random weight per
	// neuron in the previous layer.
	c.layers = make([]perceptron.Layer, 0, len(topology))
	for iLayer, size := range topology {
		neurons := make([]perceptron.Neuron, 0, size)
		for i := uint(0); i < size; i++ {
			if iLayer == 0 {
				// Input-layer neurons only latch values; no incoming weights.
				neurons = append(neurons, perceptron.Neuron{Weights: nil})
				continue
			}
			weights := make([]float64, topology[iLayer-1])
			for j := range weights {
				weights[j] = rand.Float64()*2 - 1 // uniform in [-1, 1)
			}
			neurons = append(neurons, perceptron.Neuron{Weights: weights})
		}
		c.layers = append(c.layers, perceptron.Layer{
			Neurons: neurons,
		})
	}

	return nil
}

// Fit trains the network for c.iters iterations, cycling round-robin
// through the samples in x and their class indices in y.
func (c *Classifier) Fit(x [][]float64, y []float64) {
	for iter := 0; iter < c.iters; iter++ {
		sample := x[iter%len(x)]
		target := y[iter%len(y)]

		// Forward pass: fills every neuron's Value.
		c.Run(sample)
		// Backward pass: fills every neuron's Err from the target class.
		c.backPropagation(target)
		// Gradient step using the freshly computed errors.
		c.changeWeight(sample)
	}
}

// changeWeight applies one learning step to every weight, using the
// per-neuron errors computed by backPropagation. x is the raw input sample,
// consumed as the upstream signal for the first weighted layer.
func (c *Classifier) changeWeight(x []float64) {
	rate := c.learningRate // invariant across the whole update
	for iLayer := range c.layers {
		neurons := c.layers[iLayer].Neurons
		for iNeuron := range neurons {
			n := &neurons[iNeuron]
			for iWeight := range n.Weights {
				// Signal feeding this weight: the raw sample value for the
				// first layer, otherwise the activated upstream neuron value.
				var signal float64
				if iLayer == 0 {
					signal = x[iWeight]
				} else {
					signal = c.activation(c.layers[iLayer-1].Neurons[iWeight].Value)
				}
				n.Weights[iWeight] += rate * n.Err * c.dActivation(n.Value) * signal
			}
		}
	}
}

// backPropagation computes the error term for every neuron: the output
// layer's errors come from a one-hot target given by class index y, and
// earlier layers accumulate the downstream errors through the weights.
func (c *Classifier) backPropagation(y float64) {
	last := len(c.layers) - 1
	outNeurons := c.layers[last].Neurons
	target := int(y)

	// Output layer: error = target(0 or 1) - activated output.
	for i := range outNeurons {
		activated := c.activation(outNeurons[i].Value)
		if i == target {
			outNeurons[i].Err = 1 - activated
		} else {
			outNeurons[i].Err = -activated
		}
	}

	// Hidden and input layers, back to front: each neuron sums the errors
	// of the next layer weighted by its outgoing connections.
	for iLayer := last - 1; iLayer >= 0; iLayer-- {
		cur := c.layers[iLayer].Neurons
		next := c.layers[iLayer+1].Neurons
		for i := range cur {
			sum := 0.0
			for j := range next {
				sum += next[j].Err * next[j].Weights[i]
			}
			cur[i].Err = sum
		}
	}
}

// Run performs a forward pass: it latches inputs into the input layer, then
// for every subsequent layer computes each neuron's weighted sum via a BLAS
// dot product and feeds the activated sums forward. It returns the activated
// outputs of the final layer.
//
// Note: Neuron.Value stores the raw (pre-activation) sum; activation is
// applied only to the signal passed between layers.
func (c *Classifier) Run(inputs []float64) []float64 {
	for iLayer, layer := range c.layers {
		if iLayer == 0 {
			// Input layer simply stores the raw feature values.
			for iNeuron := range layer.Neurons {
				layer.Neurons[iNeuron].Value = inputs[iNeuron]
			}
			continue
		}

		inputsVector := blas64.Vector{
			N:    len(inputs),
			Data: inputs,
			Inc:  1,
		}
		// One activated output per neuron in this layer.
		outputs := make([]float64, 0, len(layer.Neurons))
		for iNeuron, neuron := range layer.Neurons {
			weightsVector := blas64.Vector{
				N:    len(neuron.Weights),
				Data: neuron.Weights,
				Inc:  1,
			}

			dotProduct := blas64.Dot(inputsVector, weightsVector)
			layer.Neurons[iNeuron].Value = dotProduct

			if math.IsNaN(dotProduct) {
				// Training diverged; fail loudly rather than silently
				// propagating NaNs through the rest of the network.
				panic("NAN VALUE")
			}

			outputs = append(outputs, c.activation(dotProduct))
		}
		inputs = outputs
	}
	return inputs
}

// Test runs every sample in x through the network and tallies, per predicted
// class, how many predictions matched the expected class in y versus missed.
// It returns the tally as a printable string prefixed with name.
func (c *Classifier) Test(x [][]float64, y []float64, name []string) string {
	correct := make([]float64, c.countClasses)
	wrong := make([]float64, c.countClasses)

	for iSample, sample := range x {
		// Predicted class = index of the largest output activation.
		predicted := floats.MaxIdx(c.Run(sample))
		if predicted == int(y[iSample]) {
			correct[predicted]++
		} else {
			wrong[predicted]++
		}
	}
	return fmt.Sprintln(name, "\ntrue", correct, "\nfalse", wrong)
}
