pub mod activation;

use std::path::Path;

pub use activation::Activation;
use crate::{
    canvas::{Canvas, Color}, color, function, matrix::{Matrix, MatrixView, Vector}
};


/// A simple fully-connected multilayer perceptron.
///
/// Layer `i + 1` is computed as `activation(weights[i] * layers[i] + biases[i])`;
/// `layers[0]` holds the most recent input and the last entry of `layers`
/// holds the most recent prediction (the forward pass caches activations
/// in place, which is why inference needs `&mut self`).
#[derive(Debug, Clone)]
pub struct MLPClassifier {
    // weights[i]: connection matrix from layer i to layer i + 1,
    // shaped (neurons in layer i+1, neurons in layer i).
    weights: Vec<Matrix>,
    // biases[i]: column vector added to layer i + 1, shaped (neurons, 1).
    biases: Vec<Matrix>,
    // Cached per-layer activations as column vectors; length = layer_size + 1.
    layers: Vec<Matrix>,

    // Number of weighted layers (one fewer than the number of neuron layers).
    layer_size: usize,

    // Activation function applied after every weighted sum.
    activation: Activation,
}

impl MLPClassifier {
    /// Builds a network whose layer widths are given by `archs`:
    /// `archs[0]` is the input width, every following entry is a
    /// hidden/output layer width. Weights and biases are initialized to
    /// small uniform random values (range `[0, 1)` scaled by `0.01`).
    ///
    /// # Panics
    /// Panics if `archs` is empty.
    pub fn new(archs: &[usize], activation: Activation) -> Self {
        // Base range for random init; scaled by 0.01 below so the
        // initial parameters stay close to zero.
        const MIN: f64 = 0.;
        const MAX: f64 = 1.;

        assert!(!archs.is_empty());
        // Number of weighted layers (one fewer than neuron layers).
        let layer_size = archs.len() - 1;

        let mut weights = vec![];
        let mut biases = vec![];
        // layers[0] holds the input; the rest hold post-activation values.
        let mut layers = vec![Matrix::zeros((archs[0], 1))];
        // Walk consecutive pairs: arch = archs[i + 1], last_arch = archs[i].
        for (&arch, &last_arch) in archs.iter().skip(1).zip(archs.iter()) {
            weights.push(0.01 * Matrix::randoms((arch, last_arch), (MIN, MAX)));
            biases.push(0.01 * Matrix::randoms((arch, 1), (MIN, MAX)));
            layers.push(Matrix::zeros((arch, 1)));
        }

        Self {
            weights, biases, layers, layer_size, activation
        }
    }

    /// Returns the number of neurons in layer `index`.
    ///
    /// # Panics
    /// Panics if `index` is out of range or the layer is not a column vector.
    pub fn layer_neuron_size(&self, index: usize) -> usize {
        assert!(self.layers[index].col_size() == 1);
        self.layers[index].row_size()
    }

    /// Trains with plain per-sample (stochastic) gradient descent: each
    /// epoch runs one forward/backward pass per `(input, output)` pair.
    /// Extra elements in the longer of the two slices are ignored (zip
    /// stops at the shorter one).
    pub fn train(&mut self, train_input: &[Vector], train_output: &[Vector], epochs: usize, learn_rate: f64) {
        for _ in 0..epochs {
            for (input, output) in Iterator::zip(train_input.iter(), train_output.iter()) {
                self.forward(input.col_view());
                self.backward(output.col_view(), learn_rate);
            }
        }
    }

    /// Runs a forward pass on `input` and returns the output layer.
    /// Takes `&mut self` because the pass caches activations in `self.layers`.
    pub fn predict(&mut self, input: &Vector) -> Vector {
        self.forward(input.col_view());
        self.layers.last().unwrap().clone().to_vector()
    }

    /// Forward pass: layers[i+1] = activation(weights[i] * layers[i] + biases[i]).
    fn forward(&mut self, input: MatrixView) {
        assert_eq!(self.layers[0].size(), input.size());
        self.layers[0].view_mut().copy_from(input);

        for i in 0..self.layer_size {
            // split_at_mut lets us read layers[i] (last of `left`) while
            // writing layers[i+1] (first of `right`) without aliasing.
            let (left, right) = self.layers.split_at_mut(i + 1);
            right[0].dot_by(&self.weights[i], &left[i]);
            right[0].sum_eq(&self.biases[i]);
            self.activation.actived(right[0].view_mut());
        }
    }

    /// Backpropagation for one sample. `forward` must have been called
    /// first so `self.layers` holds the activations for this input.
    fn backward(&mut self, output: MatrixView, learn_rate: f64) {
        assert_eq!(output.col_size(), 1);

        // Output-layer error (target - prediction): the negative gradient
        // of squared error, which is why the updates below use `+=`.
        let y_predict = self.layers.last().unwrap().view();
        let mut gradient_neurons = output - y_predict;

        for i in (1..=self.layer_size).rev() {
            // NOTE(review): dactive is fed the *post-activation* values of
            // layer i — assumes Activation::dactive expects activated input
            // (as with sigmoid's a*(1-a) form); confirm against activation.rs.
            let gradient_bias = gradient_neurons * self.activation.dactive(self.layers[i].view());

            // dW = delta * a_{i-1}^T; then propagate delta back through W.
            let gradient_weight =
                gradient_bias.reshape_to_col_view().dot(self.layers[i-1].reshape_to_row_view());
            gradient_neurons =
                gradient_bias.reshape_to_row_view().dot(self.weights[i-1].view()).reshape_to_col();

            self.weights[i - 1] += learn_rate * gradient_weight;
            self.biases[i - 1] += learn_rate * gradient_bias;
        }
    }
}

impl MLPClassifier {
    /// Renders the network topology to a PNG at `path`.
    ///
    /// Edges are colored by weight (more red = larger, more green = smaller,
    /// after squashing through a sigmoid); non-input neurons are colored by
    /// bias (more blue = larger, more red = smaller). Input-layer neurons
    /// carry no bias and are drawn in light gray.
    ///
    /// # Errors
    /// Returns `Err` if the network has fewer than two layers (there is
    /// nothing to lay out and layer spacing would divide by zero) or if
    /// saving the PNG fails.
    pub fn visualize_to<P: AsRef<Path>>(&self, path: P) -> Result<(), String> {
        const IMAGE_WIDTH: usize = 900;
        const IMAGE_HEIGHT: usize = 600;
        const BACK_COLOR: Color = color!(0x18, 0x18, 0x18);
        const NEURON_RADIUS: i32 = 25;
        const LAYER_BORDER_HPAD: i32 = 100;
        const LAYER_BORDER_VPAD: i32 = 50;

        // Guard the `layers.len() - 1` divisor below: a single-layer
        // network would otherwise panic on integer division by zero.
        if self.layers.len() < 2 {
            return Err("cannot visualize a network with fewer than 2 layers".to_string());
        }

        let mut canvas = Canvas::new(IMAGE_WIDTH, IMAGE_HEIGHT);
        canvas.fill(BACK_COLOR);

        // Usable drawing area after subtracting the borders.
        let mlp_width = canvas.width() as i32 - 2 * LAYER_BORDER_HPAD;
        let mlp_height = canvas.height() as i32 - 2 * LAYER_BORDER_VPAD;
        // Horizontal spacing between consecutive layers.
        let layer_hpad = mlp_width / (self.layers.len() - 1) as i32;

        // Precompute the (x, y) center of every neuron, grouped by layer.
        let neuron_positions = (0..self.layers.len()).map(|layer| -> Vec<(i32, i32)> {
            let neuron_size = self.layer_neuron_size(layer) as i32;
            // NOTE(review): divides by zero if a layer has 0 neurons —
            // `new` does not currently forbid zero entries in `archs`;
            // verify upstream or add a check there.
            let layer_vpad = mlp_height / neuron_size;

            let layer_x = LAYER_BORDER_HPAD + layer_hpad * layer as i32;
            let layer_y = LAYER_BORDER_VPAD;

            (0..neuron_size).map(|i| {
                let cx = layer_x;
                let cy = layer_y + i * layer_vpad + layer_vpad / 2;
                (cx, cy)
            }).collect()
        }).collect::<Vec<_>>();

        // Draw all connections first so the neuron circles are painted
        // on top of the lines.
        for (layer, (layer_neurons, next_layer_neurons)) in
            neuron_positions.iter().zip(neuron_positions.iter().skip(1)).enumerate()
        {
            for (neuron_index, neuron) in layer_neurons.iter().enumerate() {
                for (next_neuron_index, next_neuron) in next_layer_neurons.iter().enumerate() {
                    let color = {
                        // Squash the unbounded weight into [0, 1] for coloring.
                        let weight = self.weights[layer][(next_neuron_index, neuron_index)];
                        let red = (function::sigmoid(weight) * 255.0) as u8;
                        let green = ((1. - function::sigmoid(weight)) * 255.0) as u8;
                        color!(red, green, 127)
                    };
                    canvas.line(color, neuron.0, neuron.1, next_neuron.0, next_neuron.1);
                }
            }
        }

        // Draw the neurons themselves.
        for (layer, neurons) in neuron_positions.iter().enumerate() {
            for (index, neuron) in neurons.iter().enumerate() {
                let color = if layer == 0 {
                    // Input neurons have no bias; use a neutral color.
                    Color::light_gray()
                } else {
                    let bias = self.biases[layer - 1][(index, 0)];
                    let blue = (function::sigmoid(bias) * 255.0) as u8;
                    let red = ((1. - function::sigmoid(bias)) * 255.0) as u8;
                    color!(red, 127, blue)
                };
                canvas.circle(color, neuron.0, neuron.1, NEURON_RADIUS);
            }
        }

        canvas.save_png(path)?;
        Ok(())
    }
}