use crate::Var;
use crate::var;
use super::active::Activation;

use rand::{self, Rng};

/*

    x0 --+
         |   +-----+
         +-->|     |
    x1 ----->| +b  |------>()
         +-->|     |
         |   +-----+
    x2 --+

*/

/// A single neuron: computes `activation(weights · inputs + bias)`.
///
/// Each input `x_i` is scaled by its weight, the products are summed
/// together with the bias, and the optional activation is applied to
/// produce the scalar output (see the diagram above).
#[derive(Debug)]
pub struct Neural {
    // One weight per input dimension.
    weights: Vec<Var>,
    // Additive bias term.
    bias: Var,
    // Optional non-linearity applied to the weighted sum; `None` = linear output.
    activation: Option<Activation>,
}

impl Neural {
    /// Creates a neuron accepting `input_size` inputs.
    ///
    /// Weights and bias are initialized uniformly at random in `[0, 1)`.
    /// `activation` is applied to the weighted sum in [`Neural::forward`];
    /// pass `None` for a purely linear neuron.
    pub fn new(input_size: usize, activation: Option<Activation>) -> Self {
        let mut rng = rand::rng();
        // `0..input_size` is already an iterator — no `.into_iter()` needed.
        let weights = (0..input_size)
            .map(|_| var!(rng.random_range(0f64..1f64)))
            .collect();
        let bias = var!(rng.random_range(0f64..1f64));
        Self { weights, bias, activation }
    }

    /// Computes `activation(w · x + b)` for the input vector `xs`.
    ///
    /// # Panics
    /// Panics if `xs.len()` does not match the number of weights.
    pub fn forward(&self, xs: &[Var]) -> Var {
        assert_eq!(
            xs.len(),
            self.weights.len(),
            "input length must match the neuron's input size"
        );
        let res = xs
            .iter()
            .zip(self.weights.iter())
            .map(|(x, w)| -> Var { x * w })
            .sum::<Var>()
            + &self.bias;

        // Match on a borrow so this works whether or not `Activation: Copy`.
        match &self.activation {
            Some(activation) => activation.forward(&res),
            None => res,
        }
    }

    /// Iterates over all trainable parameters: the weights, then the bias.
    pub fn parameters(&self) -> impl Iterator<Item = &Var> {
        // `iter::once` is the idiomatic single-element iterator.
        self.weights.iter().chain(std::iter::once(&self.bias))
    }
}

#[cfg(test)]
mod test {
    #[allow(unused)]
    use super::*;

    /// Smoke test: a forward pass followed by backprop must not panic.
    #[test]
    fn test_forward() {
        let neural = Neural::new(2, None);
        let xs = [var!(0), var!(0)];
        let y = neural.forward(&xs);
        y.backward();
    }

    /// Fits a single tanh neuron to one (input, target) pair using
    /// plain gradient descent.
    #[test]
    fn test_train() {
        let xs = [var!(0.5), var!(0.5)];
        let target = var!(0.67);

        let neural = Neural::new(2, Some(Activation::Tanh));
        // Model: y = tanh(w0 * x0 + w1 * x1 + b)

        for t in 0..5000 {
            let y = neural.forward(&xs);
            // Squared-error loss against the scalar target.
            let loss = (&y - &target).pow(2.);
            println!("{t}, y = {}, loss = {}", y.data(), loss.data());

            // Clear accumulated gradients before each backward pass.
            // NOTE(review): `zero_gard`/`gard` look like typos for
            // `zero_grad`/`grad` in the `Var` API — worth renaming there.
            for param in neural.parameters() {
                param.zero_gard();
            }

            loss.backward();

            // Gradient-descent step with a fixed learning rate of 0.001.
            for param in neural.parameters() {
                param.increase_data(-0.001 * param.gard());
            }
        }
    }
}
