use std::fmt::Debug;
use num_traits::Num;
use crate::autograd::function_ctx::FunctionCtx;
use crate::Tensor;

/// Borrowed argument bundle handed to a `Function`'s forward pass.
///
/// Variants distinguish how many tensor operands participate in the op,
/// which `apply` uses when capturing inputs for the backward pass.
pub enum ForwardArgs<'a, T>{
    /// Binary tensor operation: left- and right-hand tensor operands.
    TensorTensor(&'a Tensor<T>, &'a Tensor<T>),
    /// Single tensor plus auxiliary index data (e.g. a shape or axis list —
    /// presumably; exact meaning depends on the concrete `Function`. TODO confirm).
    TensorOther(&'a Tensor<T>, &'a [usize]),
}

impl<'a, T> ForwardArgs<'a, T>
    where
        T: Copy + Num + 'static,
{
    /// Reports whether any tensor operand participates in gradient
    /// tracking — i.e. whether the op's output must record a `grad_fn`.
    pub fn requires_grad(&self) -> bool {
        // Normalize both variants into (primary operand, optional second
        // operand), then fold the flags together.
        let (first, second) = match self {
            Self::TensorTensor(lhs, rhs) => (*lhs, Some(*rhs)),
            Self::TensorOther(tensor, _) => (*tensor, None),
        };
        first.requires_grad() || second.map_or(false, |t| t.requires_grad())
    }
}

/// A differentiable operation node in the autograd graph.
///
/// Implementors supply `forward` (computes the output tensor) and
/// `backward` (computes gradients w.r.t. each forward input). `apply`
/// ties the two together by attaching the constructed function as the
/// output tensor's `grad_fn` whenever gradients are required.
pub trait Function<T> {
    /// Constructs the function node from the context captured during `forward`.
    fn new(ctx: FunctionCtx<T>) -> Self
        where
            Self: Sized;

    /// Returns the context captured during the forward pass.
    fn ctx(&self) -> &FunctionCtx<T>;

    /// Computes the output tensor. Implementations may stash whatever they
    /// need for the backward pass inside `ctx`.
    fn forward(ctx: &mut FunctionCtx<T>, args: ForwardArgs<T>) -> Tensor<T>
        where
            T: Copy + Num + 'static,
            Self: Sized + 'static;

    /// Returns one gradient slot per forward input, in the same order the
    /// inputs were captured by `apply`; `None` marks an input that receives
    /// no gradient from this op.
    fn backward(&self, grad_output: Tensor<T>) -> Vec<Option<Tensor<T>>>;

    /// Runs the forward pass and, when any input requires gradients,
    /// records `Self` as the output tensor's `grad_fn`.
    fn apply(args: ForwardArgs<T>) -> Tensor<T>
        where
            T: Copy + Num + 'static,
            Self: Sized + 'static,
    {
        // Capture the flag up front, before `args` is consumed by `forward`.
        let requires_grad = args.requires_grad();
        // Clone the input tensors so `apply_backward` can revisit them later.
        // (Matching `args` by value only copies the `&Tensor` bindings, so
        // `args` itself remains usable below.)
        let forward_inputs = match args {
            ForwardArgs::TensorTensor(lhs, rhs) =>
                [lhs.clone(), rhs.clone()].into(),
            ForwardArgs::TensorOther(lhs, _) => [lhs.clone()].into(),
        };

        let mut ctx = FunctionCtx::new(forward_inputs);
        let tensor = Self::forward(&mut ctx, args);

        {
            // Scoped so the RefCell borrow is released before returning.
            let mut data = (*tensor.ptr).borrow_mut();
            data.requires_grad = requires_grad;
            if data.requires_grad {
                data.grad_fn = Some(Box::new(Self::new(ctx)));
            }
        }
        tensor
    }

    /// Propagates `grad_output` through this node: pairs each captured
    /// forward input with its gradient and recurses into `backward` on
    /// every input that requires a gradient.
    fn apply_backward(&self, grad_output: Tensor<T>)
        where
            T: Copy + Num + 'static,
    {
        // `backward` already yields `Vec<Option<Tensor<T>>>`; the previous
        // per-element identity `.into()` conversion was a no-op and is gone.
        let grad_inputs = self.backward(grad_output);

        let forward_inputs = self.ctx().forward_inputs();
        assert_eq!(
            forward_inputs.len(),
            grad_inputs.len(),
            "backward must return exactly one gradient slot per forward input"
        );

        forward_inputs
            .iter()
            .zip(grad_inputs)
            .filter(|(tensor, _)| tensor.requires_grad())
            .for_each(|(tensor, grad)| {
                if let Some(grad) = grad {
                    tensor.backward(grad);
                }
            });
    }
}
