use num_traits::Num;
use crate::autograd::function::{ForwardArgs, Function};
use crate::autograd::function_ctx::FunctionCtx;
use crate::Tensor;

/// Autograd node for the ReLU activation.
///
/// NOTE(review): despite the `Backward` suffix, `forward` computes the ReLU
/// activation itself — presumably the suffix follows this framework's naming
/// convention for autograd nodes; confirm against sibling ops.
pub struct ReluBackward<T> {
    // Autograd context; holds the input tensor saved during `forward`
    // so `backward` can mask the incoming gradient with it.
    ctx: FunctionCtx<T>,
}

impl<T> Function<T> for ReluBackward<T>
    where
        T: Copy + Num + PartialOrd + 'static,
{
    fn new(ctx: FunctionCtx<T>) -> Self {
        Self { ctx }
    }

    fn ctx(&self) -> &FunctionCtx<T> {
        &self.ctx
    }

    fn forward(ctx: &mut FunctionCtx<T>, args: ForwardArgs<T>) -> Tensor<T> {
        if let ForwardArgs::TensorOther(tensor, _) = args {
            ctx.save_tensors([tensor.clone()].into());
            tensor.map(|x| if x > T::zero() { x } else { T::zero() })
        } else {
            unreachable!()
        }
    }

    fn backward(&self, grad_output: Tensor<T>) -> Vec<Option<Tensor<T>>> {
        let tensor: &Tensor<T> = &self.ctx.tensors()[0];
        let grad = tensor.zip(&grad_output,
            |a, b| if a > T::zero() { b } else { T::zero() });
        [grad.into()].into()
    }
}
