use std::rc::Rc;
use num_traits::Num;
use crate::autograd::function::{ForwardArgs, Function};
use crate::autograd::function_ctx::FunctionCtx;
use crate::misc::shape_to_strides;
use crate::Tensor;

/// Autograd node for a tensor `select` (leading-dimension indexing) operation.
///
/// The forward pass produces a view into the input tensor; the backward pass
/// scatters the incoming gradient back into a zero-filled gradient of the
/// input's original shape.
pub struct SelectBackward<T> {
    // Function context; `forward` stores the index and the input's shape
    // here, and `backward` reads them back via `others()`.
    ctx: FunctionCtx<T>,
}

impl<T> Function<T> for SelectBackward<T>
    where
        T: Copy + Num + 'static,
{
    /// Wraps the given context into a new backward node.
    fn new(ctx: FunctionCtx<T>) -> Self {
        Self { ctx }
    }

    fn ctx(&self) -> &FunctionCtx<T> {
        &self.ctx
    }

    /// Selects a sub-tensor by indexing the leading dimensions of the input
    /// with `index`, returning a view that shares the underlying data buffer.
    ///
    /// Saves `index` and the input's shape in `ctx` for the backward pass.
    ///
    /// # Panics
    /// Panics when `index` has more entries than the tensor has dimensions,
    /// or when any index entry is out of bounds for its dimension.
    fn forward(ctx: &mut FunctionCtx<T>, args: ForwardArgs<T>) -> Tensor<T> {
        if let ForwardArgs::TensorOther(tensor, index) = args {
            let cell = (*tensor.ptr).borrow();
            ctx.save_others([index, cell.shape.as_ref()].into());

            assert!(
                index.len() <= cell.shape.len(),
                // Report the tensor's dimensionality, not the index count:
                // the message describes the tensor, so print its dimension.
                "too many indices for tensor of dimension {:?}", cell.shape.len());

            // Validate each index entry against its dimension's size.
            // (A plain loop replaces the former `.inspect(..).count()` chain,
            // which abused a debugging adaptor to drive side effects.)
            for (dim, (size, idx)) in cell.shape.iter()
                .zip(index.as_ref().iter())
                .enumerate()
            {
                assert!(
                    size > idx,
                    "index {:?} is out of bounds for dimension {:?} with size {:?}",
                    idx, dim, size);
            }

            // Linear offset of the selected sub-tensor within the data
            // buffer. Note: the zip yields (stride, index) in that order —
            // the previous closure had the binding names swapped.
            let offset = cell.strides
                .iter()
                .zip(index)
                .map(|(stride, idx)| stride * idx)
                .sum();

            // The view keeps only the dimensions that were not indexed.
            let shape = cell.shape
                .iter()
                .skip(index.len())
                .copied()
                .collect();

            let strides = cell.strides
                .iter()
                .skip(index.len())
                .copied()
                .collect();

            // Share the data buffer; only offset/shape/strides differ.
            return (Rc::clone(&cell.data), offset, shape, strides).into()
        }
        unreachable!()
    }

    /// Scatters `grad_output` into a zero-filled gradient shaped like the
    /// original input, starting at the linear offset addressed by the saved
    /// index.
    ///
    /// NOTE(review): `grad[start + i]` assumes the selected region is
    /// contiguous under the recomputed row-major strides — TODO confirm this
    /// holds for non-contiguous inputs.
    fn backward(&self, grad_output: Tensor<T>) -> Vec<Option<Tensor<T>>> {
        let index = self.ctx.others()[0];
        let shape = self.ctx.others()[1].to_vec().into_boxed_slice();

        // Recompute contiguous strides for the saved shape; the gradient
        // buffer allocated below is fresh and contiguous, so the input's
        // original strides are not needed here.
        let start: usize = index
            .iter()
            .zip(shape_to_strides(shape.as_ref()).iter())
            .map(|(idx, stride)| idx * stride)
            .sum();

        let mut grad = vec![T::zero(); shape.iter().product()]
            .into_boxed_slice();

        // Copy the upstream gradient into the selected region; everything
        // outside it stays zero.
        grad_output
            .flat_iter()
            .enumerate()
            .for_each(|(i, value)| grad[start + i] = value);

        let tensor = Tensor::from((grad.into(), shape));
        [tensor.into()].into()
    }
}
