use itertools::{EitherOrBoth, Itertools};
use num_traits::Num;
use crate::autograd::function::{ForwardArgs, Function};
use crate::autograd::function_ctx::FunctionCtx;
use crate::Tensor;

/// Autograd function node for the tensor `expand` (broadcast) operation.
///
/// Construction stores a [`FunctionCtx`] which is later exposed through
/// [`Function::ctx`]; the forward pass produces a zero-copy broadcast view
/// of the input tensor (see the `Function` impl below).
pub struct ExpandBackward<T> {
    // Autograd bookkeeping context captured when this node was created.
    ctx: FunctionCtx<T>,
}

impl<T> Function<T> for ExpandBackward<T>
    where
        T: Copy + Num + 'static,
{
    fn new(ctx: FunctionCtx<T>) -> Self {
        Self { ctx }
    }

    fn ctx(&self) -> &FunctionCtx<T> {
        &self.ctx
    }

    /// Expands `tensor` to the target `shape` without copying data.
    ///
    /// Broadcasting is expressed purely through strides: a dimension of size 1
    /// is repeated by giving it a stride of 0, and dimensions that exist only
    /// in the target shape (extra leading dimensions) also get stride 0. If
    /// the target shape already equals the tensor's shape, the tensor is
    /// returned as-is (cloned).
    ///
    /// # Panics
    /// Panics when a dimension of size != 1 does not match the corresponding
    /// target size, or when the target shape has fewer dimensions than the
    /// tensor. Also panics (`unreachable!`) if `args` is not the expected
    /// `ForwardArgs::TensorOther` variant.
    fn forward(_ctx: &mut FunctionCtx<T>, args: ForwardArgs<T>) -> Tensor<T> {
        if let ForwardArgs::TensorOther(tensor, shape) = args {
            // Fast path: nothing to expand.
            if tensor.shape().as_ref() == shape {
                return tensor.clone();
            }

            let shape = shape
                .to_vec()
                .into_boxed_slice();

            // Walk both shapes from the innermost (last) dimension outwards;
            // `i` counts dimensions from the end, so the matching original
            // stride is indexed from the back as well.
            let strides = shape
                .iter()
                .rev()
                .zip_longest(tensor.shape().iter().rev())
                .enumerate()
                .map(|(i, pair)| match pair {
                    EitherOrBoth::Both(size_theirs, size_ours) => {
                        if size_ours == size_theirs {
                            // Dimension unchanged: keep the original stride.
                            tensor.stride()[tensor.stride().len() - 1 - i]
                        } else if *size_ours == 1 {
                            // Broadcast a size-1 dimension: stride 0 makes
                            // every index along this axis hit the same element.
                            0
                        } else {
                            panic!(
                                "cannot expand dimension of size {} to size {}: \
                                 only dimensions of size 1 can be expanded",
                                size_ours, size_theirs
                            )
                        }
                    },
                    // Dimension present only in the target shape: new leading
                    // broadcast axis, stride 0.
                    EitherOrBoth::Left(_) => 0,
                    // Tensor has more dimensions than the target shape.
                    EitherOrBoth::Right(_) => panic!(
                        "cannot expand a tensor with {} dimensions to a shape with {} dimensions",
                        tensor.shape().len(),
                        shape.len()
                    ),
                })
                .rev()
                .collect::<Box<[usize]>>();

            // Build a view sharing the original data buffer with new metadata.
            return (tensor.data(), tensor.offset(), shape, strides).into();
        }
        unreachable!("ExpandBackward::forward expects ForwardArgs::TensorOther")
    }

    fn backward(&self, _grad_output: Tensor<T>) -> Vec<Option<Tensor<T>>> {
        // TODO: sum `grad_output` over the broadcast (stride-0) dimensions to
        // recover the gradient w.r.t. the original, unexpanded tensor.
        unimplemented!("backward pass for expand is not implemented yet")
    }
}
