use super::atomic::{AtomicUsize, Ordering};
use alloc::boxed::Box;
use core::marker::PhantomData;
use core::ops::Deref;
use core::ptr;
use core::ptr::NonNull;

/// Soft cap on the strong count, mirroring `std::sync::Arc`: keeping the
/// counter at or below `isize::MAX` leaves a full half of the `usize` range
/// as headroom, so an overflowing increment can be detected (see `Clone`)
/// long before the counter could actually wrap.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;

/// Heap allocation shared by every clone of an [`Arc`]: the strong reference
/// count followed by the user's value.
///
/// `#[repr(C)]` pins the field order so `strong` sits at a predictable offset
/// — presumably for raw-pointer code elsewhere in the crate; TODO confirm
/// whether anything outside this file relies on the exact layout.
#[repr(C)]
struct ArcInner<T: Sized> {
    /// Number of live `Arc` handles to this allocation.
    strong: AtomicUsize,
    /// The shared value itself.
    data: T,
}

/// A minimal atomically reference-counted shared pointer, modeled on
/// `std::sync::Arc` but supporting strong counts only (no `Weak` type is
/// visible in this file).
pub struct Arc<T: Sized> {
    /// Pointer to the shared allocation; created by `Box::into_raw` in
    /// [`Arc::new`], so it is never null and stays valid while any clone
    /// is alive.
    ptr: NonNull<ArcInner<T>>,
    /// Tells drop-check that dropping an `Arc<T>` may drop an
    /// `ArcInner<T>` (and therefore a `T`), despite only holding a raw
    /// pointer.
    phantom: PhantomData<ArcInner<T>>,
}

// SAFETY: sending an `Arc<T>` to another thread can end up moving/dropping
// `T` there (last handle) and sharing `&T` (via clones), so `T: Send + Sync`
// is required — the same bounds `std::sync::Arc` uses.
unsafe impl<T: Sized + Sync + Send> Send for Arc<T> {}
// SAFETY: `&Arc<T>` allows cloning (a new owner on another thread) and hands
// out `&T` across threads, so the same `Send + Sync` bounds apply.
unsafe impl<T: Sized + Sync + Send> Sync for Arc<T> {}

impl<T: Sized> Arc<T> {
    /// Allocates a fresh `ArcInner` on the heap with a strong count of 1
    /// and returns the first handle to it.
    pub fn new(data: T) -> Self {
        let inner = Box::new(ArcInner {
            strong: AtomicUsize::new(1),
            data,
        });
        Self {
            // SAFETY: `Box::into_raw` never returns a null pointer.
            ptr: unsafe { NonNull::new_unchecked(Box::into_raw(inner)) },
            phantom: PhantomData,
        }
    }

    /// Builds a handle from a raw inner pointer.
    ///
    /// # Safety
    /// `ptr` must point to a live `ArcInner<T>` (originating from
    /// `Box::into_raw`), and the caller transfers ownership of exactly one
    /// strong reference to the returned `Arc`.
    unsafe fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
        Self {
            ptr,
            phantom: PhantomData,
        }
    }

    /// Like [`Self::from_inner`], but for a raw `*mut` pointer.
    ///
    /// # Safety
    /// Same contract as `from_inner`, and additionally `ptr` must be
    /// non-null.
    unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
        Self::from_inner(NonNull::new_unchecked(ptr))
    }

    /// Drops the shared value in place without freeing the allocation.
    ///
    /// NOTE(review): nothing in this file calls this, and no `Weak` type is
    /// visible here — the comment below reads like it was carried over from
    /// std's `Arc`. Confirm whether a `Weak` exists elsewhere in the crate
    /// or whether this is dead code (`Drop` below frees via `Box::from_raw`
    /// instead).
    ///
    /// # Safety
    /// Must be called at most once, only after the strong count has reached
    /// zero, and the data must not be accessed afterwards.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Destroy the data at this time, even though we must not free the box
        // allocation itself (there might still be weak pointers lying around).
        ptr::drop_in_place(Self::get_mut_unchecked(self));
    }

    /// Returns a mutable reference to the shared value without checking
    /// uniqueness.
    ///
    /// # Safety
    /// The caller must guarantee no other `Arc` clone concurrently accesses
    /// the data for the lifetime of the returned reference (e.g. this is
    /// provably the only handle).
    #[inline]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        // We are careful to *not* create a reference covering the "count" fields, as
        // this would alias with concurrent access to the reference counts (e.g. by `Weak`).
        &mut (*this.ptr.as_ptr()).data
    }

    /// Shared view of the inner allocation (count + data).
    fn inner(&self) -> &ArcInner<T> {
        // SAFETY: `self.ptr` was produced by `Box::into_raw` and remains
        // valid for as long as any handle (including `self`) is alive.
        unsafe { &*(self.ptr.as_ptr() as *const ArcInner<T>) }
    }
}

impl<T: Sized> Clone for Arc<T> {
    /// Returns a new handle to the same allocation, bumping the strong count.
    fn clone(&self) -> Self {
        // Relaxed is sufficient here (as in std's `Arc`): creating a new
        // reference needs no synchronization with accesses to the data; the
        // Release/Acquire pairing on the decrement path in `Drop` provides
        // the ordering that matters.
        let old_size = self.inner().strong.fetch_add(1, Ordering::Relaxed);

        // Guard against refcount overflow (reachable only via pathological
        // `mem::forget`-style leak loops). std aborts at this point; in this
        // `alloc`-only module a panic is the strongest response available.
        if old_size > MAX_REFCOUNT {
            panic!("ref count exceeds {}", MAX_REFCOUNT);
        }

        // SAFETY: `self.ptr` points to a live `ArcInner` and we just added
        // one strong reference on behalf of the new handle. `NonNull` is
        // `Copy`, so no raw-pointer round trip is needed.
        unsafe { Self::from_inner(self.ptr) }
    }
}

impl<T: Sized> Deref for Arc<T> {
    type Target = T;

    /// Borrows the shared value; the borrow is tied to this handle's
    /// lifetime, which keeps the allocation alive.
    #[inline]
    fn deref(&self) -> &Self::Target {
        let inner = self.inner();
        &inner.data
    }
}

impl<T: Sized> Drop for Arc<T> {
    /// Decrements the strong count and, if this was the last handle, drops
    /// the value and frees the allocation.
    fn drop(&mut self) {
        // Release ordering publishes this thread's prior uses of `data` to
        // whichever thread performs the final decrement.
        if self.inner().strong.fetch_sub(1, Ordering::Release) > 1 {
            return;
        }

        // We are the last handle. This Acquire load synchronizes-with the
        // Release decrements performed by every other dropped clone, making
        // all of their accesses to `data` happen-before the destruction
        // below. Without it, freeing here would race with those accesses
        // (std's `Arc` uses an Acquire fence at exactly this point; a load
        // with Acquire ordering is the equivalent available through our
        // atomic wrapper).
        self.inner().strong.load(Ordering::Acquire);

        // SAFETY: the strong count just reached zero, so `self.ptr` is the
        // only remaining pointer to the allocation, which was created by
        // `Box::into_raw` in `Arc::new`. Reconstituting the `Box` drops the
        // data and frees the memory.
        unsafe {
            drop(Box::from_raw(self.ptr.as_ptr()));
        }
    }
}
