use core::{alloc::Layout, ptr::NonNull};

/// A memory chunk, identified by its base address.
///
/// Every chunk starts with an 8-byte metadata word:
/// - The lower 3 bits are the status of the chunk:
///    - 001: The current chunk is in use.
///    - 010: The lower chunk is in use, and the current chunk is free.
///    - 011: Both the lower and the current chunks are in use.
///    - 000: Both the lower and the current chunks are free.
///           However, this situation should not happen for a real chunk,
///           because adjacent free chunks should be coalesced.
///           So when 000 occurs, the `size` part of the metadata is actually
///           the padding between the metadata and user data (for alignment),
///           i.e., the "chunk" itself is only a padding marker.
/// - The remaining bits are the size of the chunk in bytes.
///   (Because the size of the chunk is always a multiple of 8, the lower
///   3 bits are always 0 and can be reused for the status flags above.)
///
/// While a chunk is FREE it additionally carries free-list links:
/// - The next 8 bytes are the pointer to the previous chunk in the list.
/// - The next 8 bytes are the pointer to the next chunk in the list.
/// - The last 8 bytes mirror the chunk size (the "footer"), so the
///   physically higher chunk can locate this chunk's start.
///
/// While a chunk is IN USE, everything after the metadata word is user data.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Chunk {
    // Base address of the chunk: points at the metadata word.
    pub data: NonNull<u8>,
    // End of the managed memory region; carried along for derived chunks.
    pub limit: NonNull<u8>,
}

/// The memory chunk management with coalescing and splitting.
///
/// All accessor methods are `unsafe`: they treat `self.data` as a raw
/// pointer to a live chunk header, so the caller must guarantee the chunk
/// is valid and lies within the managed region.
impl Chunk {
    // Mask selecting the low 3 status bits of the metadata word;
    // `!META_MASK` therefore selects the size/padding bits.
    pub const META_MASK: u64 = 0b111;
    // Bit index of the "current chunk is in use" flag in the metadata word.
    pub const IN_USE_BIT: u32 = 0;
    // Bit index of the "lower (physically adjacent) chunk is in use" flag.
    pub const LOWER_IN_USE_BIT: u32 = 1;
    // The minimum size of a chunk
    // 8 bytes for prev ptr + 8 bytes for next ptr + 8 bytes for metadata
    // NOTE(review): the comment above sums to 24, not 32 — presumably the
    // extra 8 bytes reserve room for the footer a free chunk stores at its
    // end (see `get_footer`); confirm against the allocator that uses this.
    pub const MIN_SIZE: usize = 32;
    // Size in bytes of the metadata word at the start of every chunk.
    pub const META_SIZE: usize = 8;

    /// Create a new chunk from the given data and limit.
    ///
    /// `data` is the chunk's base (metadata) address; `limit` is the end of
    /// the managed region. Neither pointer is dereferenced here.
    pub fn new(data: NonNull<u8>, limit: NonNull<u8>) -> Self {
        Chunk { data, limit }
    }

    /// Get the immutable metadata (the first 8 bytes of the chunk).
    pub unsafe fn get_meta(&self) -> u64 {
        unsafe { *self.data.cast::<u64>().as_ref() }
    }

    /// Get the mutable metadata.
    ///
    /// Returns a `&mut u64` aliasing the chunk's first 8 bytes; the caller
    /// must not hold any other reference to them while this one lives.
    pub unsafe fn get_mut_meta(&mut self) -> &mut u64 {
        unsafe { self.data.cast::<u64>().as_mut() }
    }

    /// Get whether the current chunk is in use (IN_USE_BIT of the metadata).
    pub unsafe fn get_cur_in_use(&self) -> bool {
        let meta = self.get_meta();
        meta & (1 << Self::IN_USE_BIT) != 0
    }

    /// Set or clear the "current chunk in use" flag, leaving the other
    /// metadata bits (size and LOWER_IN_USE_BIT) untouched.
    pub unsafe fn set_cur_in_use(&mut self, in_use: bool) {
        let meta = unsafe { self.get_mut_meta() };
        if in_use {
            *meta |= 1 << Self::IN_USE_BIT;
        } else {
            *meta &= !(1 << Self::IN_USE_BIT);
        }
    }

    /// Get whether the lower chunk is in use (LOWER_IN_USE_BIT).
    pub unsafe fn get_lower_in_use(&self) -> bool {
        let meta = self.get_meta();
        meta & (1 << Self::LOWER_IN_USE_BIT) != 0
    }

    /// Set the status of the lower chunk, leaving all other bits untouched.
    pub unsafe fn set_lower_in_use(&mut self, in_use: bool) {
        let meta = unsafe { self.get_mut_meta() };
        if in_use {
            *meta |= 1 << Self::LOWER_IN_USE_BIT;
        } else {
            *meta &= !(1 << Self::LOWER_IN_USE_BIT);
        }
    }

    /// Get the size of the chunk (the metadata word with the 3 status bits
    /// masked off).
    pub unsafe fn get_size(&self) -> usize {
        let meta = self.get_meta();
        (meta & !Self::META_MASK) as usize
    }

    /// Get the padding of the chunk.
    ///
    /// Bit-identical to `get_size`: the same field doubles as the padding
    /// amount when this "chunk" is a padding marker (status bits 000, see
    /// the type-level doc). The two names exist to make call sites state
    /// which interpretation they rely on.
    pub unsafe fn get_padding(&self) -> usize {
        let meta = self.get_meta();
        (meta & !Self::META_MASK) as usize
    }

    /// Store `size` into the metadata word, preserving the 3 status bits.
    /// NOTE(review): if `size` is not a multiple of 8, its low bits would
    /// corrupt the status flags — callers are expected to pass 8-aligned
    /// sizes (see the rounding in `split`).
    pub unsafe fn set_size(&mut self, size: usize) {
        let meta = unsafe { self.get_mut_meta() };
        *meta = (*meta & Self::META_MASK) | (size as u64);
    }

    /// Get the previous chunk in the free list (pointer stored at offset 8),
    /// or `None` if the stored pointer is null. Only meaningful while this
    /// chunk is free — an in-use chunk's offset 8 holds user data.
    pub unsafe fn get_prev(&self) -> Option<Self> {
        let prev_ptr = *self.data.as_ptr().add(8).cast::<*mut u8>();
        if prev_ptr.is_null() {
            None
        } else {
            Some(Self::new(NonNull::new_unchecked(prev_ptr), self.limit))
        }
    }

    /// Set the previous chunk link (offset 8); `None` stores a null pointer.
    pub unsafe fn set_prev(&mut self, prev: Option<Self>) {
        let prev_ptr = prev.map_or(core::ptr::null_mut(), |chunk| chunk.data.as_ptr());
        unsafe {
            *self.data.as_ptr().add(8).cast::<*mut u8>() = prev_ptr;
        }
    }

    /// Get the next chunk in the free list (pointer stored at offset 16),
    /// or `None` if the stored pointer is null. Only meaningful while free.
    pub unsafe fn get_next(&self) -> Option<Self> {
        let next_ptr = *self.data.as_ptr().add(16).cast::<*mut u8>();
        if next_ptr.is_null() {
            None
        } else {
            Some(Self::new(NonNull::new_unchecked(next_ptr), self.limit))
        }
    }

    /// Set the next chunk link (offset 16); `None` stores a null pointer.
    pub unsafe fn set_next(&mut self, next: Option<Self>) {
        let next_ptr = next.map_or(core::ptr::null_mut(), |chunk| chunk.data.as_ptr());
        unsafe {
            *self.data.as_ptr().add(16).cast::<*mut u8>() = next_ptr;
        }
    }

    /// Get whether the chunk is the top chunk, i.e. has no `next` link.
    /// NOTE(review): this tests the free-list link, not physical position;
    /// `set_cur_status` relies on the list tail also being the highest chunk
    /// in memory — confirm that invariant in the allocator.
    pub unsafe fn is_top(&self) -> bool {
        self.get_next().is_none()
    }

    /// Get the footer of the chunk: a pointer to its last 8 bytes.
    /// A free chunk mirrors its size there (written by `set_cur_status`),
    /// so the higher neighbor can find this chunk's start
    /// (read back in `get_free_lower_chunk`).
    pub unsafe fn get_footer(&self) -> *mut u64 {
        self.data.as_ptr().add(self.get_size() - 8).cast::<u64>()
    }

    /// Get the physically adjacent higher chunk.
    ///
    /// Works for both interpretations of the metadata field: for a regular
    /// chunk it advances by the chunk size; for a padding marker (status
    /// 000) it advances by the stored padding. `get_padding` and `get_size`
    /// read the same bits, so one formula covers both.
    pub unsafe fn get_higher_chunk(&self) -> Self {
        // use the padding store in the `size` part of the metadata
        let padding = unsafe { self.get_padding() };
        let higher_data = unsafe { self.data.as_ptr().add(padding) };
        let higher_limit = self.limit;
        Self::new(NonNull::new_unchecked(higher_data), higher_limit)
    }

    /// Set the status of the current chunk: in-use flag and size together,
    /// then keep the neighboring bookkeeping consistent.
    pub unsafe fn set_cur_status(&mut self, in_use: bool, size: usize) {
        let prev_in_use = unsafe { self.get_cur_in_use() };
        // set cur in use
        unsafe { self.set_cur_in_use(in_use) };
        // set size
        unsafe { self.set_size(size) };

        // Propagate our new status into the higher chunk's LOWER_IN_USE bit.
        // The `is_top` check is only reached when the chunk was free and
        // stays free (so its next link is valid); it skips the update when
        // there is no higher chunk to touch. See NOTE(review) on `is_top`.
        if prev_in_use || in_use || unsafe { !self.is_top() } {
            unsafe { self.get_higher_chunk().set_lower_in_use(in_use) };
        }

        // A free chunk mirrors its size into the footer so the higher
        // neighbor can walk back to it during coalescing.
        if !in_use {
            let footer = unsafe { self.get_footer() };
            unsafe { *footer = size as u64 };
        }
    }

    /// Compute the aligned user-data pointer for `layout` inside this chunk,
    /// or `None` if the chunk is too small to fit it.
    pub unsafe fn get_user_data(&self, layout: Layout) -> Option<NonNull<u8>> {
        // get the address of the user data (just past the metadata word)
        let addr = unsafe { self.data.as_ptr().offset(8) };
        // get the offset for alignment
        let align_offset = addr.align_offset(layout.align());

        // Space needed: alignment gap + payload; space available: chunk
        // size minus the metadata word.
        let required_size = layout.size() + align_offset;
        let remaining_size = unsafe { self.get_size() } - 8;
        if required_size > remaining_size {
            None
        } else {
            Some(NonNull::new_unchecked(unsafe { addr.add(align_offset) }))
        }
    }

    /// Reconstruct the chunk from a user-data pointer previously handed out.
    ///
    /// The 8 bytes below `user_data` are either the real chunk header
    /// (for align <= 8 there is never a gap) or, when the in-use bit is
    /// clear, a padding marker (status 000) whose size field holds the
    /// distance back to the real chunk start.
    /// NOTE(review): the code that writes that padding marker is not in
    /// this file — presumably the allocation path; confirm.
    pub unsafe fn from_user_data(user_data: *mut u8, layout: Layout, limit: NonNull<u8>) -> Self {
        let mut chunk = Self::new(
            NonNull::new_unchecked(unsafe { user_data.offset(-8) }),
            limit,
        );
        if layout.align() <= 8 {
            return chunk;
        }
        if unsafe { !chunk.get_cur_in_use() } {
            chunk.data =
                NonNull::new_unchecked(unsafe { chunk.data.as_ptr().sub(chunk.get_padding()) });
        }
        chunk
    }

    /// Split this chunk so it is just big enough for `layout`, returning the
    /// free remainder as a new chunk, or `None` when the leftover would be
    /// smaller than `MIN_SIZE` (in which case nothing is changed).
    ///
    /// Panics if the chunk cannot hold `layout` at all (the `unwrap` below);
    /// callers are expected to have checked `get_user_data` first.
    pub unsafe fn split(&mut self, layout: Layout) -> Option<Self> {
        let user_data = (unsafe { self.get_user_data(layout) }).unwrap();

        // Alignment gap between our header and the header slot implied by
        // the aligned user data (user_data - 8).
        let padding_size = unsafe {
            user_data
                .as_ptr()
                .offset(-8)
                .offset_from(self.data.as_ptr())
        };

        // Needed: metadata word + alignment gap + payload, but never less
        // than a whole minimum chunk.
        let mut new_size = usize::max(8 + padding_size as usize + layout.size(), Self::MIN_SIZE);
        // align the size to 8 bytes
        if new_size % 8 != 0 {
            new_size += 8 - new_size % 8;
        }
        let remain_size = unsafe { self.get_size() - new_size };

        if remain_size < Self::MIN_SIZE {
            None
        } else {
            // Carve the tail into a new free chunk, then shrink ourselves.
            let mut remain = Self::new(
                NonNull::new_unchecked(unsafe { self.data.as_ptr().add(new_size) }),
                self.limit,
            );
            unsafe {
                remain.set_cur_status(false, remain_size);
                self.set_cur_status(self.get_cur_in_use(), new_size);
            }
            Some(remain)
        }
    }

    /// Absorb `chunk` into this one by growing our size by its size.
    /// The caller must ensure `chunk` is the physically adjacent higher
    /// chunk; no pointers of `chunk` are read here.
    pub unsafe fn coalesce(&mut self, chunk: Self) {
        unsafe { self.set_cur_status(self.get_cur_in_use(), self.get_size() + chunk.get_size()) }
    }

    /// Get the physically lower chunk if it is free, or `None` if our
    /// LOWER_IN_USE bit says it is in use. When free, the lower chunk's
    /// footer (the 8 bytes just below our header) holds its size, which
    /// lets us walk back to its start.
    pub unsafe fn get_free_lower_chunk(&self) -> Option<Self> {
        if unsafe { self.get_lower_in_use() } {
            None
        } else {
            let lower_size = unsafe { *self.data.as_ptr().offset(-8).cast::<u64>() };
            Some(Self::new(
                NonNull::new_unchecked(unsafe { self.data.as_ptr().sub(lower_size as usize) }),
                self.limit,
            ))
        }
    }

    /// Get the physically higher chunk if it is free, or `None` if its
    /// in-use bit is set.
    /// NOTE(review): unlike `set_cur_status`, no top-of-heap guard here —
    /// presumably the caller never asks past the last chunk; confirm.
    pub unsafe fn get_free_higher_chunk(&self) -> Option<Self> {
        let higher_chunk = self.get_higher_chunk();
        if unsafe { higher_chunk.get_cur_in_use() } {
            None
        } else {
            Some(higher_chunk)
        }
    }
}
