use std::arch::asm;
use std::sync::atomic::Ordering;

use super::*;

pub const WORD_SIZE: usize = core::mem::size_of::<usize>();
pub const WORD_BITS: usize = usize::BITS as usize;


/// Number of machine words needed to hold `size` bytes, rounding up.
///
/// Returns 0 for `size == 0`. Written without the classic
/// `(size + WORD_SIZE - 1) / WORD_SIZE` form, which overflows (panicking in
/// debug builds) for sizes within `WORD_SIZE - 1` of `usize::MAX`.
#[inline]
pub fn _mi_wsize_from_size(size: usize) -> usize {
    if size == 0 {
        0
    } else {
        (size - 1) / WORD_SIZE + 1
    }
}

#[inline]
pub fn mi_heap_is_default(heap: *const MiHeap) -> bool {
    // A heap is "default" when it is the thread's current default heap.
    let default_heap = get_heap_default();
    heap == default_heap
}
#[inline]
pub fn mi_heap_is_backing(heap: *const MiHeap) -> bool {
    // A heap is the "backing" heap when its thread-local data records it as
    // the backing heap for this thread.
    unsafe {
        let tld = (*heap).tld;
        (*tld).heap_backing as *const MiHeap == heap
    }
}

#[inline]
pub fn mi_block_nextx(block: *mut MiBlock) -> *mut MiBlock {
    // Read the free-list link stored in the block itself.
    unsafe {
        let blk = &*block;
        blk.next
    }
}

#[inline]
pub fn mi_block_set_nextx(_: usize, block: *mut MiBlock, next: *mut MiBlock) {
    // Store the free-list link in the block. The first parameter is unused in
    // this port (a cookie/key slot kept for interface compatibility).
    unsafe {
        let blk = &mut *block;
        blk.next = next;
    }
}


#[inline]
pub fn mi_block_next(_: *mut MiPage, block: *mut MiBlock) -> *mut MiBlock {
    // The page parameter is unused in this port; delegate to the raw accessor.
    mi_block_nextx(block)
}

#[inline]
pub fn mi_block_set_next(_: *mut MiPage, block: *mut MiBlock, next: *mut MiBlock) {
    unsafe { (*block).next = next; }
}


/// Return a unique id for the current thread: the thread-pointer value read
/// from `fs:0` (x86_64; on Linux/glibc this slot holds the TCB self-pointer).
///
/// Fix: the asm *reads memory* at `fs:0`, so `options(nomem)` was unsound —
/// it tells the compiler the asm touches no memory at all, licensing
/// miscompilation. `readonly` is the correct claim; `preserves_flags` is
/// added since `mov` does not modify the flags register.
/// NOTE(review): this is x86_64-specific — other targets need their own
/// thread-pointer read; confirm the crate only builds for x86_64.
#[inline]
pub fn mi_thread_id() -> usize {
    let tid: usize;
    unsafe {
        // SAFETY: reads the thread-control-block pointer at fs:0; this is a
        // read-only access to valid thread-local storage.
        asm!("mov {}, fs:0", out(reg) tid, options(nostack, readonly, preserves_flags));
    }
    tid
}

/// Heuristic: is this page more than roughly 7/8 occupied?
///
/// A null page is reported as "mostly used" so callers searching for free
/// capacity skip it.
#[inline]
pub fn mi_page_mostly_used(page: *const MiPage) -> bool {
    if page.is_null() {
        return true;
    }
    unsafe {
        // One eighth of the page's block capacity.
        let frac = (*page).reserved as usize / 8;
        // Free blocks remaining = reserved - used + thread_freed (blocks freed
        // by other threads count as available again). "Mostly used" when fewer
        // than `frac` blocks remain.
        // NOTE(review): assumes used <= reserved + thread_freed at all times;
        // otherwise this unsigned subtraction underflows — TODO confirm the
        // invariant holds while remote frees are in flight.
        (*page).reserved as usize - (*page).used + (*page).thread_freed.load(Ordering::SeqCst) < frac
    }
}


#[inline]
pub fn mi_heap_is_initialized(heap: *const MiHeap) -> bool {
    // An uninitialized heap still points at the shared empty-heap sentinel.
    !std::ptr::eq(heap, &_MI_HEAP_EMPTY)
}


#[inline]
pub fn mi_page_queue(heap: *const MiHeap, block_size: usize) -> *const MiPageQueue {
    // Map the block size to its size-class bin and return a raw pointer to
    // that bin's page queue inside the heap.
    unsafe {
        let bin = _mi_bin(block_size);
        let queue: *const MiPageQueue = &(*heap).pages[bin];
        queue
    }
}

/// Are all blocks in the page freed?
///
/// Equivalent to the original `used - thread_freed == 0`, but written as an
/// equality so that a transient `thread_freed > used` (e.g. remote frees
/// observed before the owner's bookkeeping catches up) cannot underflow and
/// panic in debug builds.
/// NOTE(review): whether that transient state can actually occur depends on
/// update ordering elsewhere — the equality form is safe either way.
#[inline]
pub fn mi_page_all_free(page: *const MiPage) -> bool {
    unsafe {
        (*page).used == (*page).thread_freed.load(Ordering::SeqCst)
    }
}


/// Are there blocks that can be handed out without any collection work?
#[inline]
pub fn mi_page_immediate_available(page: *const MiPage) -> bool {
    // A non-empty local free list means a block is immediately available.
    unsafe {
        let free_list = (*page).free;
        !free_list.is_null()
    }
}

#[inline]
pub fn _mi_heap_get_free_small_page(heap: *const MiHeap, block_size: usize) -> *mut MiPage {
    // Direct lookup of a page for small sizes, indexed by word count.
    // NOTE(review): no bounds guard here — callers presumably ensure
    // block_size is within the small-size range; confirm at call sites.
    unsafe {
        let widx = _mi_wsize_from_size(block_size);
        (*heap).pages_free_direct[widx]
    }
}


#[inline]
pub fn _mi_align_up(sz: usize, alignment: usize) -> usize {
    let mut x = (sz / alignment) * alignment;
    if x < sz {
        x += alignment;
    }
    if x < sz {
        0
    } else {
        x
    }
}

/// Round a pointer up to the nearest `alignment` boundary.
pub fn mi_align_up_ptr(p: *mut u8, alignment: usize) -> *mut u8 {
    _mi_align_up(p as usize, alignment) as *mut u8
}

/// Round `sz` down to the nearest multiple of `alignment` (must be non-zero).
pub fn _mi_align_down(sz: usize, alignment: usize) -> usize {
    // Drop the remainder instead of divide-then-multiply; same result.
    sz - (sz % alignment)
}

/// Round a pointer down to the nearest `alignment` boundary.
pub fn mi_align_down_ptr(p: *mut u8, alignment: usize) -> *mut u8 {
    _mi_align_down(p as usize, alignment) as *mut u8
}


#[inline]
pub fn _mi_ptr_segment(p: *mut u8) -> *mut MiSegment {
    // Segments are segment-size-aligned, so masking off the low bits of any
    // interior pointer yields the owning segment's base address.
    ((p as usize) & !MI_SEGMENT_MASK) as *mut MiSegment
}

#[inline]
pub fn _mi_page_segment(page: *const MiPage) -> *mut MiSegment {
    // Page metadata lives inside its segment, so the same mask trick applies.
    let raw = page as usize as *mut u8;
    _mi_ptr_segment(raw)
}

#[inline]
pub fn _mi_segment_page_of(segment: *mut MiSegment, p: *mut u8) -> *mut MiPage {
    // The byte offset within the segment, shifted by the segment's page
    // shift, gives the index of the page containing `p`.
    unsafe {
        let offset = (p as usize) - (segment as usize);
        let page_idx = offset >> (*segment).page_shift;
        _mi_segment_page_idx(segment, page_idx)
    }
}

/// Pointer to the `idx`-th page descriptor in the segment's page array.
pub fn _mi_segment_page_idx(segment: *mut MiSegment, idx: usize) -> *mut MiPage {
    unsafe {
        // Return the raw element pointer directly; no need for the original's
        // temporary `&mut` reborrow, which produced the same address.
        let first = (*segment).pages.as_ptr() as *mut MiPage;
        first.add(idx)
    }
}

/// Start address of the usable area of `page`, also storing the usable size
/// into `page_size`.
///
/// Thin wrapper over `_mi_segment_page_start` (defined elsewhere), kept for
/// call-site convenience.
#[inline]
pub fn _mi_page_start(segment: *mut MiSegment, page: *mut MiPage, page_size: &mut usize) -> *mut u8 {
    _mi_segment_page_start(segment, page, page_size)
}

/// Adjust a block that was allocated aligned back to the actual start of the
/// block within the page.
pub fn _mi_page_ptr_unalign(segment: *mut MiSegment, page: *mut MiPage, p: *mut u8) -> *mut MiBlock {
    unsafe {
        // Offset of `p` from the page's data start; the remainder modulo the
        // block size is how far `p` sits inside its block.
        let mut page_size = 0;
        let start = _mi_page_start(segment, page, &mut page_size);
        let offset = p.offset_from(start) as usize;
        let adjust = offset % (*page).block_size;
        p.sub(adjust) as *mut MiBlock
    }
}