use super::{RawTask, TaskRef};
use core::mem;
use core::ptr;
use core::sync::atomic::{AtomicPtr, Ordering};

/// A lock-free multi-producer task list paired with an eventfd that wakes
/// the consumer when the list transitions from empty to non-empty.
#[repr(C)]
pub(crate) struct TaskQueue {
    // Head of an intrusive LIFO list of leaked tasks: producers CAS new
    // nodes onto the head (`push`); the consumer swaps the whole list out
    // at once (`pop`).
    tasks: AtomicPtr<RawTask>,
    // eventfd used to signal the consumer on empty -> non-empty
    // transitions; created in `new`, closed in `Drop`.
    fd: i32,
}

// SAFETY: `tasks` is only ever accessed through atomic operations
// (load/CAS in `push`, swap in `pop`) and `fd` is a plain descriptor value,
// so concurrent shared access and cross-thread transfer of the queue itself
// are sound. NOTE(review): this also assumes the `*mut RawTask` nodes are
// safe to hand between threads — confirm against `RawTask`'s definition.
unsafe impl Sync for TaskQueue {}
unsafe impl Send for TaskQueue {}

impl TaskQueue {
    /// Creates an empty queue backed by a freshly created eventfd that is
    /// used to wake the consumer (see [`Self::fd`]).
    ///
    /// # Panics
    ///
    /// Panics if the `eventfd(2)` syscall fails.
    pub(crate) fn new() -> Self {
        // EFD_NONBLOCK: reads/writes on the counter must never block.
        // EFD_CLOEXEC: do not leak the descriptor into spawned processes.
        let fd = unsafe { libc::eventfd(0, libc::EFD_CLOEXEC | libc::EFD_NONBLOCK) };
        if fd == -1 {
            panic!("TaskQueue: eventfd returned -1");
        }
        Self {
            tasks: AtomicPtr::default(),
            fd,
        }
    }

    /// Returns the eventfd that becomes readable whenever the queue goes
    /// from empty to non-empty; callers can register it with a poller.
    pub(crate) fn fd(&self) -> i32 {
        self.fd
    }

    /// Pushes a task onto the queue (lock-free, multi-producer).
    ///
    /// The task is leaked into a raw pointer and linked in as the new head
    /// of the intrusive list. If the queue was previously empty, the
    /// consumer is woken via the eventfd.
    pub(crate) fn push(&self, task: TaskRef) {
        let task = task.leak();
        let mut next = self.tasks.load(Ordering::Relaxed);
        loop {
            // Link the (not yet published) node to the current head.
            unsafe { (*task).node.set_next(next) };
            // Release on success pairs with the Acquire swap in `pop`,
            // making the node's contents visible to the consumer.
            match self
                .tasks
                .compare_exchange_weak(next, task, Ordering::Release, Ordering::Relaxed)
            {
                Ok(_) => {
                    // Only the push that turned the queue non-empty needs
                    // to wake the consumer; later pushes piggyback on it.
                    if next.is_null() {
                        self.notify();
                    }
                    return;
                }
                // Lost the race: retry against the freshly observed head.
                Err(old) => next = old,
            }
        }
    }

    /// Takes the entire pending list, returning its head pointer (null when
    /// the queue is empty). Nodes are chained in LIFO (most-recently-pushed
    /// first) order; the caller assumes ownership of every node returned.
    pub(crate) fn pop(&self) -> *mut RawTask {
        // Drain the eventfd counter before swapping the list out so that a
        // `notify` racing with the swap is not lost. The fd is non-blocking,
        // so reading an empty counter just fails with EAGAIN, ignored here.
        let mut val = 0_u64;
        let _ = unsafe {
            libc::read(
                self.fd,
                &mut val as *mut _ as *mut libc::c_void,
                mem::size_of_val(&val),
            )
        };
        self.tasks
            .swap(ptr::null_mut::<RawTask>(), Ordering::Acquire)
    }

    /// Wakes the consumer by bumping the eventfd counter.
    fn notify(&self) {
        // u64::MAX - 1 is the largest value an eventfd counter can hold, so
        // a single write saturates it; a concurrent extra notify would fail
        // with EAGAIN (the fd is non-blocking), which is harmless and ignored.
        let val = u64::MAX - 1;
        let _ = unsafe {
            libc::write(
                self.fd,
                &val as *const _ as *const libc::c_void,
                mem::size_of_val(&val),
            )
        };
    }
}

impl Drop for TaskQueue {
    /// Releases the eventfd owned by this queue.
    fn drop(&mut self) {
        let fd = self.fd;
        // SAFETY: `fd` was obtained from `eventfd` in `new` and is closed
        // exactly once, here, when the queue is dropped.
        unsafe {
            libc::close(fd);
        }
    }
}
