#ifndef INC_ARM_H
#define INC_ARM_H

#include <stdint.h>

#define ARM_NUM_CPU (4)

/* Busy-wait for roughly `count` iterations of a 2-instruction loop.
 * Not calibrated to wall-clock time; use delayus() for timed waits.
 * The %= token expands to a number unique per asm instance, so the
 * local label stays unique when the function is inlined repeatedly. */
static inline void
delay(int32_t count)
{
    /* The unnamed output is tied to the [count] input via the "0"
     * constraint, so subs decrements the register in place; "cc" is
     * clobbered because subs updates the condition flags. */
    asm volatile("__delay_%=: subs %[count], %[count], #1; bne __delay_%=\n":
                 "=r"(count): [count]"0"(count) : "cc");
}

/* Wait n microseconds by spinning on the EL0 physical counter.
 * NOTE(review): assumes cntfrq_el0 was programmed by firmware/boot code
 * with the true counter frequency — confirm for the target platform. */
static inline void
delayus(uint32_t n)
{
    uint64_t f, t, r;
    /* Get the current counter frequency (Hz). */
    asm volatile ("mrs %[freq], cntfrq_el0" : [freq]"=r"(f));
    /* isb keeps the counter read from being speculated early. */
    asm volatile ("isb; mrs %[cnt], cntpct_el0" : [cnt]"=r"(t) : : "memory");
    /* Multiply before dividing: `f / 1000000 * n` truncates (and thus
     * under-delays) whenever the frequency is not a whole number of MHz.
     * cntfrq fits in 32 bits and n is 32-bit, so f * n fits in 64 bits. */
    t += f * n / 1000000;
    do {
        asm volatile ("isb; mrs %[cnt], cntpct_el0" : [cnt]"=r"(r) : : "memory");
    } while (r < t);
}

/* Read the current value of the EL0 physical counter register. */
static inline uint64_t
arm_timestamp()
{
    uint64_t cnt;
    asm volatile ("mrs %[cnt], cntpct_el0" : [cnt]"=r"(cnt) :: "memory");
    return cnt;
}

/* Store the 32-bit value x at address p.
 * The volatile access is never elided or reordered by the compiler,
 * as required for memory-mapped I/O registers. */
static inline void
put32(uint64_t p, uint32_t x)
{
    volatile uint32_t *reg = (volatile uint32_t *)p;
    *reg = x;
}

/* Load and return the 32-bit value at address p.
 * volatile forces a real read each call (MMIO-safe). */
static inline uint32_t
get32(uint64_t p)
{
    volatile uint32_t *reg = (volatile uint32_t *)p;
    return *reg;
}

/* Unmask DAIF to start interrupt. */
static inline void
arm_enable_interrupt()
{
    /* Operand must be 64-bit: with a plain int, GCC substitutes a
     * w-register and `msr daif, wN` does not assemble (msr requires xN).
     * "memory" makes this a compiler barrier too, so memory accesses
     * are not moved out of the interrupt-masked region. */
    asm volatile("msr daif, %[x]" : : [x]"r"(0UL) : "memory");
}

/* Mask DAIF to close interrupt. D, A, I, F live at PSTATE bits [9:6]. */
static inline void
arm_disable_interrupt()
{
    /* 64-bit operand so GCC emits an x-register for msr (a 32-bit int
     * would substitute wN, which does not assemble); "memory" keeps the
     * compiler from reordering accesses across the mask change. */
    asm volatile("msr daif, %[x]" : : [x]"r"(0xFUL << 6) : "memory");
}

/* Brute-force data and instruction synchronization barrier. */
static inline void
_disb()
{
    /* "memory" clobber added: without it the compiler may still reorder
     * memory accesses across the barrier, defeating its purpose
     * (arm_fence below already does this correctly). */
    asm volatile("dsb sy; isb" ::: "memory");
}

/* Full data + instruction synchronization barrier (wraps _disb). */
static inline void
arm_full_system_barrier()
{
    _disb();
}

/* Full-system data synchronization barrier; also a compiler barrier. */
static inline void
arm_fence()
{
    asm volatile ("dsb sy" ::: "memory");
}

/* Data cache clean and invalidate by virtual address to point of coherency.
 * NOTE(review): issuing dc civac once per byte is functionally correct but
 * slow; stepping by the cache line size (from ctr_el0) would suffice. */
static inline void
dccivac(void *p, int n)
{
    /* char * for the address arithmetic: arithmetic on void * is a GCC
     * extension, not ISO C. */
    char *q = (char *)p;
    while (n--)
        asm volatile("dc civac, %[x]" : : [x]"r"(q + n) : "memory");
    /* Ensure all maintenance operations have completed before returning. */
    asm volatile("dsb sy" ::: "memory");
}

/* Read the Exception Syndrome Register (EL1). */
static inline uint64_t
arm_get_esr()
{
    uint64_t esr;
    asm volatile("mrs %[x], esr_el1" : [x]"=r"(esr));
    return esr;
}

/* Read the Exception Link Register (EL1). */
static inline uint64_t
arm_get_elr()
{
    uint64_t elr;
    asm volatile("mrs %[x], elr_el1" : [x]"=r"(elr));
    return elr;
}

/* Return the current stack pointer. */
static inline uint64_t
arm_get_sp()
{
    uint64_t sp;
    asm volatile("mov %[x], sp" : [x]"=r"(sp));
    return sp;
}

/* Set Exception Syndrome Register (EL1).
 * NOTE(review): ESR_EL1 is normally written by hardware on exception
 * entry; presumably this is used to clear/initialize the syndrome —
 * confirm against the exception-handling code. */
static inline void
arm_set_esr(uint64_t r)
{
    asm volatile("msr esr_el1, %[x]" : : [x]"r"(r));
}

/* Set vector base (virtual) address register (EL1).
 * p must point at the exception vector table; the architecture requires
 * the base to be 2 KiB-aligned (VBAR_EL1 low bits are reserved).
 * Barriers on both sides so the write is ordered against surrounding
 * accesses and takes effect before any subsequent instruction. */
static inline void
arm_set_vbar(void *p)
{
    arm_full_system_barrier();
    asm volatile("msr vbar_el1, %[x]" : : [x]"r"(p));
    arm_full_system_barrier();
}

/* Invalidate all stage-1 EL1&0 TLB entries. */
static inline void arm_tlbi_vmalle1() {
    /* dsb first so that page-table stores issued before the call are
     * visible to the table walker before the invalidation; the original
     * only had the trailing barrier. */
    asm volatile("dsb ishst" ::: "memory");
    asm volatile("tlbi vmalle1" ::: "memory");
    /* Complete the invalidation and resynchronize the pipeline. */
    arm_full_system_barrier();
}

/* Set Translation Table Base Register 0 (EL1), then flush the TLB. */
static inline void
arm_set_ttbr0(uint64_t p)
{
    /* "memory" clobber added: without it the compiler could sink
     * page-table stores past the base-register switch. */
    asm volatile("msr ttbr0_el1, %[x]" : : [x]"r"(p) : "memory");
    arm_full_system_barrier();
    arm_tlbi_vmalle1();
}

/* Set Translation Table Base Register 1 (EL1), then flush the TLB. */
static inline void
arm_set_ttbr1(uint64_t p)
{
    /* "memory" clobber added: keeps preceding page-table stores from
     * being reordered after the base-register switch (matches ttbr0). */
    asm volatile("msr ttbr1_el1, %[x]" : : [x]"r"(p) : "memory");
    arm_full_system_barrier();
    arm_tlbi_vmalle1();
}

/* Return this core's index: the Aff0 field (bits [7:0]) of MPIDR_EL1. */
static inline int
arm_cpuid()
{
    uint64_t mpidr;
    asm volatile("mrs %[x], mpidr_el1" : [x]"=r"(mpidr));
    return (int)(mpidr & 0xFF);
}

/**
 * Get the current TTBR0_EL1 value, returned as a pointer.
 * NOTE(review): the raw register also carries ASID/CnP bits outside the
 * table base address, and dereferencing the result assumes the table's
 * physical address is mapped at the same virtual address — confirm both
 * against the MMU setup before treating this as a usable pointer.
 */
static inline uint64_t *arm_get_ttbr0_el1() {
    uint64_t *ptr;
    asm volatile (
        "mrs %[ptr], ttbr0_el1"
        : [ptr]"=r" (ptr)
    );
    return ptr;
}

/* Issue a supervisor call with the system-call number in x8. */
static inline void arm_svc(int sysnum) {
    /* "memory" clobber added: the kernel entered via svc may read or
     * write arbitrary memory, so cached values must be invalidated.
     * NOTE(review): the SVC ABI presumably also clobbers the argument/
     * result registers (x0-x7) — confirm against the kernel's syscall
     * calling convention. */
    asm volatile ("mov x8,%[x]; svc 0x00" :: [x]"r"(sysnum) : "x8", "memory");
}

#endif
