/*
 * Copyright (c) 2020-2021, KungFu32_RTT Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2020/10/14     McKinsey     Unified KungFu porting implementation
*/
#include <rtthread.h>

/*
 * Export the interrupt/system controller register addresses and bit
 * positions as assembler symbols, so the inline-asm bodies in the
 * functions below can reference them by name instead of raw addresses.
 */
asm(
".text""\n"
".EQU    SYS_MCTL,0x402000B4""\n"        /* system control register (unused in this file) */
".EQU    SYS_VECTOFF,0x402000BC""\n"     /* vector table offset register; used to restore MSP */
".EQU    INT_CTL0,0x40200000""\n"        /* interrupt control register 0; read/written as the IRQ 'level' */
".EQU    INT_EIF0,0x4020001C""\n"        /* exception/interrupt flag register 0 */
".EQU    INT_IP2,0x4020003C""\n"         /* interrupt priority register 2 */
".EQU    INT_SOFTSV_PRI,0x00F00000""\n"  /* priority field value OR-ed into INT_IP2 for the SOFTSV exception */
".EQU    INT_CTL0_DSALIGN_POS,7""\n"     /* bit position in INT_CTL0 (unused in this file) */
".EQU    INT_EIF0_PENDSVIF_POS,14""\n"   /* bit in INT_EIF0 that pends the SOFTSV (PendSV-like) exception */
".EQU    INT_CTL0_AIE_POS,0""\n"         /* bit position in INT_CTL0 (unused in this file) */
);



/*
 * rt_base_t rt_hw_interrupt_disable(void);
 *
 * Disable processor interrupts and return the previous interrupt state
 * (the raw INT_CTL0 value) so it can later be restored by
 * rt_hw_interrupt_enable().  The asm body returns directly via JMP lr,
 * leaving the result in r0 per the calling convention; the C function
 * never falls off the end.
 */
rt_base_t __attribute__((noinline)) rt_hw_interrupt_disable(void)
{
    asm volatile(
    "    LD       r0,#INT_CTL0""\n"   /* r0 = address of INT_CTL0 */
    "    LD.W     r0,[r0]""\n"        /* r0 = current INT_CTL0 value -> returned 'level' */
    "    DSI""\n"                     /* disable interrupts */
    "    JMP lr""\n"                  /* return; result already in r0 */
    );
}

/*
 * void rt_hw_interrupt_enable(rt_base_t level);
 *
 * Restore the interrupt state previously returned by
 * rt_hw_interrupt_disable() by writing 'level' (arriving in r0) back
 * into INT_CTL0.
 */
void __attribute__((noinline)) rt_hw_interrupt_enable(rt_base_t level)
{
    asm volatile(
    "    LD       r1,#INT_CTL0""\n"   /* r1 = address of INT_CTL0 */
    "    ST.W     [r1],r0""\n"        /* INT_CTL0 = level (in r0) */
    );
}

/*
 * void rt_hw_context_switch(rt_uint32_t from, rt_uint32_t to);
 * r0 --> from : address of the outgoing thread's stack-pointer slot
 * r1 --> to   : address of the incoming thread's stack-pointer slot
 *
 * Records the from/to thread pointers for the SOFTSV handler and pends
 * the SOFTSV exception; the actual register save/restore happens in
 * _SoftSV_exception once interrupts allow it to run.
 */
void __attribute__((noinline)) rt_hw_context_switch(rt_uint32_t from, rt_uint32_t to)
{
    asm volatile(
        /* set rt_thread_switch_interrupt_flag to 1 */
    "    LD      r2, #rt_thread_switch_interrupt_flag""\n"
    "    LD.W    r3, [r2]""\n"
    "    CMP     r3, #1""\n"
    "    JZ      _reswitch""\n"       /* a switch is already pending: keep the original
                                       * 'from' thread, only update the 'to' thread */
    "    MOV     r3, #1""\n"
    "    ST.W    [r2], r3""\n"

    "    LD      r2, #rt_interrupt_from_thread""\n"   /* set rt_interrupt_from_thread */
    "    ST.W    [r2], r0""\n"

    "_reswitch:""\n"
    "    LD      r2, #rt_interrupt_to_thread""\n"     /* set rt_interrupt_to_thread */
    "    ST.W    [r2], r1""\n"

    "    LD      r0, #INT_EIF0""\n"              /* trigger the SOFTSV exception (causes context switch) */
    "    SET     [r0], #INT_EIF0_PENDSVIF_POS""\n"
    );
}

/*
 * void rt_hw_context_switch_interrupt(rt_uint32_t from, rt_uint32_t to);
 *
 * On this port a switch requested from interrupt context is handled the
 * same way as a thread-level switch (both just pend SOFTSV), so simply
 * delegate to rt_hw_context_switch().
 */
void __attribute__((noinline)) rt_hw_context_switch_interrupt(rt_uint32_t from, rt_uint32_t to)
{
    rt_hw_context_switch(from,to);
}

/*
 * void rt_hw_context_switch_to(rt_uint32_t to);
 * r0 --> to : address of the first thread's stack-pointer slot
 *
 * Start the very first thread: record 'to', clear 'from' (so the SOFTSV
 * handler skips the register save), configure the SOFTSV priority, pend
 * the exception, reset MSP from the vector table, and enable interrupts
 * so the pended SOFTSV can fire.  This function does not return.
 */
void __attribute__((noinline)) rt_hw_context_switch_to(rt_uint32_t to)
{
    asm volatile(
    "    LD    r1, #rt_interrupt_to_thread""\n"
    "    ST.W  [r1], r0""\n"

            /* set from thread to 0 -- tells _SoftSV_exception this is the
             * first switch and there is no context to save */
    "    LD    r1, #rt_interrupt_from_thread""\n"
    "    MOV   r0, #0""\n"
    "    ST.W  [r1], r0""\n"

            /* set interrupt flag to 1 */
    "    LD      r1, #rt_thread_switch_interrupt_flag""\n"
    "    MOV     r0, #1""\n"
    "    ST.W    [r1], r0""\n"

            /* set the SOFTSV exception priority (read-modify-write INT_IP2) */
    "    LD      r0, #INT_IP2""\n"
    "    LD      r1, #INT_SOFTSV_PRI""\n"
    "    LD.W    r2, [r0]""\n"              /* read       */
    "    ORL     r1,r1,r2""\n"              /* modify     */
    "    ST.W    [r0], r1""\n"              /* write-back */

    "    LD      r0, #INT_EIF0""\n"      /* trigger the SOFTSV exception (causes context switch) */
    "    SET     [r0], #INT_EIF0_PENDSVIF_POS""\n"

            /* restore MSP: first vector-table entry (via SYS_VECTOFF) is
             * presumably the initial main stack pointer -- TODO confirm */
    "    LD      r0, #SYS_VECTOFF""\n"
    "    LD.W    r0, [r0]""\n"
    "    LD.W    r0, [r0]""\n"
    "    MOV     msp, r0""\n"

            /* enable interrupts at processor level */
    "    ENI""\n"
    );
}



/*
 * SOFTSV exception handler: performs the actual context switch.
 *
 * r0 --> switch from thread stack
 * r1 --> switch to thread stack
 * psr, pc, lr, r12, r3, r2, r1, r0 are pushed into [from] stack by the
 * exception entry hardware; this handler saves/restores the remaining
 * callee registers (r5 - r12) by hand on the thread (psp) stack.
 */
void __attribute__((interrupt)) _SoftSV_exception (void)
{
    asm volatile(
        /* disable interrupt to protect context switch */
    "   DSI""\n"                                                 // Prevent interruption during context switch
        /* get rt_thread_switch_interrupt_flag */
    "    MOV     r2, msp""\n"            /* keep MSP in r2; restored before exit */
    "    LD      r0, #rt_thread_switch_interrupt_flag""\n"
    "    LD.W    r1, [r0]""\n"
    "    CMP     r1, #0x00""\n"
    "    JZ      _softsv_exit""\n"        /* SOFTSV already handled */

    "    MOV     r1, #0x00""\n"           /* consume the pending-switch flag */
    "    ST.W    [r0], r1""\n"

    "    LD      r0, #rt_interrupt_from_thread""\n"
    "    LD.W    r1, [r0]""\n"
    "    CMP     r1, #0x00""\n"
    "    JZ      switch_to_thread""\n"    /* skip register save at the first time */

        /* save outgoing context: work on the thread (psp) stack */
    "    MOV     sp, psp""\n"
    "    PUSH    {r6 - r12}""\n"
    "    PUSH    r5""\n"
    "    MOV     r1, sp""\n"                 /* get from thread stack pointer */

    "    LD.W    r0, [r0]""\n"               /* r0 = from-thread's sp slot */
    "    ST.W    [r0], r1""\n"                 /* update from thread stack pointer */

    "switch_to_thread:""\n"
    "    LD      r0, #rt_interrupt_to_thread""\n"
    "    LD.W    r0, [r0]""\n"
    "    LD.W    r0, [r0]""\n"                /* load thread stack pointer */

    "    MOV     sp, r0""\n"             /* get to thread stack pointer */
    "    POP     r5""\n"
    "    POP     {r6 - r12}""\n"          /* pop r6 - r12 register */
    "    MOV     psp, sp""\n"             /* incoming thread resumes on psp */
    "    MOV     msp, r2""\n"             /* restore handler-entry MSP */

    "_softsv_exit:""\n"

        /* NOTE(review): lr |= 0x04 presumably selects return-to-thread
         * stack (psp) on exception exit -- confirm against the KungFu32
         * exception-return encoding */
    "    MOV  r1, #0x04""\n"
    "    ORL  lr, r1""\n"
        /* restore interrupt; CLR PSW,#29 clears PSW bit 29 (meaning
         * architecture-defined -- TODO confirm) */
    "    CLR    PSW, #29""\n"
    "    ENI""\n"
    "    JMP  lr""\n"
    );
}


/*
 * Hard-fault exception handler: saves the callee registers and jumps to
 * the C-level fault reporter rt_hw_hard_fault_exception.
 *
 * NOTE(review): LJMP is a jump, not a call -- if it does not link back,
 * the POP/JMP sequence after it is unreachable and the handler never
 * returns here; confirm LJMP semantics on this architecture.
 */
void __attribute__((interrupt)) HardFault_Handler (void)
{
    asm volatile(
    "    PUSH    {r6 - r12}""\n"
    "    PUSH    r5""\n"                    /* push r5 - r12 register */

    "    LD      r5,#rt_hw_hard_fault_exception""\n"
    "    LJMP    r5""\n"                    /* enter the C fault handler */

    "    POP     r5""\n"
    "    POP     {r6 - r12}""\n"             /* pop r5 - r12 register */

    "    JMP     lr""\n"
    );
}
/*
 * rt_uint32_t rt_hw_interrupt_check(void);
 * R0 --> state
 *
 * Returns the word at sp+0x1C -- per the exception-frame layout
 * documented above (psr, pc, lr, r12, r3, r2, r1, r0 pushed), this is
 * presumably the saved PSR of the interrupted context; verify the frame
 * offset against the caller's stack depth.
 */
rt_uint32_t __attribute__((noinline)) rt_hw_interrupt_check(void)
{
    asm volatile(
    "    LD.w     r0,[sp+#0x1C]""\n"   /* r0 = saved state word -> return value */
    "    JMP      lr""\n"
    );
}

