#include "sdkconfig.h"

/* ARM processor mode numbers for the CPSR/SPSR M[4:0] field (ARMv7-A). */
.equ MODE_USR, 0x10     /* User mode (unprivileged) */
.equ MODE_FIQ, 0x11     /* Fast-interrupt mode (banked r8-r14) */
.equ MODE_IRQ, 0x12     /* Interrupt mode */
.equ MODE_SVC, 0x13     /* Supervisor mode (reset/boot mode) */
.equ MODE_ABT, 0x17     /* Abort mode */
.equ MODE_UND, 0x1B     /* Undefined-instruction mode */
.equ MODE_SYS, 0x1f     /* System mode (shares User-mode registers) */

.section .text /* The code segment, RD, program’s executable code */
/*******************************************************************************
*
* Startup_Aarch32 / RST_Handler - system start-up entry point
*
* NOTE(review): this header previously described VxWorks' sysInit()/usrInit(),
* which do not appear in this file - it looked like stale copy-paste and has
* been rewritten to describe the code below.
*
* Execution starts at Startup_Aarch32 (optionally dropping from AArch64
* EL3/EL2 down to AArch32 EL1 when CONFIG_USE_AARCH64_L1_TO_AARCH32 is set)
* and falls through to RST_Handler, which:
*   - masks IRQs, clears SCTLR, and installs VBAR (system_vectors)
*   - leaves HYP mode for SVC if the core booted in HYP
*   - invalidates TLBs, I-cache, branch predictor and D-cache; disables
*     the MMU, both caches, and alignment checking
*   - sets up and pattern-fills the FIQ/IRQ/SYS/SVC mode stacks
*   - copies .data from its load address and zeroes .bss
*   - enables coprocessor/FPU access and jumps to the C routine c_startup()
*
* NOTE: This routine should not be called by the user; it is not a
* callable function.
*
* RETURNS: N/A
*
*/
#include "sdkconfig.h"

/* Qemu no need to switch from aarch64 to aarch32 */

/* "ax" (alloc + execute) instead of the original "a": this section holds
 * executable startup code and must be marked executable in the object. */
.section .Startup_Aarch32, "ax", %progbits
.global Startup_Aarch32


Startup_Aarch32:

#ifdef CONFIG_USE_AARCH64_L1_TO_AARCH32
    /*
     * Hand-encoded AArch64 shim.  This file is assembled for AArch32, so
     * the A64 instructions are emitted as raw .long words.  It routes the
     * core from EL3 or EL2 down to AArch32 EL1 at el1_mode: enables the
     * GIC system-register interface (ICC_SRE_ELx), configures EL1 as
     * AArch32, sets SPSR_ELx = 0x1d3 (AArch32 SVC, A/I/F masked), and
     * ERETs.  The b.ne/adr words embed hand-computed PC-relative offsets:
     * do NOT insert or remove words here without re-encoding them.
     */
    .long 0xd5384240 	/* mrs	x0, currentel                      */
    .long 0xd342fc00 	/* lsr	x0, x0, #2                         */
    .long 0x92400400 	/* and	x0, x0, #0x3                       */
    .long 0xf1000c1f 	/* cmp	x0, #0x3                           */
    .long 0x540003a1 	/* b.ne	el2_mode (PC + 0x74)               */

el3_mode:
    .long 0xd53ecca0 	/* mrs	x0, s3_6_c12_c12_5 - ICC_SRE_EL3   */
    .long 0xb2400c00 	/* orr	x0, x0, #0xf                       */
    .long 0xd51ecca0 	/* msr	s3_6_c12_c12_5, x0                 */
    .long 0xd5033fdf 	/* isb                                     */
    .long 0xd53cc9a0 	/* mrs	x0, s3_4_c12_c9_5 - ICC_SRE_EL2    */
    .long 0xb2400c00 	/* orr	x0, x0, #0xf                       */
    .long 0xd51cc9a0 	/* msr	s3_4_c12_c9_5, x0                  */
    .long 0xd5033fdf 	/* isb                                     */
    .long 0xd538cca0 	/* mrs	x0, s3_0_c12_c12_5 - ICC_SRE_EL1   */
    .long 0xb2400000 	/* orr	x0, x0, #0x1                       */
    .long 0xd518cca0 	/* msr	s3_0_c12_c12_5, x0                 */
    .long 0xd5033fdf 	/* isb                                     */

    .long 0xd2803620 	/* mov	x0, #0x1b1                         */
    .long 0xd51e1100 	/* msr	scr_el3, x0                        */
    .long 0xd2867fe0 	/* mov	x0, #0x33ff                        */
    .long 0xd51c1140 	/* msr	cptr_el2, x0                       */
    .long 0xd2810000 	/* mov	x0, #0x800                         */
    .long 0xf2a61a00 	/* movk	x0, #0x30d0, lsl #16               */
    .long 0xd5181000 	/* msr	sctlr_el1, x0                      */
    .long 0x910003e0 	/* mov	x0, sp                             */
    .long 0xd51c4100 	/* msr	sp_el1, x0                         */
    .long 0xd53ec000 	/* mrs	x0, vbar_el3                       */
    .long 0xd518c000 	/* msr	vbar_el1, x0                       */
    .long 0xd2803a60 	/* mov	x0, #0x1d3                         */
    .long 0xd51e4000 	/* msr	spsr_el3, x0                       */
    .long 0x10000500 	/* adr	x0, el1_mode (PC + 0xa0)           */
    .long 0xd51e4020 	/* msr	elr_el3, x0                        */
    .long 0xd69f03e0 	/* eret                                    */

el2_mode:
    .long 0xd53cc9a0 	/* mrs	x0, s3_4_c12_c9_5 - ICC_SRE_EL2    */
    .long 0xb2400c00 	/* orr	x0, x0, #0xf                       */
    .long 0xd51cc9a0 	/* msr	s3_4_c12_c9_5, x0                  */
    .long 0xd5033fdf 	/* isb                                     */
    .long 0xd538cca0 	/* mrs	x0, s3_0_c12_c12_5 - ICC_SRE_EL1   */
    .long 0xb2400000 	/* orr	x0, x0, #0x1                       */
    .long 0xd518cca0 	/* msr	s3_0_c12_c12_5, x0                 */
    .long 0xd5033fdf 	/* isb                                     */
    .long 0xd53ce100 	/* mrs	x0, cnthctl_el2                    */
    .long 0xb2400400 	/* orr	x0, x0, #0x3                       */
    .long 0xd51ce100 	/* msr	cnthctl_el2, x0                    */
    .long 0xd51ce07f 	/* msr	cntvoff_el2, xzr                   */
    .long 0xd5380000 	/* mrs	x0, midr_el1                       */
    .long 0xd53800a1 	/* mrs	x1, mpidr_el1                      */
    .long 0xd51c0000 	/* msr	vpidr_el2, x0                      */
    .long 0xd51c00a1 	/* msr	vmpidr_el2, x1                     */
    .long 0xd2867fe0 	/* mov	x0, #0x33ff                        */
    .long 0xd51c1140 	/* msr	cptr_el2, x0                       */
    .long 0xd51c117f 	/* msr	hstr_el2, xzr                      */
    .long 0xd2a00600 	/* mov	x0, #0x300000                      */
    .long 0xd5181040 	/* msr	cpacr_el1, x0                      */
    .long 0xd2800000 	/* mov	x0, #0x0                           */
    .long 0xb2630000 	/* orr	x0, x0, #0x20000000                */
    .long 0xd51c1100 	/* msr	hcr_el2, x0  (HCR_EL2.RW=0: EL1 is AArch32) */
    .long 0xd53c1100 	/* mrs	x0, hcr_el2                        */
    .long 0xd2810000 	/* mov	x0, #0x800                         */
    .long 0xf2a61a00 	/* movk	x0, #0x30d0, lsl #16               */
    .long 0xd5181000 	/* msr	sctlr_el1, x0                      */
    .long 0x910003e0 	/* mov	x0, sp                             */
    .long 0xd51c4100 	/* msr	sp_el1, x0                         */
    .long 0xd53cc000 	/* mrs	x0, vbar_el2                       */
    .long 0xd518c000 	/* msr	vbar_el1, x0                       */
    .long 0xd2803a60 	/* mov	x0, #0x1d3                         */
    .long 0xd51c4000 	/* msr	spsr_el2, x0                       */
    .long 0x10000060 	/* adr	x0, el1_mode (PC + 0xc)            */
    .long 0xd51c4020 	/* msr	elr_el2, x0                        */
    .long 0xd69f03e0 	/* eret                                    */
el1_mode:
#endif

.global RST_Handler
RST_Handler:
    cpsid i						/* Mask IRQs while the core is reconfigured */

    mov  r0, #0                 /* was 'ldr r0, =#0x0': plain mov is the canonical form */
    mcr  p15, 0, r0, c1, c0, 0  /* SCTLR = 0: MMU, caches, alignment checks all off */
    isb

    /* set VBAR to the system_vectors address in linker script */
	ldr	r0, =system_vectors
	mcr	p15, 0, r0, c12, c0, 0

/* from ARMv8 */
/* Check for HYP mode (CPSR mode field == 0x1A) */
    mrs r0, cpsr_all
    and r0, r0, #0x1F
    mov r8, #0x1A
    cmp r0, r8
    beq overHyped
    b continue

/* Get out of HYP mode: ERET to 'continue' in SVC mode.
 * BUGFIX: the original used 'and r1, r1, #0x1f', which kept ONLY the mode
 * bits (0x1A in HYP); OR-ing in 0x13 then produced mode 0x1B (Undefined)
 * and cleared every other CPSR bit.  'bic' clears just the mode field and
 * preserves the A/I/F mask and flag bits. */
overHyped: 
    adr r1, continue
    msr ELR_hyp, r1
    mrs r1, cpsr_all
    bic r1, r1, #0x1f    ;@ clear CPSR_MODE_MASK
    orr r1, r1, #0x13    ;@ CPSR_MODE_SUPERVISOR
    msr SPSR_hyp, r1
    eret

continue:
    cps MODE_SVC

/* Invalidate all address-translation and instruction-side caching state,
 * then invalidate the D-cache by set/way before the caches are disabled. */
invalidate_caches_tlb:
	mov	r0,#0				    /* r0 = 0 (SBZ operand for the ops below) */
	mcr	p15, 0, r0, c8, c7, 0   /* TLBIALL: invalidate entire unified TLB */
	mcr	p15, 0, r0, c7, c5, 0	/* ICIALLU: invalidate entire icache */
	mcr	p15, 0, r0, c7, c5, 6	/* BPIALL: invalidate branch predictor array */
	bl	invalidate_dcache		/* invalidate dcache by set/way (clobbers r0-r5,r7,r9-r11) */

/* Clear SCTLR.M/.I/.C/.A via read-modify-write.  NOTE(review): SCTLR was
 * already zeroed at RST_Handler entry, so these are belt-and-braces. */
disable_cache_mmu:
    @ Disable MMU (SCTLR.M, bit 0)
    mrc p15, 0, r1, c1, c0, 0 @ Read Control Register configuration data
    bic r1, r1, #0x1
    mcr p15, 0, r1, c1, c0, 0 @ Write Control Register configuration data
    @ Disable L1 Caches
    mrc p15, 0, r1, c1, c0, 0 @ Read Control Register configuration data
    bic r1, r1, #(0x1 << 12) @ Disable I Cache (SCTLR.I)
    bic r1, r1, #(0x1 << 2) @ Disable D Cache (SCTLR.C)
    mcr p15, 0, r1, c1, c0, 0 @ Write Control Register configuration data    

    /* disable the data alignment check (SCTLR.A, bit 1) */
    mrc p15, 0, r1, c1, c0, 0
    bic r1, #(1<<1)
    mcr p15, 0, r1, c1, c0, 0

    /* Set up and pattern-fill a stack for each processor mode.
     * BUGFIX: the original used 'msr cpsr_c, MODE_x', which writes the
     * whole CPSR control byte and therefore CLEARED the I/F mask bits,
     * un-masking interrupts in the middle of stack initialisation
     * (contradicting the cpsid at RST_Handler entry).  'cps' changes
     * only the mode field and leaves A/I/F untouched - it is also the
     * form already used at 'continue:' above. */

    /* FIQ stack */
    cps MODE_FIQ                /* switch the processor to FIQ mode */
    ldr r1, =_fiq_stack_start   /* load start address into R1 */
    ldr sp, =_fiq_stack_end     /* banked FIQ SP = end of the FIQ stack */
    movw r0, #0xFEFE
    movt r0, #0xFEFE            /* r0 = 0xFEFEFEFE stack-fill pattern */

/* This is the loop that actually fills the stack with 0xFEFEFEFE */
fiq_loop:
    cmp r1, sp                  /* compares the value in R1 to the value in SP */
    strlt r0, [r1], #4          /* if R1 < SP: *R1 = pattern, R1 += 4 */
    blt fiq_loop                /* the loop continues as long as R1 is less than SP */

    /* IRQ stack */
    cps MODE_IRQ
    ldr r1, =_irq_stack_start
    ldr sp, =_irq_stack_end

/* fill irq stack */
irq_loop:
    cmp r1, sp
    strlt r0, [r1], #4
    blt irq_loop

    /* System mode */
    cps MODE_SYS
    ldr r1, =_sys_stack_start
    ldr sp, =_sys_stack_end

sys_loop:
    cmp r1, sp
    strlt r0, [r1], #4
    blt sys_loop

/* PUT SVC IN THE END, START UP WITH SVC MODE!!! */
    /* Supervisor mode */
    cps MODE_SVC
    ldr r1, =_svc_stack_start
    ldr sp, =_svc_stack_end

/* fill the supervisor mode stack */
svc_loop:
    cmp r1, sp
    strlt r0, [r1], #4
    blt svc_loop

    /* Copy the initialised .data image from its load address (placed
     * right after .text by the linker script) to its run address. */
    ldr r0, =_text_end          /* source: load address of .data in ROM */
    ldr r1, =_data_start        /* destination start */
    ldr r2, =_data_end          /* destination end (exclusive) */

/* word-by-word copy of the whole of .data */
data_loop:
    cmp r1, r2
    bge data_done               /* stop once dst reaches _data_end */
    ldr r3, [r0], #4            /* fetch one word from the load image */
    str r3, [r1], #4            /* store it at the run address */
    b data_loop
data_done:

/* Zero-fill .bss: it occupies no space in the image, only in RAM. */
clear_bss:
    mov r0, #0
    ldr r1, =_bss_start
    ldr r2, =_bss_end

/* word-by-word zeroing between _bss_start and _bss_end */
bss_loop:
    cmp r1, r2
    bge bss_done
    str r0, [r1], #4
    b bss_loop
bss_done:

@ using fpu
@ NOTE(review): grants full access to coprocessors cp0..cp13 via CPACR
@ (0xfffffff = 28 set bits; GAS materialises this immediate as mvn).
@ extra_init below rewrites CPACR with cp10/cp11 access only - confirm
@ which setting is actually intended to survive.
mov r4, #0xfffffff
mcr p15, 0, r4, c1, c0, 2

#ifdef CONFIG_USE_CACHE
/* Set SCTLR.Z (bit 11) to enable program-flow (branch) prediction. */
enable_branch_pred:
    mrc p15, 0, r0, c1, c0, 0
    orr     r0, r0, #(1<<11)
    mcr p15, 0, r0, c1, c0, 0 

/* Build the MMU translation tables from the platform memory-region
 * descriptors and enable the caches (both implemented elsewhere).
 * Args: r0 = &platform_mem_desc, r1 = *platform_mem_desc_size. */
init_cache_mmu:
    ldr r0, =platform_mem_desc
    ldr r1, =platform_mem_desc_size
    ldr r1, [r1]                /* pass the size value, not its address */
    bl InitMMUTable
    bl InitCache
#endif  

extra_init:
    @ Enable access to FP registers (CPACR: full access to cp10/cp11).
    mov r1, #(0xF << 20)
    mcr p15, 0, r1, c1, c0, 2 // CPACR full access to cp11 and cp10.
    isb                       // BUGFIX: the CPACR write must be synchronised
                              // BEFORE the first FP/NEON instruction, or the
                              // vmsr below can take an Undefined exception.
                              // The original placed the isb after vmsr.
    @ Enable Floating point and Neon unit (FPEXC.EN, bit 30).
    mov r1, #(0x1 << 30)
    vmsr FPEXC, r1

start_up:
    cpsie   i                   /* enable irq */
    bl	    c_startup			/* jump to C startup code; 'bl' (was 'b') so
                                   the fallback below is actually reachable
                                   should c_startup ever return */
	b       Abort_Exception     /* should never get here */

/*
 *************************************************************************
 *
 * invalidate_dcache - invalidate the entire d-cache by set/way
 *
 * Note: for Cortex-A53, there is no cp instruction for invalidating
 * the whole D-cache. Need to invalidate each line.
 *
 * Walks CLIDR to find each data/unified cache level up to the level of
 * coherency, reads its geometry from CCSIDR, and issues DCISW for every
 * set/way combination.
 *
 * In:     nothing
 * Out:    nothing (CSSELR left selecting level 0)
 * Clobb:  r0-r5, r7, r9-r11, flags.  Uses no stack; returns via lr.
 *
 *************************************************************************
 */
invalidate_dcache:
	mrc	p15, 1, r0, c0, c0, 1		/* read CLIDR */
	ands	r3, r0, #0x7000000		/* extract LoC (bits 26:24) */
	mov	r3, r3, lsr #23			/* cache level value (naturally aligned), = LoC * 2 */
	beq	finished			/* LoC == 0: nothing to invalidate */
	mov	r10, #0				/* start with level 0 (r10 = level << 1, CSSELR format) */
loop1:
	add	r2, r10, r10, lsr #1		/* work out 3xcachelevel */
	mov	r1, r0, lsr r2			/* bottom 3 bits are the Cache type for this level */
	and	r1, r1, #7			/* get those 3 bits alone */
	cmp	r1, #2
	blt	skip				/* no cache or only instruction cache at this level */
	mcr	p15, 2, r10, c0, c0, 0		/* write the Cache Size selection register (CSSELR) */
	isb					/* isb to sync the change to the CacheSizeID reg */
	mrc	p15, 1, r1, c0, c0, 0		/* reads current Cache Size ID register (CCSIDR) */
	and	r2, r1, #7			/* extract the line length field */
	add	r2, r2, #4			/* add 4 for the line length offset (log2 16 bytes) */
	ldr	r4, =0x3ff
	ands	r4, r4, r1, lsr #3		/* r4 is the max number on the way size (right aligned) */
	clz	r5, r4				/* r5 is the bit position of the way size increment */
	ldr	r7, =0x7fff
	ands	r7, r7, r1, lsr #13		/* r7 is the max number of the index size (right aligned) */
loop2:
	mov	r9, r4				/* r9 working copy of the max way size (right aligned) */
loop3:
	orr	r11, r10, r9, lsl r5		/* factor in the way number and cache number into r11 */
	orr	r11, r11, r7, lsl r2		/* factor in the index number */
	mcr	p15, 0, r11, c7, c6, 2		/* DCISW: invalidate by set/way */
	subs	r9, r9, #1			/* decrement the way number */
	bge	loop3
	subs	r7, r7, #1			/* decrement the index */
	bge	loop2
skip:
	add	r10, r10, #2			/* increment the cache number */
	cmp	r3, r10
	bgt	loop1

finished:
	mov	r10, #0				/* switch back to cache level 0 */
	mcr	p15, 2, r10, c0, c0, 0		/* select current cache level in cssr */
	dsb
	isb

	bx	lr

.end
/**
* @} End of "addtogroup a53_32_boot_code".
*/