#include <arch.h>
#include <asm_macros.h>
#include <plat.h>

 .global plat_get_core_pos
 .global plat_set_stack
 .global plat_handle_interrupt
 .global plat_handle_exception
 .global bzero


/*
 * int plat_get_core_pos(uint32_t mpidr);
 *
 * Convert an MPIDR value into a linear core index:
 *   pos = cpu_id + (cluster_affinity >> 6)
 * i.e. the cluster field, shifted right by 6, supplies the per-cluster
 * base index (assumes MPIDR_CLUSTER_MASK covers Aff1 so the shift
 * yields cluster * cores-per-cluster -- confirm against arch.h masks).
 *
 * Clobber: r0, r1
 */
func plat_get_core_pos
    and r1, r0, #MPIDR_CPU_MASK
    and r0, r0, #MPIDR_CLUSTER_MASK
    add r0, r1, r0, LSR #6
    bx  lr
endfunc plat_get_core_pos

/*
 * void plat_set_stack(void);
 *
 * For the current CPU (linear index in r0 on entry), program the
 * banked stack pointer of each processor mode -- FIQ, IRQ, ABT, UND,
 * SVC and SYS -- with this CPU's slot of the per-mode stack arrays
 * declared in the .stack section below.  IRQs and FIQs stay masked
 * for the whole sequence.
 *
 * Returns in SYS mode with SP set to this CPU's sys_stack slot.
 *
 * In:      r0 = CPU number
 * Clobber: r0, r4, r6 (plus whatever get_my_mp_stack clobbers)
 */
func plat_set_stack
    /* lr is banked per mode; keep the return address in r4 */
    mov r4, lr

    /* Preserve the CPU number across the per-mode stack lookups */
    mov r6, r0

    msr CPSR_c, #(MODE32_fiq | I_BIT | F_BIT)
    get_my_mp_stack fiq_stack_top PLAT_STACK_SIZE
    mov sp, r0

    mov r0, r6
    msr CPSR_c, #(MODE32_irq | I_BIT | F_BIT)
    get_my_mp_stack irq_stack_top PLAT_STACK_SIZE
    mov sp, r0

    mov r0, r6
    msr CPSR_c, #(MODE32_abt | I_BIT | F_BIT)
    get_my_mp_stack abt_stack_top PLAT_STACK_SIZE
    mov sp, r0

    mov r0, r6
    msr CPSR_c, #(MODE32_und | I_BIT | F_BIT)
    get_my_mp_stack und_stack_top PLAT_STACK_SIZE
    mov sp, r0

    mov r0, r6
    msr CPSR_c, #(MODE32_svc | I_BIT | F_BIT)
    get_my_mp_stack svc_stack_top PLAT_STACK_SIZE
    mov sp, r0

    /*
     * BUG FIX: this last stanza used to be a copy/paste duplicate of
     * the SVC one above -- it programmed the SVC stack twice and left
     * sys_stack (declared below, otherwise unused) and the SYS-mode SP
     * unset.  Program the SYS stack instead.
     */
    mov r0, r6
    msr CPSR_c, #(MODE32_sys | I_BIT | F_BIT)
    get_my_mp_stack sys_stack_top PLAT_STACK_SIZE
    mov sp, r0

    bx  r4
endfunc plat_set_stack

/*
 * void plat_handle_interrupt(void);
 * Platform specific interrupt handler: masks IRQs, delegates to
 * gic_handle, then unmasks IRQs before returning to the caller.
 *
 * NOTE(review): the final cpsie unconditionally re-enables IRQs even
 * if the caller entered with them masked -- confirm the exception
 * vector / caller expects this.
 *
 * Clobber: whatever gic_handle clobbers (AAPCS caller-saved regs).
 */
func plat_handle_interrupt
    cpsid   i   /* Disable interrupts to prevent nesting */

    push {lr}   /* the bl below overwrites lr */
    bl gic_handle
    pop {lr}

    cpsie   i   /* Re-enable interrupts */

    bx lr
endfunc plat_handle_interrupt

/*
 * void plat_handle_exception(void);
 * Platform specific exception handler: no recovery is attempted; the
 * CPU parks in a branch-to-self loop so its state can be inspected
 * with a debugger.  Never returns.
 */
func plat_handle_exception
    b   .   /* spin forever at the current address */
endfunc plat_handle_exception

/* -----------------------------------------------------------------------
 * void bzero(void *mem, unsigned int length);
 *
 * Initialise a memory region to 0.
 *
 * Strategy: zero byte-by-byte up to the first 8-byte boundary, then
 * 8 bytes at a time with stmia, then byte-by-byte for the tail.  If
 * computing the aligned start would overflow, or the region is too
 * small to reach an aligned address, fall back to a pure byte loop.
 *
 * In:      r0 = start address, r1 = length in bytes
 * Clobber: r0, r1, r2, r3, r12, flags
 * -----------------------------------------------------------------------
 */
func bzero
	/*
	 * Readable names for registers
	 *
	 * Registers r0, r1 and r2 are also set by zeromem which
	 * branches into the fallback path directly, so cursor, length and
	 * stop_address should not be retargeted to other registers.
	 */
	cursor       .req r0 /* Start address and then current address */
	length       .req r1 /* Length in bytes of the region to zero out */
	/*
	 * Reusing the r1 register as length is only used at the beginning of
	 * the function.
	 */
	stop_address .req r1  /* Address past the last zeroed byte */
	zeroreg1     .req r2  /* Source register filled with 0 */
	zeroreg2     .req r3  /* Source register filled with 0 */
	tmp	     .req r12 /* Temporary scratch register */

	mov	zeroreg1, #0

	/* stop_address is the address past the last to zero */
	add	stop_address, cursor, length

	/*
	 * Length cannot be used anymore as it shares the same register with
	 * stop_address.
	 */
	.unreq	length

	/*
	 * If the start address is already aligned to 8 bytes, skip this loop.
	 */
	tst	cursor, #(8-1)
	beq	.Lzeromem_8bytes_aligned

	/* Calculate the next address aligned to 8 bytes */
	orr	tmp, cursor, #(8-1)
	adds	tmp, tmp, #1
	/* If it overflows, fallback to byte per byte zeroing */
	beq	.Lzeromem_1byte_aligned
	/* If the next aligned address is after the stop address, fall back */
	cmp	tmp, stop_address
	bhs	.Lzeromem_1byte_aligned

	/* zero byte per byte */
1:
	strb	zeroreg1, [cursor], #1
	cmp	cursor, tmp
	bne	1b

	/* zero 8 bytes at a time */
.Lzeromem_8bytes_aligned:

	/* Calculate the last 8 bytes aligned address. */
	bic	tmp, stop_address, #(8-1)

	cmp	cursor, tmp
	bhs	2f

	mov	zeroreg2, #0
1:
	stmia	cursor!, {zeroreg1, zeroreg2}
	cmp	cursor, tmp
	blo	1b
2:

	/* zero byte per byte */
.Lzeromem_1byte_aligned:
	cmp	cursor, stop_address
	beq	2f
1:
	strb	zeroreg1, [cursor], #1
	cmp	cursor, stop_address
	bne	1b
2:
	bx	lr

	.unreq	cursor
	/*
	 * length is already unreq'ed to reuse the register for another
	 * variable.
	 */
	.unreq	stop_address
	.unreq	zeroreg1
	.unreq	zeroreg2
	.unreq	tmp
endfunc bzero


/*
 * Per-mode stack arrays: PLAT_CORE_COUNT stacks of PLAT_STACK_SIZE
 * bytes each, one array per processor mode, consumed by
 * plat_set_stack via the *_stack_top labels.
 */
#define STACK_ALIGN	6
.if ((PLAT_STACK_SIZE & ((1 << STACK_ALIGN) - 1)) <> 0)
  .error "Stack size not correctly aligned"
.endif
.section .stack, "aw", %nobits

/*
 * BUG FIX: only the first array used to be aligned.  Each array
 * occupies (PLAT_STACK_SIZE * PLAT_CORE_COUNT + 4) bytes, so every
 * subsequent array drifted 4 bytes off the (1 << STACK_ALIGN)
 * boundary that the .if assertion above is meant to guarantee.
 * Align each array individually; top-relative per-CPU offsets are
 * unchanged.
 *
 * NOTE(review): the "-4" / ".space 8" pattern around each _top label
 * is kept as-is -- presumably a guard/sentinel slot; confirm against
 * the get_my_mp_stack macro before changing it.
 */
.align STACK_ALIGN
fiq_stack:
	.space (PLAT_STACK_SIZE*PLAT_CORE_COUNT - 4), 0
fiq_stack_top:
	.space 8, 0
.align STACK_ALIGN
irq_stack:
	.space (PLAT_STACK_SIZE*PLAT_CORE_COUNT - 4), 0
irq_stack_top:
	.space 8, 0
.align STACK_ALIGN
abt_stack:
	.space (PLAT_STACK_SIZE*PLAT_CORE_COUNT - 4), 0
abt_stack_top:
	.space 8, 0
.align STACK_ALIGN
und_stack:
	.space (PLAT_STACK_SIZE*PLAT_CORE_COUNT - 4), 0
und_stack_top:
	.space 8, 0
.align STACK_ALIGN
svc_stack:
	.space (PLAT_STACK_SIZE*PLAT_CORE_COUNT - 4), 0
svc_stack_top:
	.space 8, 0
.align STACK_ALIGN
sys_stack:
	.space (PLAT_STACK_SIZE*PLAT_CORE_COUNT - 4), 0
sys_stack_top:
	.space 8, 0

