/*
 * MIT License
 *
 * Copyright (c) 2018, Sergey Matyukevich
 *           (c) 2020, Santiago Pagani <santiagopagani@gmail.com>
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use, copy,
 * modify, merge, publish, distribute, sublicense, and/or sell copies
 * of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 */

#include <uk/arch/lcpu.h>
#include <uk/asm.h>
#include <uk/arch/types.h>
#include <uk/plat/offset.h>
#include <uk/syscall.h>
#include <uk/plat/config.h>

/*
 * Swap the values of sp and x0 without using any third register as
 * scratch. This is the additive variant of the classic XOR-swap trick;
 * it works regardless of the two values (wraparound cancels out).
 */
.macro EXCHANGE_SP_WITH_X0
	add sp, sp, x0	// new_sp = sp + x0
	sub x0, sp, x0	// new_x0 = new_sp - x0 = sp + x0 - x0 = sp
	sub sp, sp, x0	// new_sp = new_sp - new_x0 = sp + x0 - sp = x0
.endm

/*
 * Store a full trap frame at the current sp. The caller must already
 * have made room on the stack (sp points at the base of the frame).
 *
 * Frame layout (16-byte pairs from sp):
 *   [sp + 16*0  .. 16*14]  x0..x29
 *   [sp + 16*15]           x30 (lr), ELR_EL1 (exception PC)
 *   [sp + 16*16]           SPSR_EL1 (pstate), ESR_EL1 (syndrome)
 *
 * Clobbers x21, x22, x23 — safe, since their original values were
 * already stored by the stp sequence above before being reused.
 */
.macro SAVE_REGS

	/* Save general purpose registers */
	stp	x0, x1, [sp, #16 * 0]
	stp	x2, x3, [sp, #16 * 1]
	stp	x4, x5, [sp, #16 * 2]
	stp	x6, x7, [sp, #16 * 3]
	stp	x8, x9, [sp, #16 * 4]
	stp	x10, x11, [sp, #16 * 5]
	stp	x12, x13, [sp, #16 * 6]
	stp	x14, x15, [sp, #16 * 7]
	stp	x16, x17, [sp, #16 * 8]
	stp	x18, x19, [sp, #16 * 9]
	stp	x20, x21, [sp, #16 * 10]
	stp	x22, x23, [sp, #16 * 11]
	stp	x24, x25, [sp, #16 * 12]
	stp	x26, x27, [sp, #16 * 13]
	stp	x28, x29, [sp, #16 * 14]

	/* Save LR and exception PC */
	mrs	x21, elr_el1
	stp	x30, x21, [sp, #16 * 15]

	/* Save pstate and exception status register */
	mrs	x22, spsr_el1
	mrs	x23, esr_el1
	stp	x22, x23, [sp, #16 * 16]
.endm

/*
 * Restore a full trap frame (layout written by SAVE_REGS) from sp and
 * return from the exception via eret. Does not fall through.
 *
 * x18 and x19 are deliberately restored last: x18 stages the saved sp
 * and x19 keeps a pointer to the frame after sp has been switched back,
 * so both are reloaded through x19 as the final step before eret.
 */
.macro RESTORE_REGS
	/* Mask IRQ to make sure restore would not be interrupted by IRQ.
	 * daifset immediate bits: D=8, A=4, I=2, F=1 (mask everything).
	 */
	msr	daifset, #(8 | 4 | 2 | 1)

	/* Restore pstate and exception status register.
	 * ESR_EL1 is written back so the register again holds the saved
	 * syndrome value (it is writable software-visible state at EL1).
	 */
	ldp x22, x23, [sp, #16 * 16]
	msr spsr_el1, x22
	msr esr_el1, x23

	/* Restore LR and exception PC */
	ldp x30, x21, [sp, #16 * 15]
	msr elr_el1, x21

	/* Restore general purpose registers */
	ldp x28, x29, [sp, #16 * 14]
	ldp x26, x27, [sp, #16 * 13]
	ldp x24, x25, [sp, #16 * 12]
	ldp x22, x23, [sp, #16 * 11]
	ldp x20, x21, [sp, #16 * 10]
	/* Skip x18, x19 — still needed as scratch for the sp switch below */
	ldp x16, x17, [sp, #16 * 8]
	ldp x14, x15, [sp, #16 * 7]
	ldp x12, x13, [sp, #16 * 6]
	ldp x10, x11, [sp, #16 * 5]
	ldp x8, x9, [sp, #16 * 4]
	ldp x6, x7, [sp, #16 * 3]
	ldp x4, x5, [sp, #16 * 2]
	ldp x2, x3, [sp, #16 * 1]
	ldp x0, x1, [sp, #16 * 0]

	/* Restore stack pointer for exception from EL1:
	 * x18 = saved old sp, x19 = pointer to the (now abandoned) frame.
	 */
	ldr x18, [sp, #__SP_OFFSET]
	mov x19, sp
	mov sp, x18

	/* Restore x18, x19 from the old frame via x19, then return */
	ldp x18, x19, [x19, #16 * 9]
	eret

.endm
/*  Independent stack area:
 *  CPU_EXCEPT_STACK_SIZE  CPU_EXCEPT_STACK_SIZE
 * <---------------------><--------------------->
 * |============================================|
 * |                     |                      |
 * |      trap stack     |       IRQ stack      |
 * |                     |                      |
 * |============================================|
 */
.macro SAVE_IRQ_CONTEXT
	/* Use TPIDRRO_EL0 as scratch register. It is fine to do so because
	 * it will always hold a value the application can't modify and we will
	 * always be able to restore it to its desired known value anytime we
	 * want. Thus, temporarily store x0.
	 */
	msr     tpidrro_el0, x0

	/* Carve a trap frame out of the current stack, record the old sp
	 * in the frame's __SP_OFFSET slot, and save all registers.
	 */
	mov	x0, sp
	sub	sp, sp, #__TRAP_STACK_SIZE
	str     x0, [sp, #__SP_OFFSET]
	mrs	x0, tpidrro_el0
	SAVE_REGS

	/*
	 * Maintain the interrupt nesting level.
	 */
	mrs	x0, tpidr_el1
	ldr	x1, [x0, #LCPU_INTERRUPT_NESTED_OFFSET]
	add	x2, x1, #1
	str	x2, [x0, #LCPU_INTERRUPT_NESTED_OFFSET]

	/*
	 * If interrupts are nested, there is no need to switch sp:
	 * the current sp already points into the interrupt stack.
	 * A numeric local label is used here (instead of the former
	 * 'done:'), so the macro can be expanded more than once without
	 * causing a duplicate-symbol error.
	 */
	cbnz	x1, 1f

	/*
	 * First (non-nested) interrupt: switch sp to the interrupt stack.
	 * x0 still holds tpidr_el1 from the read above, so there is no
	 * need to re-read the system register.
	 */
	ldr	x0, [x0, #LCPU_INTERRUPT_SP_OFFSET]
	EXCHANGE_SP_WITH_X0
	add     sp, sp, #CPU_EXCEPT_STACK_SIZE

	/* Store the old sp (trap frame pointer) at the top of the IRQ stack */
	str	x0, [sp, #-16]!
1:
.endm

/*
 * Save full context for a synchronous exception taken from EL1.
 * System calls (SVC from 64-bit state) get a UK_SYSCALL_CTX_SIZE frame
 * on the exception stack; every other trap gets a __TRAP_STACK_SIZE
 * frame. x0 is parked in TPIDRRO_EL0 while we classify the exception
 * (same scratch-register trick as SAVE_IRQ_CONTEXT).
 */
.macro SAVE_SYNC_CONTEXT
	msr     tpidrro_el0, x0

	/* Is this trap a system call?
	 * Extract the EC field from ESR_EL1 (the orr with xzr is just a
	 * mov alias with a shifted source operand) and compare to SVC64.
	 */
	mrs	x0, esr_el1
	and	x0, x0, #ESR_EC_MASK
	orr	x0, xzr, x0, lsr #ESR_EC_SHIFT
	cmp	x0, #ESR_EL1_EC_SVC64
	bne	2f

	/* System-call path: build the syscall context on the exception stack */
	mrs     x0, tpidr_el1
	ldr	x0, [x0, #LCPU_EXCEPTION_SP_OFFSET]
	/* NOTE: We should normally align the stack before doing this
	 * subtraction because we must ensure that the `ectx` field
	 * is aligned to the corresponding ECTX alignment.
	 */
	sub	x0, x0, #UK_SYSCALL_CTX_SIZE
	EXCHANGE_SP_WITH_X0
	/* Store old SP in exception stack */
	str	x0, [sp, #__SP_OFFSET]
	b	3f
2:
	/* Non-syscall trap path: build a trap frame on the exception stack */
	mrs     x0, tpidr_el1
	ldr	x0, [x0, #LCPU_EXCEPTION_SP_OFFSET]
	EXCHANGE_SP_WITH_X0
	/* Store old SP in exception stack.
	 * NOTE(review): RESTORE_REGS later reads the old sp from
	 * [sp, #__SP_OFFSET]; this path appears to rely on
	 * __SP_OFFSET == __TRAP_STACK_SIZE - 16 — confirm against
	 * uk/plat/offset.h.
	 */
	str     x0, [sp, #-16]
	sub	sp, sp, #__TRAP_STACK_SIZE
3:
	/* Restore x0 */
	mrs     x0, tpidrro_el0
	SAVE_REGS
.endm

.macro RESTORE_IRQ_CONTEXT
	/*
	 * Drop one interrupt nesting level. When the level reaches zero,
	 * sp must switch back from the IRQ stack to the interrupted
	 * (thread) stack, whose trap-frame pointer was pushed onto the
	 * IRQ stack by SAVE_IRQ_CONTEXT.
	 */
	mrs	x0, tpidr_el1
	ldr	x1, [x0, #LCPU_INTERRUPT_NESTED_OFFSET]
	sub	x1, x1, #1
	str	x1, [x0, #LCPU_INTERRUPT_NESTED_OFFSET]

	/*
	 * A numeric local label is used here (instead of the former
	 * 'restore:'), so the macro can be expanded more than once
	 * without causing a duplicate-symbol error.
	 */
	cbnz	x1, 1f

	/* Nesting level hit zero: leave the IRQ stack */
	ldr	x0, [sp]
	mov	sp, x0
1:
	RESTORE_REGS
.endm

/*
 * Undo SAVE_SYNC_CONTEXT: restore the trap frame and eret.
 * RESTORE_REGS also switches sp back via the frame's __SP_OFFSET slot.
 */
.macro RESTORE_SYNC_CONTEXT
	RESTORE_REGS
.endm

/* Bad Abort numbers — passed as the 'reason' argument (x2) to
 * invalid_trap_handler by the el_invalid stubs below.
 */
#define BAD_SYNC  0	/* unexpected synchronous exception */
#define BAD_IRQ   1	/* unexpected IRQ */
#define BAD_FIQ   2	/* unexpected FIQ */
#define BAD_ERROR 3	/* unexpected SError */

/**
 * TODO:
 * We use SAVE_SYNC_CONTEXT here for forward compatibility.
 * Change this behavior once we have determined which stack to use for
 * invalid exceptions.
 */
/* Generate a stub for an unexpected/unhandled exception: save the full
 * context, then branch to invalid_trap_handler with
 *   x0 = pointer to the saved register frame (sp)
 *   x1 = exception level the stub was generated for
 *   x2 = reason code (one of the BAD_* values above)
 *   x3 = faulting address (FAR_EL1)
 * The handler is entered with 'b' (no return expected).
 */
#define el_invalid(name, reason, el)	\
.align 6;				\
name##_invalid:				\
	SAVE_SYNC_CONTEXT;		\
	mov x0, sp;			\
	mov x1, el;			\
	mov x2, #(reason);		\
	mrs x3, far_el1;		\
	b   invalid_trap_handler;	\
ENDPROC(name##_invalid);		\

/* Instantiate the invalid-exception stubs referenced by vectors_el1 */
el_invalid(el1_sync, BAD_SYNC, 1);
el_invalid(el0_sync, BAD_SYNC, 0);
el_invalid(el1_irq, BAD_IRQ, 1);
el_invalid(el0_irq, BAD_IRQ, 0);
el_invalid(el1_fiq, BAD_FIQ, 1);
el_invalid(el0_fiq, BAD_FIQ, 0);
el_invalid(el1_error, BAD_ERROR, 1);
el_invalid(el0_error, BAD_ERROR, 0);

/*
 * Macro for Exception vectors.
 */
/* Emit one vector-table entry: each slot in the ARMv8 vector table is
 * 0x80 bytes, hence the .align 7 (2^7 = 128) before the branch.
 */
.macro	ventry	label
.align	7
b	\label
.endm

/*
 * Exception vectors.
 */
/* EL1 exception vector table. Must be 2 KiB aligned (.align 11) as
 * required for VBAR_EL1; presumably installed into VBAR_EL1 by startup
 * code elsewhere — not visible in this file.
 * Only "Synchronous EL1h" and "IRQ EL1h" have real handlers; every
 * other entry routes to an el_invalid stub.
 */
.align	11
.globl vectors_el1
vectors_el1:
	ventry el1_sync_invalid			/* Synchronous EL1t       */
	ventry el1_irq_invalid			/* IRQ EL1t               */
	ventry el1_fiq_invalid			/* FIQ EL1t               */
	ventry el1_error_invalid		/* Error EL1t             */

	ventry el1_sync				/* Synchronous EL1h       */
	ventry el1_irq				/* IRQ EL1h               */
	ventry el1_fiq_invalid			/* FIQ EL1h               */
	ventry el1_error_invalid		/* Error EL1h             */

	ventry el0_sync_invalid			/* Synchronous 64-bit EL0 */
	ventry el0_irq_invalid			/* IRQ 64-bit EL0         */
	ventry el0_fiq_invalid			/* FIQ 64-bit EL0         */
	ventry el0_error_invalid		/* Error 64-bit EL0       */

	ventry el0_sync_invalid			/* Synchronous 32-bit EL0 */
	ventry el0_irq_invalid			/* IRQ 32-bit EL0         */
	ventry el0_fiq_invalid			/* FIQ 32-bit EL0         */
	ventry el0_error_invalid		/* Error 32-bit EL0       */

.align 6
/* Synchronous exception from EL1h: save the context, call the C handler
 * trap_el1_sync(regs, far) with x0 = pointer to the saved register
 * frame and x1 = faulting address, then restore. There is no fall
 * through: RESTORE_REGS (inside RESTORE_SYNC_CONTEXT) ends with eret.
 */
el1_sync:
	SAVE_SYNC_CONTEXT
	mov	x0, sp
	mrs	x1, FAR_EL1
	bl	trap_el1_sync
	RESTORE_SYNC_CONTEXT

.align 6
/* IRQ from EL1h: save the context (switching to the IRQ stack if not
 * nested), call trap_el1_irq, then restore. No fall through: the
 * restore path ends with eret.
 */
el1_irq:
	SAVE_IRQ_CONTEXT
	/* Unmask Debug (8), SError (4) and FIQ (1); IRQ (2) stays masked */
	msr 	daifclr, #(8 | 4 | 1)
	/* x0 = value at the top of the IRQ stack — on the non-nested path
	 * this is the trap-frame pointer pushed by SAVE_IRQ_CONTEXT.
	 * NOTE(review): on the nested path [sp] holds the saved x0/x1 pair
	 * of the frame instead — confirm trap_el1_irq's expectation.
	 */
	ldr	x0, [sp]
	bl	trap_el1_irq
	RESTORE_IRQ_CONTEXT