/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arch/asm_support.h"
#include "arch/aarch64/helpers_aarch64.S"

// extern "C" uint64_t InvokeBuiltinHandleException(JSThread *thread, JSTaggedValue retval)
.extern InvokeBuiltinHandleException

// CompiledCodeToBuiltinBridge, follows DynamicMethod calling convention
.global CompiledCodeToBuiltinBridge
.type CompiledCodeToBuiltinBridge, %function
CompiledCodeToBuiltinBridge:
    // Bridge from JIT/AOT-compiled code into a native builtin implementation.
    // DynamicMethod calling convention (per the comment above):
    //   x0 = Method* of the builtin (its native entrypoint is read from
    //        METHOD_NATIVE_POINTER_OFFSET below)
    //   x1 = presumably the argument count passed through to the builtin —
    //        TODO(review): confirm against the DynamicMethod convention
    //   stack args (if any) live just above the caller's frame, at fp + 16
    // Returns: x0/x1 as produced by the builtin (or by
    //   InvokeBuiltinHandleException on the exception path).
    CFI_STARTPROC
    CFI_DEF_CFA(sp, 0)

    // setup frame: standard AArch64 pair-push of fp/lr, then anchor the CFA
    // on fp so the later sp adjustments need no extra CFI bookkeeping
    stp fp, lr, [sp, #-16]!
    CFI_ADJUST_CFA_OFFSET(2 * 8)
    CFI_REL_OFFSET(lr, 8)
    CFI_REL_OFFSET(fp, 0)
    mov fp, sp
    CFI_DEF_CFA_REGISTER(fp)
    // Lay down the CFrame header: frame kind tag + Method* (x0), in the slots
    // directly below fp, matching the layout the stack walker expects
    mov x9, #CFRAME_KIND_NATIVE
    stp x9, x0, [sp, #-16]!
    // Publish this frame to the thread so the stack walker / GC can find it,
    // and mark the top frame as a compiled (non-interpreter) frame kind
    str fp, [THREAD_REG, #MANAGED_THREAD_FRAME_OFFSET]
    mov w9, #1
    strb w9, [THREAD_REG, #MANAGED_THREAD_FRAME_KIND_OFFSET]
    // sp must be 16 bytes aligned
    // (every adjustment below is a multiple of 16, assuming
    // CFRAME_LOCALS_COUNT * 8 is too — defined in the included headers)

    // Skip locals
    sub sp, sp, #(CFRAME_LOCALS_COUNT * 8)

    // save all the callee saved registers to the stack
    // stack walker will read them during stack unwinding
    // (slot order below must mirror PUSH_CALLEE_REGS; offsets are fp-relative
    // since the CFA now lives in fp)
    PUSH_CALLEE_REGS sp
    CFI_REL_OFFSET(THREAD_REG, -((CFRAME_CALLEE_REGS_START_SLOT + 0) * 8))
    CFI_REL_OFFSET(x27, -((CFRAME_CALLEE_REGS_START_SLOT + 1) * 8))
    CFI_REL_OFFSET(x26, -((CFRAME_CALLEE_REGS_START_SLOT + 2) * 8))
    CFI_REL_OFFSET(x25, -((CFRAME_CALLEE_REGS_START_SLOT + 3) * 8))
    CFI_REL_OFFSET(x24, -((CFRAME_CALLEE_REGS_START_SLOT + 4) * 8))
    CFI_REL_OFFSET(x23, -((CFRAME_CALLEE_REGS_START_SLOT + 5) * 8))
    CFI_REL_OFFSET(x22, -((CFRAME_CALLEE_REGS_START_SLOT + 6) * 8))
    CFI_REL_OFFSET(x21, -((CFRAME_CALLEE_REGS_START_SLOT + 7) * 8))
    CFI_REL_OFFSET(x20, -((CFRAME_CALLEE_REGS_START_SLOT + 8) * 8))
    CFI_REL_OFFSET(x19, -((CFRAME_CALLEE_REGS_START_SLOT + 9) * 8))
    CFI_REL_OFFSET(d15, -((CFRAME_CALLEE_REGS_START_SLOT + 10) * 8))
    CFI_REL_OFFSET(d14, -((CFRAME_CALLEE_REGS_START_SLOT + 11) * 8))
    CFI_REL_OFFSET(d13, -((CFRAME_CALLEE_REGS_START_SLOT + 12) * 8))
    CFI_REL_OFFSET(d12, -((CFRAME_CALLEE_REGS_START_SLOT + 13) * 8))
    CFI_REL_OFFSET(d11, -((CFRAME_CALLEE_REGS_START_SLOT + 14) * 8))
    CFI_REL_OFFSET(d10, -((CFRAME_CALLEE_REGS_START_SLOT + 15) * 8))
    CFI_REL_OFFSET(d9, -((CFRAME_CALLEE_REGS_START_SLOT + 16) * 8))
    CFI_REL_OFFSET(d8, -((CFRAME_CALLEE_REGS_START_SLOT + 17) * 8))

    // save arguments to the stack
    // reserve 6 slots for the remaining caller GP arg registers (unused here),
    // then spill x0 (Method*) and x1 so the full 8-slot caller-GP area of the
    // CFrame is in place; 6*8 + 16 == 8*8, undone in one add after the call
    sub sp, sp, 6 * 8 // 6 unused caller gp
    stp x0, x1, [sp, #-16]!

    add x9, fp, #16 // pointer to stackArgs

    // Build the call-info structure the builtin receives via x0:
    //   [sp + 0] = JSThread*   (THREAD_REG)
    //   [sp + 8] = x1          (presumably the arg count — see header note)
    //   [sp + 16] = stackArgs pointer
    //   [sp + 24] = unused padding, keeps sp 16-byte aligned
    sub sp, sp, 4 * 8
    str THREAD_REG, [sp, 0 * 8]
    str x1,         [sp, 1 * 8]
    str x9,         [sp, 2 * 8]

    // Tail of the dispatch: load the builtin's native entrypoint from the
    // Method and call it with x0 = pointer to the call-info block above
    ldr lr, [x0, #METHOD_NATIVE_POINTER_OFFSET]
    mov x0, sp
    blr lr

    // TODO(vpukhov): check ret instead
    // Pending-exception check: a non-null thread exception slot means the
    // builtin threw; divert to the handler, which returns the replacement
    // retval in x0 before falling back to the common epilogue at 1:
    ldr x9, [THREAD_REG, #MANAGED_THREAD_EXCEPTION_OFFSET]
    cbnz x9, .Lhandle_exception
1:

    // Epilogue: drop the call-info block and the caller-GP area in one step
    // (4*8 call info + 8*8 caller gp, matching the three subs/stores above),
    // then restore callee-saved registers in PUSH order
    add sp, sp, 4 * 8 + 8 * 8 // skip call_info, skip caller gp
    POP_CALLEE_REGS sp
    CFI_RESTORE(THREAD_REG)
    CFI_RESTORE(x27)
    CFI_RESTORE(x26)
    CFI_RESTORE(x25)
    CFI_RESTORE(x24)
    CFI_RESTORE(x23)
    CFI_RESTORE(x22)
    CFI_RESTORE(x21)
    CFI_RESTORE(x20)
    CFI_RESTORE(x19)
    CFI_RESTORE(d15)
    CFI_RESTORE(d14)
    CFI_RESTORE(d13)
    CFI_RESTORE(d12)
    CFI_RESTORE(d11)
    CFI_RESTORE(d10)
    CFI_RESTORE(d9)
    CFI_RESTORE(d8)

    // Tear down the frame: sp back to fp (skips locals + CFrame header in one
    // move), then pop fp/lr and return to the compiled caller
    mov sp, fp
    ldp fp, lr, [sp], #16
    CFI_RESTORE(lr)
    CFI_RESTORE(fp)
    CFI_DEF_CFA(sp, 0)
    ret
    CFI_ENDPROC

// Out-of-line exception path: x0 still holds the builtin's retval here.
// Calls InvokeBuiltinHandleException(thread, retval); its return value
// (in x0) becomes the bridge's result via the shared epilogue at 1:.
// NOTE(review): this code sits after CFI_ENDPROC, so it carries no unwind
// info — confirm the unwinder never needs to walk through this path.
.Lhandle_exception:
    mov x1, x0
    mov x0, THREAD_REG
    bl InvokeBuiltinHandleException
    b 1b
