/*
 * Copyright 2019-2022 Haiku, Inc. All Rights Reserved.
 * Distributed under the terms of the MIT License.
 */

#include <arch/arm/arch_cpu.h>
#include <asm_defs.h>

#include "asm_offsets.h"
#include "syscall_numbers.h"


.text

FUNCTION(_thread_exit_syscall):
	svc	#((SYSCALL_EXIT_THREAD << 5) | 1)
FUNCTION_END(_thread_exit_syscall)

/* swap sp and \xt without using any scratch register */
.macro xchg_sp xt
	add	sp, sp, \xt
	sub	\xt, sp, \xt
	sub	sp, sp, \xt
.endm

.macro EXCEPTION_ENTRY el
	// interrupts are automatically disabled by hardware

	// avoid using sp in case it is misaligned
	// swap sp with x19 and use it instead
	xchg_sp	x19

	// x19 is now the stack top, make room for IFRAME
	sub	x19, x19, #(IFRAME_sizeof)

	stp	x0,  x1,  [x19, #(IFRAME_x + 0 * 8)]
	stp	x2,  x3,  [x19, #(IFRAME_x + 2 * 8)]
	stp	x4,  x5,  [x19, #(IFRAME_x + 4 * 8)]
	stp	x6,  x7,  [x19, #(IFRAME_x + 6 * 8)]
	stp	x8,  x9,  [x19, #(IFRAME_x + 8 * 8)]
	stp	x10, x11, [x19, #(IFRAME_x + 10 * 8)]
	stp	x12, x13, [x19, #(IFRAME_x + 12 * 8)]
	stp	x14, x15, [x19, #(IFRAME_x + 14 * 8)]
	stp	x16, x17, [x19, #(IFRAME_x + 16 * 8)]
	mov	x0, sp	// original x19 that we swapped with sp
	stp	x18, x0,  [x19, #(IFRAME_x + 18 * 8)]
	stp	x20, x21, [x19, #(IFRAME_x + 20 * 8)]
	stp	x22, x23, [x19, #(IFRAME_x + 22 * 8)]
	stp	x24, x25, [x19, #(IFRAME_x + 24 * 8)]
	stp	x26, x27, [x19, #(IFRAME_x + 26 * 8)]
	stp	x28, fp,  [x19, #(IFRAME_x + 28 * 8)]
	str	x30, [x19, #(IFRAME_lr)]

.if \el == 0
	mrs	x0, SP_EL0
.else
	// add sizeof back here to store the original sp
	add	x0, x19, #(IFRAME_sizeof)
.endif

	mrs	x1, ELR_EL1
	mrs	x2, SPSR_EL1
	mrs	x3, ESR_EL1
	mrs	x4, FAR_EL1

	str	x0, [x19, #(IFRAME_sp)]
	str	x1, [x19, #(IFRAME_elr)]
	str	x2, [x19, #(IFRAME_spsr)]
	str	x3, [x19, #(IFRAME_esr)]
	str	x4, [x19, #(IFRAME_far)]
.endm

.macro EXCEPTION_RETURN el
	// x19 is callee-saved so it still points to IFRAME
	// x0, x1, x18, x19 will be restored at the very end

	ldr	x0, [x19, #(IFRAME_elr)]
	ldr	x1, [x19, #(IFRAME_spsr)]
	ldr	x18, [x19, #(IFRAME_sp)]

	// x0 and x1 will be restored later
	ldp	x2,  x3,  [x19, #(IFRAME_x + 2 * 8)]
	ldp	x4,  x5,  [x19, #(IFRAME_x + 4 * 8)]
	ldp	x6,  x7,  [x19, #(IFRAME_x + 6 * 8)]
	ldp	x8,  x9,  [x19, #(IFRAME_x + 8 * 8)]
	ldp	x10, x11, [x19, #(IFRAME_x + 10 * 8)]
	ldp	x12, x13, [x19, #(IFRAME_x + 12 * 8)]
	ldp	x14, x15, [x19, #(IFRAME_x + 14 * 8)]
	ldp	x16, x17, [x19, #(IFRAME_x + 16 * 8)]
	// x18 and x19 will be restored later
	ldp	x20, x21, [x19, #(IFRAME_x + 20 * 8)]
	ldp	x22, x23, [x19, #(IFRAME_x + 22 * 8)]
	ldp	x24, x25, [x19, #(IFRAME_x + 24 * 8)]
	ldp	x26, x27, [x19, #(IFRAME_x + 26 * 8)]
	ldp	x28, fp,  [x19, #(IFRAME_x + 28 * 8)]
	ldr	x30, [x19, #(IFRAME_lr)]

	// disable interrupts before restoring ELR/SPSR/sp
	msr	DAIFSet, #0xf
	msr	ELR_EL1, x0
	msr	SPSR_EL1, x1

.if \el == 0
	// load the stack pointer for EL0 from IFRAME
	msr	SP_EL0, x18

	// unwind our own stack pointer
	add	sp, x19, #(IFRAME_sizeof)
.else
	// the saved sp already points just above the IFRAME,
	// so no extra unwinding is needed
	mov	sp, x18
.endif

	// finally restore the remaining registers
	ldp	x0,  x1,  [x19, #(IFRAME_x + 0 * 8)]
	ldp	x18, x19, [x19, #(IFRAME_x + 18 * 8)]

	eret
.endm

.macro EXCEPTION_HANDLER el name func
STATIC_FUNCTION(handle_\name):
	EXCEPTION_ENTRY	\el

	// prepare an aligned sp for the C function
	and	sp, x19, #0xfffffffffffffff0

	// call the C handler, passing the IFRAME in x0;
	// the handler can enable interrupts if it wants to
	mov	x0, x19
	mov	x29, x0
	bl	\func

	EXCEPTION_RETURN	\el
FUNCTION_END(handle_\name)
.endm

.macro vector name
	.align 7
	b	handle_\name
.endm

.macro vempty
	.align 7
	brk	0xfff
1:	b	1b
.endm

.align 11
.globl _exception_vectors
_exception_vectors:
	vempty			/* Synchronous EL1t */
	vempty			/* IRQ EL1t */
	vempty			/* FIQ EL1t */
	vempty			/* Error EL1t */

	vector	el1h_sync	/* Synchronous EL1h */
	vector	el1h_irq	/* IRQ EL1h */
	vector	el1h_fiq	/* FIQ EL1h */
	vector	el1h_error	/* Error EL1h */

	vector	el0_sync	/* Synchronous 64-bit EL0 */
	vector	el0_irq		/* IRQ 64-bit EL0 */
	vector	el0_fiq		/* FIQ 64-bit EL0 */
	vector	el0_error	/* Error 64-bit EL0 */

	vempty			/* Synchronous 32-bit EL0 */
	vempty			/* IRQ 32-bit EL0 */
	vempty			/* FIQ 32-bit EL0 */
	vempty			/* Error 32-bit EL0 */

EXCEPTION_HANDLER 1 el1h_sync do_sync_handler
EXCEPTION_HANDLER 1 el1h_irq do_irq_handler
EXCEPTION_HANDLER 1 el1h_fiq do_fiq_handler
EXCEPTION_HANDLER 1 el1h_error do_error_handler

EXCEPTION_HANDLER 0 el0_sync do_sync_handler
EXCEPTION_HANDLER 0 el0_irq do_irq_handler
EXCEPTION_HANDLER 0 el0_fiq do_fiq_handler
EXCEPTION_HANDLER 0 el0_error do_error_handler
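// _eret_with_iframe: enter userspace from a caller-built iframe (x0).
// The callee-saved registers are zeroed first so that no stale kernel
// values leak to EL0; EXCEPTION_RETURN then restores user state from
// the iframe and executes eret.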
FUNCTION(_eret_with_iframe):
	mov	x20, xzr
	mov	x21, xzr
	mov	x22, xzr
	mov	x23, xzr
	mov	x24, xzr
	mov	x25, xzr
	mov	x26, xzr
	mov	x27, xzr
	mov	x28, xzr
	mov	x29, xzr
	mov	x19, x0
	EXCEPTION_RETURN 0
FUNCTION_END(_eret_with_iframe)

FUNCTION(_fp_save):
	stp	q0,  q1,  [x0], #32
	stp	q2,  q3,  [x0], #32
	stp	q4,  q5,  [x0], #32
	stp	q6,  q7,  [x0], #32
	stp	q8,  q9,  [x0], #32
	stp	q10, q11, [x0], #32
	stp	q12, q13, [x0], #32
	stp	q14, q15, [x0], #32
	stp	q16, q17, [x0], #32
	stp	q18, q19, [x0], #32
	stp	q20, q21, [x0], #32
	stp	q22, q23, [x0], #32
	stp	q24, q25, [x0], #32
	stp	q26, q27, [x0], #32
	stp	q28, q29, [x0], #32
	stp	q30, q31, [x0], #32
	mrs	x1, FPSR
	mrs	x2, FPCR
	str	x1, [x0], #8
	str	x2, [x0], #8

	// reset FPCR and FPSR to prevent userspace state from affecting
	// the kernel (the FPCR write is skipped if it is already zero)
	msr	FPSR, xzr
	cmp	x2, xzr
	beq	1f
	msr	FPCR, xzr
1:
	ret
FUNCTION_END(_fp_save)

FUNCTION(_fp_restore):
	ldp	q0,  q1,  [x0], #32
	ldp	q2,  q3,  [x0], #32
	ldp	q4,  q5,  [x0], #32
	ldp	q6,  q7,  [x0], #32
	ldp	q8,  q9,  [x0], #32
	ldp	q10, q11, [x0], #32
	ldp	q12, q13, [x0], #32
	ldp	q14, q15, [x0], #32
	ldp	q16, q17, [x0], #32
	ldp	q18, q19, [x0], #32
	ldp	q20, q21, [x0], #32
	ldp	q22, q23, [x0], #32
	ldp	q24, q25, [x0], #32
	ldp	q26, q27, [x0], #32
	ldp	q28, q29, [x0], #32
	ldp	q30, q31, [x0], #32
	ldr	x1, [x0], #8
	msr	FPSR, x1

	// avoid restoring FPCR if it hasn't changed
	ldr	x2, [x0], #8
	mrs	x3, FPCR
	cmp	x3, x2
	beq	1f
	msr	FPCR, x2
1:
	ret
FUNCTION_END(_fp_restore)
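// _arch_context_swap: save the current thread's context into the buffer
// at x0 and resume the context stored at x1. Only state that must survive
// a function call is kept: the callee-saved registers x19-x30, sp and
// d8-d15, plus the userspace thread pointer TPIDR_EL0. The restored x29
// doubles as the entry-point argument for newly created threads (see the
// comment at the end).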
FUNCTION(_arch_context_swap):
	// save
	stp	x19, x20, [x0], #16
	stp	x21, x22, [x0], #16
	stp	x23, x24, [x0], #16
	stp	x25, x26, [x0], #16
	stp	x27, x28, [x0], #16
	stp	x29, x30, [x0], #16

	mov	x2, sp
	mrs	x3, TPIDR_EL0
	stp	x2, x3, [x0], #16

	stp	d8,  d9,  [x0], #16
	stp	d10, d11, [x0], #16
	stp	d12, d13, [x0], #16
	stp	d14, d15, [x0], #16

	// restore
	ldp	x19, x20, [x1], #16
	ldp	x21, x22, [x1], #16
	ldp	x23, x24, [x1], #16
	ldp	x25, x26, [x1], #16
	ldp	x27, x28, [x1], #16
	ldp	x29, x30, [x1], #16

	ldp	x2, x3, [x1], #16
	mov	sp, x2
	msr	TPIDR_EL0, x3

	ldp	d8,  d9,  [x1], #16
	ldp	d10, d11, [x1], #16
	ldp	d12, d13, [x1], #16
	ldp	d14, d15, [x1], #16

	// pass x29 as the argument to the thread entry function
	mov	x0, x29
	ret
FUNCTION_END(_arch_context_swap)


/*!	\fn void arch_debug_call_with_fault_handler(cpu_ent* cpu,
		jmp_buf jumpBuffer, void (*function)(void*), void* parameter)
	Called by debug_call_with_fault_handler() to do the dirty work of setting
	the fault handler and calling the function. If the function causes a page
	fault, arch_debug_call_with_fault_handler() calls longjmp() with the
	given \a jumpBuffer. Otherwise it returns normally.

	debug_call_with_fault_handler() has already saved the CPU's fault_handler
	and fault_handler_stack_pointer and will reset them later, so
	arch_debug_call_with_fault_handler() doesn't need to care about them.

	\param cpu The \c cpu_ent for the current CPU.
	\param jumpBuffer Buffer to be used for longjmp().
	\param function The function to be called.
	\param parameter The parameter to be passed to the function to be called.
*/
FUNCTION(arch_debug_call_with_fault_handler):
	adrp	x4, fault
	add	x4, x4, :lo12:fault
	str	x4, [x0, #CPU_ENT_fault_handler]
	str	x1, [x0, #CPU_ENT_fault_handler_stack_pointer]
	mov	x0, x3
	br	x2

fault:
	mov	x0, sp
	mov	x1, #1
	b	longjmp
FUNCTION_END(arch_debug_call_with_fault_handler)


/* addr_t arm64_get_fp(void) */
FUNCTION(arm64_get_fp):
	mov	x0, x29
	ret
FUNCTION_END(arm64_get_fp)
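/*
 * Sketch of the fault round-trip through arch_debug_call_with_fault_handler()
 * above (an assumption about the generic fault code, which lives outside this
 * file): the caller does setjmp(jumpBuffer) first; if \a function faults, the
 * fault code switches sp to cpu->fault_handler_stack_pointer (set to the
 * jumpBuffer here) and branches to cpu->fault_handler (the "fault" label), so
 * "mov x0, sp" recovers the jump buffer and longjmp(jumpBuffer, 1) returns
 * control to debug_call_with_fault_handler().
 */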