/* SPDX-License-Identifier: BSD-2-Clause */

/**
 * @file
 *
 * @ingroup RTEMSScoreCPUAArch64
 *
 * @brief This source file contains the implementation of
 *   _CPU_Exception_dispatch_and_resume() and _CPU_Exception_resume().
 */

/*
 * Copyright (C) 2020 On-Line Applications Research Corporation (OAR)
 * Written by Kinsey Moore <kinsey.moore@oarcorp.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <rtems/asm.h>

.globl  _CPU_Exception_dispatch_and_resume
.globl  _CPU_Exception_resume

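/*
 * Note on the definitions below: SELF_CPU_CONTROL_GET_REG names the register
 * handed to GET_SELF_CPU_CONTROL, w19 only in the uniprocessor ILP32
 * configuration (presumably because the per-CPU pointer is 32 bits wide
 * there) and x19 otherwise.  SELF_CPU_CONTROL is always x19, since the loads
 * and stores below need the full 64-bit base register.
 */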
#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
  #ifdef RTEMS_SMP
    #define SELF_CPU_CONTROL_GET_REG x19
  #else
    #define SELF_CPU_CONTROL_GET_REG w19
  #endif
#else
  #define SELF_CPU_CONTROL_GET_REG x19
#endif
#define SELF_CPU_CONTROL x19

/*
 * This function is expected to resume execution using the CPU_Exception_frame
 * provided in x0. This function does not adhere to the AAPCS64 calling
 * convention because all necessary state is contained within the exception
 * frame.
 */
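/*
 * A rough C-level view of the contract (the prototype below is an assumption
 * inferred from the frame pointer being passed in x0, not a copy of the
 * actual declaration):
 *
 *   void _CPU_Exception_resume( CPU_Exception_frame *frame );
 *
 * The routine does not return to its caller: it restores all state from the
 * frame and leaves via an exception return (eret).
 */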
_CPU_Exception_resume:
/* Reset stack pointer */
    mov sp, x0

/* call CEF restore routine (doesn't restore lr) */
    bl .pop_exception_context

/* get lr from CEF */
    ldr lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]

/* drop space reserved for CEF */
    add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE

/* switch to thread stack */
    msr spsel, #1
    eret

/*
 * This function is expected to undo dispatch disabling, perform dispatch, and
 * resume execution using the CPU_Exception_frame provided in x0. This function
 * does not adhere to the AAPCS64 calling convention because all necessary
 * state is contained within the exception frame.
 */
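/*
 * A rough C-level sketch of the intended flow (the prototype and the helper
 * names below are illustrative assumptions, not the actual implementation):
 *
 *   void _CPU_Exception_dispatch_and_resume( CPU_Exception_frame *frame )
 *   {
 *     undo_dispatch_disable_and_isr_nesting();
 *     copy_frame_to_thread_stack( frame );
 *     if ( thread_dispatch_is_needed_and_allowed() ) {
 *       _AArch64_Exception_thread_dispatch();
 *     }
 *     pop_exception_context_and_eret();
 *   }
 */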
_CPU_Exception_dispatch_and_resume:
/* Get per-CPU control of current processor */
    GET_SELF_CPU_CONTROL    SELF_CPU_CONTROL_GET_REG

/* Reset stack pointer */
    mov sp, x0

/* Check dispatch disable and perform dispatch if necessary */
/* Load some per-CPU variables */
    ldr w0, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
    ldrb    w1, [SELF_CPU_CONTROL, #PER_CPU_DISPATCH_NEEDED]
    ldr w2, [SELF_CPU_CONTROL, #PER_CPU_ISR_DISPATCH_DISABLE]
    ldr w3, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]

/* Decrement levels and determine thread dispatch state */
    eor w1, w1, w0
    sub w0, w0, #1
    orr w1, w1, w0
    orr w1, w1, w2
    sub w3, w3, #1
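/*
 * Reading of the instruction sequence above: w1 is now zero exactly when a
 * thread dispatch has to be performed, i.e. the disable level was 1 (0 after
 * the decrement), a dispatch is needed, and ISR dispatch is not disabled.  In
 * C-like terms:
 *
 *   should_skip_thread_dispatch = ( dispatch_needed ^ disable_level )
 *                               | ( disable_level - 1 )
 *                               | isr_dispatch_disable;
 */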

/* Store thread dispatch disable and ISR nest levels */
    str w0, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
    str w3, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]

/* store should_skip_thread_dispatch in x22 */
    mov x22, x1

/*
 * It is now safe to assume that the source of the exception has been resolved.
 * Copy the exception frame to the thread stack to be compatible with thread
 * dispatch. This may arbitrarily clobber corruptible registers since all
 * important state is contained in the exception frame.
 *
 * No need to save current LR since this will never return to the caller.
 */
    bl .move_exception_frame_and_switch_to_thread_stack

/*
 * Check thread dispatch necessary, ISR dispatch disable and thread dispatch
 * disable level.
 */
    cmp     x22, #0
    bne     .Lno_need_thread_dispatch_resume
    bl      _AArch64_Exception_thread_dispatch
.Lno_need_thread_dispatch_resume:
/* call CEF restore routine (doesn't restore lr) */
    bl .pop_exception_context

/* get lr from CEF */
    ldr lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]

/* drop space reserved for CEF */
    add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
    eret

/* Assumes sp currently points to the EF on the exception stack and SPSel is 0 */
.move_exception_frame_and_switch_to_thread_stack:
    mov x1, sp                                                      /* Set x1 to the current exception frame */
    msr spsel, #1                                                   /* switch to thread stack */
    ldr x0, [x1, #AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET]       /* Get thread SP from exception frame since it may have been updated */
    mov sp, x0
    sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE                       /* reserve space for CEF */
    mov x0, sp                                                      /* Set x0 to the new exception frame */
    mov x20, lr                                                     /* Save LR */
    bl _AArch64_Exception_frame_copy                                /* Copy exception frame to reserved thread stack space */
    mov lr, x20                                                     /* Restore LR */
    msr spsel, #0                                                   /* switch to exception stack */
    add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE                       /* release space for CEF on exception stack */
    msr spsel, #1                                                   /* switch to thread stack */
    ret

/*
 * Apply the exception frame to the current register status; SP points to the EF
 */
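/*
 * The literal offsets used for the x register pairs below rely on the integer
 * registers sitting at the very start of the exception frame: x0/x1 at #0x00,
 * x2/x3 at #0x10, and so on up to x28/x29 at #0xe0.
 */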
.pop_exception_context:
/* Pop daif and spsr */
    ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET]
/* Restore daif and spsr */
    msr DAIF, x2
    msr SPSR_EL1, x3
/* Pop ESR and FAR */
    ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET]
/* Restore ESR and FAR */
    msr ESR_EL1, x2
    msr FAR_EL1, x3
/* Pop fpsr and fpcr */
    ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET]
/* Restore fpsr and fpcr */
    msr FPSR, x2
    msr FPCR, x3
/* Pop VFP registers */
    ldp q0,  q1,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x000)]
    ldp q2,  q3,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x020)]
    ldp q4,  q5,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x040)]
    ldp q6,  q7,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x060)]
    ldp q8,  q9,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x080)]
    ldp q10, q11, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0a0)]
    ldp q12, q13, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0c0)]
    ldp q14, q15, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0e0)]
    ldp q16, q17, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x100)]
    ldp q18, q19, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x120)]
    ldp q20, q21, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x140)]
    ldp q22, q23, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x160)]
    ldp q24, q25, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x180)]
    ldp q26, q27, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1a0)]
    ldp q28, q29, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1c0)]
    ldp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
/* Pop x0-x29(fp) */
    ldp x2,  x3,  [sp, #0x10]
    ldp x4,  x5,  [sp, #0x20]
    ldp x6,  x7,  [sp, #0x30]
    ldp x8,  x9,  [sp, #0x40]
    ldp x10, x11, [sp, #0x50]
    ldp x12, x13, [sp, #0x60]
    ldp x14, x15, [sp, #0x70]
    ldp x16, x17, [sp, #0x80]
    ldp x18, x19, [sp, #0x90]
    ldp x20, x21, [sp, #0xa0]
    ldp x22, x23, [sp, #0xb0]
    ldp x24, x25, [sp, #0xc0]
    ldp x26, x27, [sp, #0xd0]
    ldp x28, x29, [sp, #0xe0]
/* Pop ELR, SP already popped */
    ldr x1, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET + 0x8)]
/* Restore exception LR */
    msr ELR_EL1, x1
    ldp x0,  x1,  [sp, #0x00]

/* We must clear reservations to ensure consistency with atomic operations */
    clrex

    ret