/* SPDX-License-Identifier: BSD-2-Clause */

/**
 * @file
 *
 * @ingroup RTEMSScoreCPUAArch64
 *
 * @brief Implementation of AArch64 exception vector table.
 *
 * This file implements the AArch64 exception vector table and its embedded
 * jump handlers along with the code necessary to call higher-level C handlers.
 */

/*
 * Copyright (C) 2020 On-Line Applications Research Corporation (OAR)
 * Written by Kinsey Moore <kinsey.moore@oarcorp.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <rtems/asm.h>

.extern _AArch64_Exception_default

.globl  bsp_start_vector_table_begin
.globl  bsp_start_vector_table_end
.globl  bsp_start_vector_table_size
.globl  bsp_vector_table_size

.section ".text"

/*
 * This is the exception vector table and the pointers to the default
 * exception handlers. Each vector in the table has space for up to 32
 * instructions. The space of the last two instructions in each vector is used
 * for the exception handler pointer.
 *
 * The operation of all exceptions is as follows:
 * * An exception occurs
 * * A vector is chosen based on the exception type and machine state
 * * Execution begins at the chosen vector
 * * X0 and LR are pushed onto the current stack
 * * An unconditional branch and link is taken to the next instruction to get
 *   the PC
 * * The exception handler pointer (EHP) is retrieved from the current vector
 *   using the PC
 * * Branch and link to the EHP
 * * X0 and LR are popped from the current stack after returning from the EHP
 * * The exception returns to the previous execution state
 */

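/*
 * Note on sizing, derived from the layout below: each of the short vectors
 * consists of a two-instruction prologue (stp + bl), the 28 instructions
 * emitted by JUMP_HANDLER (6 real instructions plus 22 nops), and the two
 * instruction slots reserved by a JUMP_TARGET_* macro, for a total of
 * 32 instructions (0x80 bytes). This places the exception handler pointer at
 * offset 0x78 of every vector, which is where JUMP_HANDLER loads it from.
 */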
    .macro  JUMP_HANDLER
/* Mask to use in BIC, lower 7 bits */
    mov x0, #0x7f
/* LR contains PC, mask off to the base of the current vector */
    bic x0, lr, x0
/* Load address from the last word in the vector */
    ldr x0, [x0, #0x78]
/*
 * Branch and link to the address in x0. There is no reason to save the current
 * LR since it has already been saved and the current contents are junk.
 */
    blr x0
/* Pop x0,lr from stack */
    ldp x0, lr, [sp], #0x10
/* Return from exception */
    eret
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    .endm

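/*
 * The JUMP_TARGET_* macros emit the exception handler pointer that occupies
 * the final two instruction slots (offset 0x78) of a vector. For ILP32 the
 * pointer is only 32 bits wide, so it is padded with a zero word to keep the
 * slot 8 bytes; the 64-bit load in JUMP_HANDLER then yields the zero-extended
 * pointer on a little-endian system.
 */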
    .macro  JUMP_TARGET_SP0
/* Takes up the space of 2 instructions */
#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
    .word .print_exception_dump_sp0
    .word 0x0
#else
    .dword .print_exception_dump_sp0
#endif
    .endm

    .macro  JUMP_TARGET_SPx
/* Takes up the space of 2 instructions */
#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
    .word .print_exception_dump_spx
    .word 0x0
#else
    .dword .print_exception_dump_spx
#endif
    .endm

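/*
 * The vector table proper follows. It contains the 16 architecturally defined
 * vectors in four groups of four (synchronous, IRQ, FIQ, SError): current EL
 * with SP_EL0, current EL with SP_ELx, lower EL using AArch64, and lower EL
 * using AArch32. The table is aligned to 0x800 bytes because the vector base
 * address installed in a VBAR_ELx register must be 2KiB aligned.
 */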
bsp_start_vector_table_begin:
.balign 0x800
Vector_table_el3:
/*
 * The exception handler for synchronous exceptions from the current EL
 * using SP0.
 */
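/*
 * Unlike the other vectors, which only stash x0/LR and defer to the default
 * dump routines at the end of this file, the two current-EL synchronous
 * vectors build the full exception frame inline and pass the frame's address
 * in x0 to the handler loaded from the vector's last slot.
 */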
curr_el_sp0_sync:
    sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE           /* reserve space for CEF */
    str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]   /* shove lr into CEF */
    bl .push_exception_context_start                /* bl to CEF store routine */
/* Save original sp in x0 for .push_exception_context_finish */
    add x0, sp, #AARCH64_EXCEPTION_FRAME_SIZE           /* save original sp */
/* Push the remainder of the context */
    bl .push_exception_context_finish
/* get jump target and branch/link */
    bl curr_el_sp0_sync_get_pc      /* Get current execution address */
curr_el_sp0_sync_get_pc:            /* The current PC is now in LR */
    mov x0, #0x7f               /* Mask to use in BIC, lower 7 bits */
    bic x0, lr, x0              /* Mask LR to base of current vector */
    ldr x1, [x0, #0x78]         /* Load target from last word in vector */
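/*
 * The table is 0x800 aligned and each vector is 0x80 bytes, so bits [10:7] of
 * the link register hold the vector index (0-15). For example, this vector
 * (curr_el_sp0_sync, offset 0x000) yields 0 and curr_el_spx_sync
 * (offset 0x200) yields 4.
 */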
    and lr, lr, #0x780          /* Mask off bits for vector number */
    lsr lr, lr, #7              /* Shift the vector bits down */
/* Store the vector */
    str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
    mov x0, sp
    blr x1
    b twiddle                   /* Spin if the handler returns */
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
/* Takes up the space of 2 instructions */
#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
    .word _AArch64_Exception_default
    .word 0x0
#else
    .dword _AArch64_Exception_default
#endif
.balign 0x80
/* The exception handler for IRQ exceptions from the current EL using SP0. */
curr_el_sp0_irq:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl curr_el_sp0_irq_get_pc   /* Get current execution address */
curr_el_sp0_irq_get_pc:         /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SP0
.balign 0x80
/* The exception handler for FIQ exceptions from the current EL using SP0. */
curr_el_sp0_fiq:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl curr_el_sp0_fiq_get_pc   /* Get current execution address */
curr_el_sp0_fiq_get_pc:         /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SP0
.balign 0x80
/*
 * The exception handler for system error exceptions from the current EL using
 * SP0.
 */
curr_el_sp0_serror:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl curr_el_sp0_serror_get_pc    /* Get current execution address */
curr_el_sp0_serror_get_pc:      /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SP0
.balign 0x80
/*
 * The exception handler for synchronous exceptions from the current EL using
 * the current SP.
 */
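/*
 * SPSel selects which stack pointer SP maps to: SPSel=0 selects SP_EL0, used
 * here as the exception stack, while SPSel=1 selects SP_ELx, the stack of the
 * interrupted context. The frame is built on SP_EL0 and SPSel is flipped back
 * briefly only to capture the interrupted context's stack pointer.
 */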
curr_el_spx_sync:
    msr spsel, #0                           /* switch to exception stack */
    sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE           /* reserve space for CEF */
    str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]   /* shove lr into CEF */
    bl .push_exception_context_start                /* bl to CEF store routine */
/* Save original sp in x0 for .push_exception_context_finish */
    msr spsel, #1
    mov x0, sp
    msr spsel, #0
/* Push the remainder of the context */
    bl .push_exception_context_finish
/* get jump target and branch/link */
    bl curr_el_spx_sync_get_pc      /* Get current execution address */
curr_el_spx_sync_get_pc:            /* The current PC is now in LR */
    mov x0, #0x7f               /* Mask to use in BIC, lower 7 bits */
    bic x0, lr, x0              /* Mask LR to base of current vector */
    ldr x1, [x0, #0x78]         /* Load target from last word in vector */
    and lr, lr, #0x780          /* Mask off bits for vector number */
    lsr lr, lr, #7              /* Shift the vector bits down */
/* Store the vector */
    str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
    mov x0, sp
    blr x1
    b twiddle                   /* Spin if the handler returns */
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
    nop
/* Takes up the space of 2 instructions */
#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
    .word _AArch64_Exception_default
    .word 0x0
#else
    .dword _AArch64_Exception_default
#endif
.balign 0x80
/*
 * The exception handler for IRQ exceptions from the current EL using the
 * current SP.
 */
curr_el_spx_irq:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl curr_el_spx_irq_get_pc   /* Get current execution address */
curr_el_spx_irq_get_pc:         /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for FIQ exceptions from the current EL using the
 * current SP.
 */
curr_el_spx_fiq:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl curr_el_spx_fiq_get_pc   /* Get current execution address */
curr_el_spx_fiq_get_pc:         /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for system error exceptions from the current EL using
 * the current SP.
 */
curr_el_spx_serror:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl curr_el_spx_serror_get_pc    /* Get current execution address */
curr_el_spx_serror_get_pc:      /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for synchronous exceptions from a lower EL (AArch64).
 */
lower_el_aarch64_sync:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl lower_el_aarch64_sync_get_pc /* Get current execution address */
lower_el_aarch64_sync_get_pc:       /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for IRQ exceptions from a lower EL (AArch64). */
lower_el_aarch64_irq:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl lower_el_aarch64_irq_get_pc  /* Get current execution address */
lower_el_aarch64_irq_get_pc:        /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for FIQ exceptions from a lower EL (AArch64). */
lower_el_aarch64_fiq:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl lower_el_aarch64_fiq_get_pc  /* Get current execution address */
lower_el_aarch64_fiq_get_pc:        /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for system error exceptions from a lower EL (AArch64).
 */
lower_el_aarch64_serror:
/* Push x0,lr on to the stack */
    stp x0, lr, [sp, #-0x10]!
/* Get current execution address */
    bl lower_el_aarch64_serror_get_pc
lower_el_aarch64_serror_get_pc:     /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for the synchronous exception from a lower EL
 * (AArch32).
 */
lower_el_aarch32_sync:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl lower_el_aarch32_sync_get_pc /* Get current execution address */
lower_el_aarch32_sync_get_pc:       /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for the IRQ exception from a lower EL (AArch32). */
lower_el_aarch32_irq:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl lower_el_aarch32_irq_get_pc  /* Get current execution address */
lower_el_aarch32_irq_get_pc:        /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for the FIQ exception from a lower EL (AArch32). */
lower_el_aarch32_fiq:
    stp x0, lr, [sp, #-0x10]!   /* Push x0,lr on to the stack */
    bl lower_el_aarch32_fiq_get_pc  /* Get current execution address */
lower_el_aarch32_fiq_get_pc:        /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for the system error exception from a lower EL
 * (AArch32).
 */
lower_el_aarch32_serror:
/* Push x0,lr on to the stack */
    stp x0, lr, [sp, #-0x10]!
/* Get current execution address */
    bl lower_el_aarch32_serror_get_pc
lower_el_aarch32_serror_get_pc:     /* The current PC is now in LR */
    JUMP_HANDLER
    JUMP_TARGET_SPx

bsp_start_vector_table_end:

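/*
 * The table itself spans 16 vectors of 0x80 bytes, i.e. 0x800 bytes; the
 * sizes computed below may additionally include any padding inserted by the
 * .balign 0x800 directive between bsp_start_vector_table_begin and the table.
 */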
    .set    bsp_start_vector_table_size, bsp_start_vector_table_end - bsp_start_vector_table_begin
    .set    bsp_vector_table_size, bsp_start_vector_table_size

/*
 * This involves switching a few things around. The real x0 and LR are on the
 * SPx stack and need to be retrieved, while the LR on entry contains the
 * pointer into the AArch64 vector table.
 */
.print_exception_dump_spx:
/* Switch to exception stack (SP0) */
    msr spsel, #0
/* Save space for exception context */
    sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
/*
 * Push exception vector, LR currently points into the actual exception vector
 * table
 */
    and lr, lr, #0x780
    lsr lr, lr, #7
    str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
/* Pop x0,lr from stack, saved by generic handler */
/*
 * This modifies the stack pointer back to the pre-vector-handler value which is
 * safe because this will never return
 */
    msr spsel, #1
    ldp x0, lr, [sp], #0x10
    msr spsel, #0
/* Save LR */
    str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
/* Push the start of the context */
    bl .push_exception_context_start
/* Save original sp in x0 for .push_exception_context_finish */
    msr spsel, #1
    mov x0, sp
    msr spsel, #0
/* Push the remainder of the context */
    bl .push_exception_context_finish
/* Save sp into x0 for handler */
    mov x0, sp
/* Jump into the handler */
    bl _AArch64_Exception_default

    /* Just in case */
    b   twiddle

.print_exception_dump_sp0:
/* Save space for exception context */
    sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
/*
 * Push exception vector, LR currently points into the actual exception vector
 * table
 */
    and lr, lr, #0x780
    lsr lr, lr, #7
    str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
/* Get x0,lr from stack, saved by generic handler */
    add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
    ldp x0, lr, [sp]
    sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
/* Save LR */
    str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
/* Push the start of the context */
    bl .push_exception_context_start
/* Save original sp in x0 for .push_exception_context_finish */
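/*
 * The extra 0x10 accounts for the x0/LR pair that the vector stub pushed on
 * this stack before branching here, so x0 ends up holding the stack pointer
 * as it was when the exception was taken.
 */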
    add x0, sp, #(AARCH64_EXCEPTION_FRAME_SIZE + 0x10)
/* Push the remainder of the context */
    bl .push_exception_context_finish
/* Save sp (exception frame) into x0 for handler */
    mov x0, sp
/* Jump into the handler */
    bl _AArch64_Exception_default

    /* Just in case */
twiddle:
    b   twiddle

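/*
 * The two routines below cooperate to fill an exception frame: the start
 * routine stores x0-x29 at the base of the frame (LR/x30 has already been
 * stored by the caller), and the finish routine adds the original SP, the
 * exception return address from ELR_EL1, DAIF, SPSR_EL1, ESR_EL1, FAR_EL1,
 * FPSR, FPCR and the Q registers at the AARCH64_EXCEPTION_FRAME_*_OFFSET
 * locations defined elsewhere in the AArch64 port.
 */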
/* Assumes SP is at the base of the context and LR has already been pushed */
.push_exception_context_start:
/* Push x0-x29(fp) */
    stp x0,  x1,  [sp, #0x00]
    stp x2,  x3,  [sp, #0x10]
    stp x4,  x5,  [sp, #0x20]
    stp x6,  x7,  [sp, #0x30]
    stp x8,  x9,  [sp, #0x40]
    stp x10, x11, [sp, #0x50]
    stp x12, x13, [sp, #0x60]
    stp x14, x15, [sp, #0x70]
    stp x16, x17, [sp, #0x80]
    stp x18, x19, [sp, #0x90]
    stp x20, x21, [sp, #0xa0]
    stp x22, x23, [sp, #0xb0]
    stp x24, x25, [sp, #0xc0]
    stp x26, x27, [sp, #0xd0]
    stp x28, x29, [sp, #0xe0]
    ret

/* Expects original SP to be stored in x0 */
.push_exception_context_finish:
/* Get exception LR for PC */
    mrs x1, ELR_EL1
/* Push sp and pc */
    stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET]
/* Get DAIF and SPSR */
    mrs x0, DAIF
    mrs x1, SPSR_EL1
/* Push DAIF and SPSR */
    stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET]
/* Get ESR and FAR */
    mrs x0, ESR_EL1
    mrs x1, FAR_EL1
/* Push ESR and FAR */
    stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET]
/* Get FPSR and FPCR */
    mrs x0, FPSR
    mrs x1, FPCR
/* Push FPSR and FPCR */
    stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET]
/* Push VFP registers */
    stp q0,  q1,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x000)]
    stp q2,  q3,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x020)]
    stp q4,  q5,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x040)]
    stp q6,  q7,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x060)]
    stp q8,  q9,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x080)]
    stp q10, q11, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0a0)]
    stp q12, q13, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0c0)]
    stp q14, q15, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0e0)]
    stp q16, q17, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x100)]
    stp q18, q19, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x120)]
    stp q20, q21, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x140)]
    stp q22, q23, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x160)]
    stp q24, q25, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x180)]
    stp q26, q27, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1a0)]
    stp q28, q29, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1c0)]
    stp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
/* Done, return to exception handler */
    ret