Back to home page

LXR

 
 

    


File indexing completed on 2025-05-11 08:23:58

0001 /* SPDX-License-Identifier: BSD-2-Clause */
0002 
0003 /**
0004  * @file
0005  *
0006  * @ingroup ppc_exc
0007  *
0008  * @brief PowerPC Exceptions implementation.
0009  */
0010 
0011 /*
0012  * Copyright (c) 2009 embedded brains GmbH & Co. KG
0013  *
0014  * Redistribution and use in source and binary forms, with or without
0015  * modification, are permitted provided that the following conditions
0016  * are met:
0017  * 1. Redistributions of source code must retain the above copyright
0018  *    notice, this list of conditions and the following disclaimer.
0019  * 2. Redistributions in binary form must reproduce the above copyright
0020  *    notice, this list of conditions and the following disclaimer in the
0021  *    documentation and/or other materials provided with the distribution.
0022  *
0023  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
0024  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
0025  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
0026  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
0027  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
0028  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
0029  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
0030  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
0031  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
0032  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
0033  * POSSIBILITY OF SUCH DAMAGE.
0034  */
0035 
0036 #include "ppc_exc_asm_macros.h"
0037 
0038     .global ppc_exc_min_prolog_tmpl_naked

/*
 * Minimal exception prologue template for "naked" exceptions.  This code is
 * not executed in place: it is copied into the exception vector location by
 * ppc_exc_make_prologue(), which also turns the trailing absolute address
 * word into a branch instruction (see the comment before the .int below).
 */
0040 ppc_exc_min_prolog_tmpl_naked:
0041 
    /* Allocate the exception frame; stwu also stores the old r1 at 0(r1) */
0042     stwu    r1, -EXCEPTION_FRAME_END(r1)
    /* Free up VECTOR_REGISTER by saving it in the frame */
0043     stw VECTOR_REGISTER, VECTOR_OFFSET(r1)
    /*
     * Vector number placeholder — presumably patched per vector when the
     * template is instantiated by ppc_exc_make_prologue(); TODO confirm.
     */
0044     li  VECTOR_REGISTER, 0
0045 
0046     /*
0047      * We store the absolute branch target address here.  It will be used
0048      * to generate the branch operation in ppc_exc_make_prologue().
0049      */
0050     .int    ppc_exc_wrap_naked
0051 
/*
 * Common wrapper for "naked" exceptions.  Entered from the minimal vector
 * prologue with the exception frame already allocated (r1 updated by stwu)
 * and the vector number in VECTOR_REGISTER.  Saves the volatile context,
 * optionally changes the MSR, calls the high level handler looked up in
 * ppc_exc_handler_table, restores the context and returns with rfi.
 */
0052         .global ppc_exc_wrap_naked
0053 ppc_exc_wrap_naked:
0054 
0055     /* Save scratch registers */
0056     stw SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(r1)
0057     stw SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(r1)
0058     stw SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(r1)
0059 
0060     /* Save volatile registers */
0061     stw r0, GPR0_OFFSET(r1)
0062     stw r3, GPR3_OFFSET(r1)
0063     stw r8, GPR8_OFFSET(r1)
0064     stw r9, GPR9_OFFSET(r1)
0065     stw r10, GPR10_OFFSET(r1)
0066     stw r11, GPR11_OFFSET(r1)
0067     stw r12, GPR12_OFFSET(r1)
0068 
0069     /* Save CR */
0070     mfcr    SCRATCH_REGISTER_0
0071     stw SCRATCH_REGISTER_0, EXC_CR_OFFSET(r1)
0072 
0073     /* Save SRR0 */
0074     mfspr   SCRATCH_REGISTER_0, srr0
0075     stw SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(r1)
0076 
0077     /* Save SRR1 */
0078     mfspr   SCRATCH_REGISTER_0, srr1
0079     stw SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(r1)
0080 
0081     /* Save CTR */
0082     mfctr   SCRATCH_REGISTER_0
0083     stw SCRATCH_REGISTER_0, EXC_CTR_OFFSET(r1)
0084 
0085     /* Save XER */
0086     mfxer   SCRATCH_REGISTER_0
0087     stw SCRATCH_REGISTER_0, EXC_XER_OFFSET(r1)
0088 
0089     /* Save LR */
0090     mflr    SCRATCH_REGISTER_0
0091     stw SCRATCH_REGISTER_0, EXC_LR_OFFSET(r1)
0092 
0093 #ifndef PPC_EXC_CONFIG_BOOKE_ONLY
0094 
0095     /* Load MSR bit mask */
    /* @sdarel(r13): small data area relative access, r13 is the SDA base */
0096     lwz SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
0097 
0098     /*
0099      * Change the MSR if necessary (MMU, RI), remember decision in
0100      * non-volatile CR_MSR.
0101      */
0102     cmpwi   CR_MSR, SCRATCH_REGISTER_0, 0
0103     bne CR_MSR, wrap_change_msr_naked
0104 
0105 wrap_change_msr_done_naked:
0106 
0107 #endif /* PPC_EXC_CONFIG_BOOKE_ONLY */
0108 
0109     /*
0110      * Call high level exception handler
0111      */
0112 
0113     /*
0114      * Get the handler table index from the vector number.  We have to
0115      * discard the exception type.  Take only the least significant five
0116      * bits (= LAST_VALID_EXC + 1) from the vector register.  Multiply by
0117      * four (= size of function pointer).
0118      */
0119     rlwinm  SCRATCH_REGISTER_1, VECTOR_REGISTER, 2, 25, 29
0120 
0121     /* Load handler table address */
0122     LA  SCRATCH_REGISTER_0, ppc_exc_handler_table
0123 
0124     /* Load handler address */
0125     lwzx    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
0126 
0127     /*
0128      * First parameter = exception frame pointer + FRAME_LINK_SPACE
0129      *
0130      * We add FRAME_LINK_SPACE to the frame pointer because the high level
0131      * handler expects a BSP_Exception_frame structure.
0132      */
0133     addi    r3, r1, FRAME_LINK_SPACE
0134 
0135     /*
0136      * Second parameter = vector number (r4 is the VECTOR_REGISTER)
0137      *
0138      * Discard the exception type and store the vector number
0139      * in the vector register.  Take only the least significant
0140      * five bits (= LAST_VALID_EXC + 1).
0141      */
0142     rlwinm  VECTOR_REGISTER, VECTOR_REGISTER, 0, 27, 31
0143 
0144     /* Call handler */
    /* Indirect call through CTR; r3 = frame pointer, r4 = vector number */
0145     mtctr   SCRATCH_REGISTER_0
0146     bctrl
0147 
0148 #ifndef PPC_EXC_CONFIG_BOOKE_ONLY
0149 
0150     /* Restore MSR? */
    /* CR_MSR is a non-volatile CR field, so the decision made above survived
     * the handler call */
0151     bne CR_MSR, wrap_restore_msr_naked
0152 
0153 wrap_restore_msr_done_naked:
0154 
0155 #endif /* PPC_EXC_CONFIG_BOOKE_ONLY */
0156 
0157     /* Restore XER and CTR */
0158     lwz SCRATCH_REGISTER_0, EXC_XER_OFFSET(r1)
0159     lwz SCRATCH_REGISTER_1, EXC_CTR_OFFSET(r1)
0160     mtxer   SCRATCH_REGISTER_0
0161     mtctr   SCRATCH_REGISTER_1
0162 
0163     /* Restore CR and LR */
0164     lwz SCRATCH_REGISTER_0, EXC_CR_OFFSET(r1)
0165     lwz SCRATCH_REGISTER_1, EXC_LR_OFFSET(r1)
0166     mtcr    SCRATCH_REGISTER_0
0167     mtlr    SCRATCH_REGISTER_1
0168 
0169     /* Restore volatile registers */
0170     lwz r0, GPR0_OFFSET(r1)
0171     lwz r3, GPR3_OFFSET(r1)
0172     lwz r8, GPR8_OFFSET(r1)
0173     lwz r9, GPR9_OFFSET(r1)
0174     lwz r10, GPR10_OFFSET(r1)
0175     lwz r11, GPR11_OFFSET(r1)
0176     lwz r12, GPR12_OFFSET(r1)
0177 
0178     /* Restore vector register */
0179     lwz VECTOR_OFFSET(r1) vector register */
0179     lwz VECTOR_REGISTER, VECTOR_OFFSET(r1)
0180 
0181     /* Restore scratch registers and SRRs */
    /*
     * The SRR loads and the final scratch register reloads are interleaved:
     * each scratch register is reloaded only after its value has been moved
     * into the corresponding SRR.
     */
0182     lwz SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(r1)
0183     lwz SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
0184     lwz SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(r1)
0185     mtspr   srr0, SCRATCH_REGISTER_0
0186     lwz SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(r1)
0187     mtspr   srr1, SCRATCH_REGISTER_1
0188     lwz SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(r1)
0189 
0190     /*
0191      * We restore r1 from the frame rather than just popping (adding to
0192      * current r1) since the exception handler might have done strange
0193      * things (e.g. a debugger moving and relocating the stack).
0194      */
0195     lwz r1, 0(r1)
0196 
0197     /* Return: rfi resumes at SRR0 with the MSR restored from SRR1 */
0198     rfi
0199 
0200 #ifndef PPC_EXC_CONFIG_BOOKE_ONLY

/*
 * Out-of-line MSR change path: OR the configured ppc_exc_msr_bits mask
 * (still in SCRATCH_REGISTER_0 from the main path) into the current MSR
 * (e.g. to enable MMU/RI, see the comment at the branch site) and return
 * to the main path.
 */
0202 wrap_change_msr_naked:
0203 
0204     mfmsr   SCRATCH_REGISTER_1
0205     or  SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
0206     mtmsr   SCRATCH_REGISTER_1
    /* Context synchronization so the MSR change takes effect before we
     * continue */
0207     sync
0208     isync
0209     b   wrap_change_msr_done_naked
0210 
/*
 * Out-of-line MSR restore path: clear the ppc_exc_msr_bits set above from
 * the MSR again.  The mask is reloaded from the small data area because
 * SCRATCH_REGISTER_0 was clobbered by the handler call in the main path.
 */
0211 wrap_restore_msr_naked:
0212 
0213     lwz SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
0214     mfmsr   SCRATCH_REGISTER_1
0215     andc    SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
0216     mtmsr   SCRATCH_REGISTER_1
0217     sync
0218     isync
0219     b   wrap_restore_msr_done_naked
0220 
0221 #endif /* PPC_EXC_CONFIG_BOOKE_ONLY */