Back to home page

LXR

 
 

    


File indexing completed on 2025-05-11 08:23:58

0001 /* SPDX-License-Identifier: BSD-2-Clause */
0002 
0003 /*
0004  * Copyright (C) 2011, 2020 embedded brains GmbH & Co. KG
0005  *
0006  * Redistribution and use in source and binary forms, with or without
0007  * modification, are permitted provided that the following conditions
0008  * are met:
0009  * 1. Redistributions of source code must retain the above copyright
0010  *    notice, this list of conditions and the following disclaimer.
0011  * 2. Redistributions in binary form must reproduce the above copyright
0012  *    notice, this list of conditions and the following disclaimer in the
0013  *    documentation and/or other materials provided with the distribution.
0014  *
0015  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
0016  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
0017  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
0018  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
0019  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
0020  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
0021  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
0022  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
0023  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
0024  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
0025  * POSSIBILITY OF SUCH DAMAGE.
0026  */
0027 
0028 #include <bspopts.h>
0029 #include <rtems/score/percpu.h>
0030 #include <bsp/vectors.h>
0031 
0032 #ifdef PPC_EXC_CONFIG_USE_FIXED_HANDLER
0033 
/*
 * Map the scratch register names used throughout this handler to volatile
 * GPRs (r0, r3..r12), and map each one to its save slot in the exception
 * frame.  FRAME_REGISTER (r14) is a non-volatile register (explicitly saved
 * and restored below) used to keep the frame pointer across the high level
 * handler call.
 */
0034 #define SCRATCH_0_REGISTER r0
0035 #define SCRATCH_1_REGISTER r3
0036 #define SCRATCH_2_REGISTER r4
0037 #define SCRATCH_3_REGISTER r5
0038 #define SCRATCH_4_REGISTER r6
0039 #define SCRATCH_5_REGISTER r7
0040 #define SCRATCH_6_REGISTER r8
0041 #define SCRATCH_7_REGISTER r9
0042 #define SCRATCH_8_REGISTER r10
0043 #define SCRATCH_9_REGISTER r11
0044 #define SCRATCH_10_REGISTER r12
0045 #define FRAME_REGISTER r14
0046 
0047 #define SCRATCH_0_OFFSET GPR0_OFFSET
0048 #define SCRATCH_1_OFFSET GPR3_OFFSET
0049 #define SCRATCH_2_OFFSET GPR4_OFFSET
0050 #define SCRATCH_3_OFFSET GPR5_OFFSET
0051 #define SCRATCH_4_OFFSET GPR6_OFFSET
0052 #define SCRATCH_5_OFFSET GPR7_OFFSET
0053 #define SCRATCH_6_OFFSET GPR8_OFFSET
0054 #define SCRATCH_7_OFFSET GPR9_OFFSET
0055 #define SCRATCH_8_OFFSET GPR10_OFFSET
0056 #define SCRATCH_9_OFFSET GPR11_OFFSET
0057 #define SCRATCH_10_OFFSET GPR12_OFFSET
0058 #define FRAME_OFFSET PPC_EXC_INTERRUPT_FRAME_OFFSET
0059 
0060 #ifdef RTEMS_PROFILING
/*
 * GET_TIME_BASE REG
 *
 * Read the low order part of a free-running time base into REG, selecting
 * the SPR/instruction variant for the target CPU.  Only needed for the
 * profiling timestamps below.
 */
0061 .macro GET_TIME_BASE REG
0062 #if defined(__PPC_CPU_E6500__)
0063     mfspr \REG, FSL_EIS_ATBL
0064 #elif defined(ppc8540)
0065     mfspr   \REG, TBRL
0066 #else /* ppc8540 */
0067     mftb    \REG
0068 #endif /* ppc8540 */
0069 .endm
0070 #endif /* RTEMS_PROFILING */
0071 
0072     .global ppc_exc_min_prolog_async_tmpl_normal
0073     .global ppc_exc_interrupt
0074 
/*
 * Minimal prologue template for asynchronous exceptions.  This code is not
 * executed in place: ppc_exc_make_prologue() copies it into the exception
 * vector and generates the branch to ppc_exc_interrupt from the address
 * word at the end.  It allocates the exception frame and saves the low
 * order part of SCRATCH_1_REGISTER so that the fixed handler has one free
 * register to work with.
 */
0075 ppc_exc_min_prolog_async_tmpl_normal:
0076 
0077     stwu    r1, -PPC_EXC_INTERRUPT_FRAME_SIZE(r1)
0078     PPC_REG_STORE   SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
     /*
      * NOTE(review): 0xffff8000 looks like a placeholder immediate which is
      * patched when the template is instantiated - confirm against
      * ppc_exc_make_prologue().
      */
0079     li  SCRATCH_1_REGISTER, 0xffff8000
0080 
0081     /*
0082      * We store the absolute branch target address here.  It will be used
0083      * to generate the branch operation in ppc_exc_make_prologue().
0084      */
0085     .int    ppc_exc_interrupt
0086 
/*
 * Fixed high level interrupt handler.  On entry, the minimal prologue has
 * already allocated the exception frame (r1 points to it) and saved the low
 * order part of SCRATCH_1_REGISTER in it.  This code saves the remaining
 * volatile context, switches to the interrupt stack if necessary, calls
 * bsp_interrupt_dispatch(), optionally performs a thread dispatch, restores
 * the context and returns from the exception via rfi.
 */
0087 ppc_exc_interrupt:
0088 
0089     /* Save non-volatile FRAME_REGISTER */
0090     PPC_REG_STORE   FRAME_REGISTER, FRAME_OFFSET(r1)
0091 
0092 #ifdef RTEMS_PROFILING
0093     /* Get entry instant */
0094     GET_TIME_BASE   FRAME_REGISTER
0095     stw FRAME_REGISTER, PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET(r1)
0096 #endif /* RTEMS_PROFILING */
0097 
0098 #ifdef __SPE__
0099     /* Enable SPE */
0100     mfmsr   FRAME_REGISTER
0101     oris    FRAME_REGISTER, FRAME_REGISTER, MSR_SPE >> 16
0102     mtmsr   FRAME_REGISTER
0103     isync
0104 
0105     /*
0106      * Save high order part of SCRATCH_1_REGISTER here.  The low order part
0107      * was saved in the minimal prologue.
0108      */
0109     evmergehi   SCRATCH_1_REGISTER, SCRATCH_1_REGISTER, FRAME_REGISTER
0110     PPC_REG_STORE   FRAME_REGISTER, GPR3_OFFSET(r1)
0111 #endif
0112 
0113 #if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC)
0114     /* Enable FPU and/or AltiVec */
0115     mfmsr   FRAME_REGISTER
0116 #ifdef PPC_MULTILIB_FPU
0117     ori FRAME_REGISTER, FRAME_REGISTER, MSR_FP
0118 #endif
0119 #ifdef PPC_MULTILIB_ALTIVEC
0120     oris    FRAME_REGISTER, FRAME_REGISTER, MSR_VE >> 16
0121 #endif
0122     mtmsr   FRAME_REGISTER
0123     isync
0124 #endif
0125 
0126     /* Move frame pointer to non-volatile FRAME_REGISTER */
0127     mr  FRAME_REGISTER, r1
0128 
0129     /*
0130      * Save volatile registers.  The SCRATCH_1_REGISTER has been saved in
0131      * minimum prologue.
0132      */
0133     PPC_GPR_STORE   SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
0134 #ifdef __powerpc64__
0135     PPC_GPR_STORE   r2, GPR2_OFFSET(r1)
0136     LA32    r2, .TOC.
0137 #endif
0138     PPC_GPR_STORE   SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
     /* SCRATCH_2_REGISTER = per-CPU control of the executing processor */
0139     GET_SELF_CPU_CONTROL    SCRATCH_2_REGISTER
0140     PPC_GPR_STORE   SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
0141     PPC_GPR_STORE   SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
0142     PPC_GPR_STORE   SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
0143     PPC_GPR_STORE   SCRATCH_6_REGISTER, SCRATCH_6_OFFSET(r1)
0144     PPC_GPR_STORE   SCRATCH_7_REGISTER, SCRATCH_7_OFFSET(r1)
0145     PPC_GPR_STORE   SCRATCH_8_REGISTER, SCRATCH_8_OFFSET(r1)
0146     PPC_GPR_STORE   SCRATCH_9_REGISTER, SCRATCH_9_OFFSET(r1)
0147     PPC_GPR_STORE   SCRATCH_10_REGISTER, SCRATCH_10_OFFSET(r1)
0148 
0149     /* Load ISR nest level and thread dispatch disable level */
0150     lwz SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_2_REGISTER)
0151     lwz SCRATCH_4_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_2_REGISTER)
0152 
0153     /* Save SRR0, SRR1, CR, XER, CTR, and LR */
0154     mfsrr0  SCRATCH_0_REGISTER
0155     mfsrr1  SCRATCH_5_REGISTER
0156     mfcr    SCRATCH_6_REGISTER
0157     mfxer   SCRATCH_7_REGISTER
0158     mfctr   SCRATCH_8_REGISTER
0159     mflr    SCRATCH_9_REGISTER
0160     PPC_REG_STORE   SCRATCH_0_REGISTER, SRR0_FRAME_OFFSET(r1)
0161     PPC_REG_STORE   SCRATCH_5_REGISTER, SRR1_FRAME_OFFSET(r1)
0162     stw SCRATCH_6_REGISTER, EXC_CR_OFFSET(r1)
0163     stw SCRATCH_7_REGISTER, EXC_XER_OFFSET(r1)
0164     PPC_REG_STORE   SCRATCH_8_REGISTER, EXC_CTR_OFFSET(r1)
0165     PPC_REG_STORE   SCRATCH_9_REGISTER, EXC_LR_OFFSET(r1)
0166 
0167 #ifdef __SPE__
0168     /* Save SPEFSCR and ACC */
0169     mfspr   SCRATCH_0_REGISTER, FSL_EIS_SPEFSCR
0170     evxor   SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, SCRATCH_5_REGISTER
0171     evmwumiaa   SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, SCRATCH_5_REGISTER
0172     stw SCRATCH_0_REGISTER, PPC_EXC_SPEFSCR_OFFSET(r1)
0173     evstdd  SCRATCH_5_REGISTER, PPC_EXC_ACC_OFFSET(r1)
0174 #endif
0175 
0176     /* Save volatile AltiVec context */
0177 #ifdef PPC_MULTILIB_ALTIVEC
0178 #ifdef __PPC_VRSAVE__
     /* If VRSAVE is non-zero, save only the live VRs out of line */
0179     mfvrsave    SCRATCH_0_REGISTER
0180     cmpwi   SCRATCH_0_REGISTER, 0
0181     bne .Laltivec_save
0182 
0183 .Laltivec_save_continue:
0184 #else /* __PPC_VRSAVE__ */
     /* v0 is stored first so that it can be reused to transfer the VSCR */
0185     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
0186     stvx    v0, r1, SCRATCH_0_REGISTER
0187     mfvscr  v0
0188     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
0189     stvx    v1, r1, SCRATCH_0_REGISTER
0190     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
0191     stvx    v2, r1, SCRATCH_0_REGISTER
0192     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
0193     stvx    v3, r1, SCRATCH_0_REGISTER
0194     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
0195     stvx    v4, r1, SCRATCH_0_REGISTER
0196     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
0197     stvx    v5, r1, SCRATCH_0_REGISTER
0198     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
0199     stvx    v6, r1, SCRATCH_0_REGISTER
0200     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
0201     stvx    v7, r1, SCRATCH_0_REGISTER
0202     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
0203     stvx    v8, r1, SCRATCH_0_REGISTER
0204     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
0205     stvx    v9, r1, SCRATCH_0_REGISTER
0206     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
0207     stvx    v10, r1, SCRATCH_0_REGISTER
0208     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
0209     stvx    v11, r1, SCRATCH_0_REGISTER
0210     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
0211     stvx    v12, r1, SCRATCH_0_REGISTER
0212     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
0213     stvx    v13, r1, SCRATCH_0_REGISTER
0214     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
0215     stvx    v14, r1, SCRATCH_0_REGISTER
0216     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
0217     stvx    v15, r1, SCRATCH_0_REGISTER
0218     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
0219     stvx    v16, r1, SCRATCH_0_REGISTER
0220     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
0221     stvx    v17, r1, SCRATCH_0_REGISTER
0222     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
0223     stvx    v18, r1, SCRATCH_0_REGISTER
0224     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
0225     stvx    v19, r1, SCRATCH_0_REGISTER
     /* Save the VSCR, transferred to v0 by the mfvscr above */
0226     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
0227     stvewx  v0, r1, SCRATCH_0_REGISTER
0228 #endif /* __PPC_VRSAVE__ */
0229 #endif /* PPC_MULTILIB_ALTIVEC */
0230 
0231 #ifdef PPC_MULTILIB_FPU
0232     /* Save volatile FPU context */
     /* f0 is stored first so that it can be reused to transfer the FPSCR */
0233     stfd    f0, PPC_EXC_MIN_FR_OFFSET(0)(r1)
0234     mffs    f0
0235     stfd    f1, PPC_EXC_MIN_FR_OFFSET(1)(r1)
0236     stfd    f2, PPC_EXC_MIN_FR_OFFSET(2)(r1)
0237     stfd    f3, PPC_EXC_MIN_FR_OFFSET(3)(r1)
0238     stfd    f4, PPC_EXC_MIN_FR_OFFSET(4)(r1)
0239     stfd    f5, PPC_EXC_MIN_FR_OFFSET(5)(r1)
0240     stfd    f6, PPC_EXC_MIN_FR_OFFSET(6)(r1)
0241     stfd    f7, PPC_EXC_MIN_FR_OFFSET(7)(r1)
0242     stfd    f8, PPC_EXC_MIN_FR_OFFSET(8)(r1)
0243     stfd    f9, PPC_EXC_MIN_FR_OFFSET(9)(r1)
0244     stfd    f10, PPC_EXC_MIN_FR_OFFSET(10)(r1)
0245     stfd    f11, PPC_EXC_MIN_FR_OFFSET(11)(r1)
0246     stfd    f12, PPC_EXC_MIN_FR_OFFSET(12)(r1)
0247     stfd    f13, PPC_EXC_MIN_FR_OFFSET(13)(r1)
0248     stfd    f0, PPC_EXC_MIN_FPSCR_OFFSET(r1)
0249 #endif
0250 
0251     /* Increment ISR nest level and thread dispatch disable level */
0252     cmpwi   SCRATCH_3_REGISTER, 0
0253 #ifdef RTEMS_PROFILING
     /*
      * Remember in cr2 whether this is the outer-most interrupt (previous
      * nest level was zero).  cr2 is still valid after the handler call
      * below, see the "bne cr2" after bsp_interrupt_dispatch.
      */
0254     cmpwi   cr2, SCRATCH_3_REGISTER, 0
0255 #endif
0256     addi    SCRATCH_3_REGISTER, SCRATCH_3_REGISTER, 1
0257     addi    SCRATCH_4_REGISTER, SCRATCH_4_REGISTER, 1
0258     stw SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_2_REGISTER)
0259     stw SCRATCH_4_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_2_REGISTER)
0260 
0261     /* Switch stack if necessary */
     /*
      * The iselgt uses cr0 from the nest level compare above: if the
      * previous nest level was greater than zero we are already on the
      * interrupt stack and keep r1, otherwise take the stack pointer from
      * SPRG1 (presumably initialized to the interrupt stack by the BSP
      * start code - confirm against the BSP).
      */
0262     mfspr   SCRATCH_0_REGISTER, SPRG1
0263     iselgt  r1, r1, SCRATCH_0_REGISTER
0264 
0265     /* Call fixed high level handler */
0266     bl  bsp_interrupt_dispatch
0267     PPC64_NOP_FOR_LINKER_TOC_POINTER_RESTORE
0268 
0269 #ifdef RTEMS_PROFILING
0270     /* Update profiling data if necessary */
0271     bne cr2, .Lprofiling_done
0272     GET_SELF_CPU_CONTROL    r3
0273     lwz r4, PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET(FRAME_REGISTER)
0274     GET_TIME_BASE   r5
0275     bl  _Profiling_Outer_most_interrupt_entry_and_exit
0276     PPC64_NOP_FOR_LINKER_TOC_POINTER_RESTORE
0277 .Lprofiling_done:
0278 #endif /* RTEMS_PROFILING */
0279 
0280     /* Load some per-CPU variables */
0281     GET_SELF_CPU_CONTROL    SCRATCH_1_REGISTER
0282     lbz SCRATCH_0_REGISTER, PER_CPU_DISPATCH_NEEDED(SCRATCH_1_REGISTER)
0283     lwz SCRATCH_5_REGISTER, PER_CPU_ISR_DISPATCH_DISABLE(SCRATCH_1_REGISTER)
0284     lwz SCRATCH_6_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_1_REGISTER)
0285     lwz SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_1_REGISTER)
0286 
0287     /*
0288      * Switch back to original stack (FRAME_REGISTER == r1 if we are still
0289      * on the IRQ stack) and restore FRAME_REGISTER.
0290      */
0291     mr  r1, FRAME_REGISTER
0292     PPC_REG_LOAD    FRAME_REGISTER, FRAME_OFFSET(r1)
0293 
0294     /* Decrement levels and determine thread dispatch state */
     /*
      * The combined value in SCRATCH_0_REGISTER is zero (cr0 eq) if and
      * only if a thread dispatch is needed, the ISR dispatch disable flag
      * is cleared, and the new thread dispatch disable level is zero.
      */
0295     xori    SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, 1
0296     or  SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_5_REGISTER
0297     subi    SCRATCH_4_REGISTER, SCRATCH_6_REGISTER, 1
0298     or. SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_4_REGISTER
0299     subi    SCRATCH_3_REGISTER, SCRATCH_3_REGISTER, 1
0300 
0301     /* Store thread dispatch disable and ISR nest levels */
0302     stw SCRATCH_4_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_1_REGISTER)
0303     stw SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_1_REGISTER)
0304 
0305     /*
0306      * Check thread dispatch necessary, ISR dispatch disable and thread
0307      * dispatch disable level.
0308      */
0309     bne .Lthread_dispatch_done
0310 
0311     /* Thread dispatch */
0312 .Ldo_thread_dispatch:
0313 
0314     /* Set ISR dispatch disable and thread dispatch disable level to one */
0315     li  SCRATCH_0_REGISTER, 1
0316     stw SCRATCH_0_REGISTER, PER_CPU_ISR_DISPATCH_DISABLE(SCRATCH_1_REGISTER)
0317     stw SCRATCH_0_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_1_REGISTER)
0318 
0319     /*
0320      * Call _Thread_Do_dispatch(), this function will enable interrupts.
0321      * The r3 is SCRATCH_1_REGISTER.
0322      */
     /* r4 = MSR value with external exceptions enabled (second argument) */
0323     mfmsr   r4
0324     ori r4, r4, MSR_EE
0325     bl  _Thread_Do_dispatch
0326     PPC64_NOP_FOR_LINKER_TOC_POINTER_RESTORE
0327 
0328     /* Disable interrupts */
0329     wrteei  0
0330 
0331     /* SCRATCH_1_REGISTER is volatile, we must set it again */
0332     GET_SELF_CPU_CONTROL    SCRATCH_1_REGISTER
0333 
0334     /* Check if we have to do the thread dispatch again */
0335     lbz SCRATCH_0_REGISTER, PER_CPU_DISPATCH_NEEDED(SCRATCH_1_REGISTER)
0336     cmpwi   SCRATCH_0_REGISTER, 0
0337     bne .Ldo_thread_dispatch
0338 
0339     /* We are done with thread dispatching */
0340     li  SCRATCH_0_REGISTER, 0
0341     stw SCRATCH_0_REGISTER, PER_CPU_ISR_DISPATCH_DISABLE(SCRATCH_1_REGISTER)
0342 
0343 .Lthread_dispatch_done:
0344 
0345     /* Restore volatile AltiVec context */
0346 #ifdef PPC_MULTILIB_ALTIVEC
0347 #ifdef __PPC_VRSAVE__
     /* If VRSAVE is non-zero, restore only the live VRs out of line */
0348     mfvrsave    SCRATCH_0_REGISTER
0349     cmpwi   SCRATCH_0_REGISTER, 0
0350     bne .Laltivec_restore
0351 
0352 .Laltivec_restore_continue:
0353 #else /* __PPC_VRSAVE__ */
     /* Restore the VSCR via v0 before v0 itself is reloaded */
0354     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
0355     lvewx   v0, r1, SCRATCH_0_REGISTER
0356     mtvscr  v0
0357     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
0358     lvx v0, r1, SCRATCH_0_REGISTER
0359     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
0360     lvx v1, r1, SCRATCH_0_REGISTER
0361     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
0362     lvx v2, r1, SCRATCH_0_REGISTER
0363     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
0364     lvx v3, r1, SCRATCH_0_REGISTER
0365     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
0366     lvx v4, r1, SCRATCH_0_REGISTER
0367     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
0368     lvx v5, r1, SCRATCH_0_REGISTER
0369     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
0370     lvx v6, r1, SCRATCH_0_REGISTER
0371     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
0372     lvx v7, r1, SCRATCH_0_REGISTER
0373     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
0374     lvx v8, r1, SCRATCH_0_REGISTER
0375     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
0376     lvx v9, r1, SCRATCH_0_REGISTER
0377     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
0378     lvx v10, r1, SCRATCH_0_REGISTER
0379     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
0380     lvx v11, r1, SCRATCH_0_REGISTER
0381     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
0382     lvx v12, r1, SCRATCH_0_REGISTER
0383     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
0384     lvx v13, r1, SCRATCH_0_REGISTER
0385     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
0386     lvx v14, r1, SCRATCH_0_REGISTER
0387     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
0388     lvx v15, r1, SCRATCH_0_REGISTER
0389     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
0390     lvx v16, r1, SCRATCH_0_REGISTER
0391     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
0392     lvx v17, r1, SCRATCH_0_REGISTER
0393     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
0394     lvx v18, r1, SCRATCH_0_REGISTER
0395     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
0396     lvx v19, r1, SCRATCH_0_REGISTER
0397 #endif /* __PPC_VRSAVE__ */
0398 #endif /* PPC_MULTILIB_ALTIVEC */
0399 
0400 #ifdef PPC_MULTILIB_FPU
0401     /* Restore volatile FPU context */
     /* Restore the FPSCR via f0 before f0 itself is reloaded */
0402     lfd f0, PPC_EXC_MIN_FPSCR_OFFSET(r1)
0403     mtfsf   0xff, f0
0404     lfd f0, PPC_EXC_MIN_FR_OFFSET(0)(r1)
0405     lfd f1, PPC_EXC_MIN_FR_OFFSET(1)(r1)
0406     lfd f2, PPC_EXC_MIN_FR_OFFSET(2)(r1)
0407     lfd f3, PPC_EXC_MIN_FR_OFFSET(3)(r1)
0408     lfd f4, PPC_EXC_MIN_FR_OFFSET(4)(r1)
0409     lfd f5, PPC_EXC_MIN_FR_OFFSET(5)(r1)
0410     lfd f6, PPC_EXC_MIN_FR_OFFSET(6)(r1)
0411     lfd f7, PPC_EXC_MIN_FR_OFFSET(7)(r1)
0412     lfd f8, PPC_EXC_MIN_FR_OFFSET(8)(r1)
0413     lfd f9, PPC_EXC_MIN_FR_OFFSET(9)(r1)
0414     lfd f10, PPC_EXC_MIN_FR_OFFSET(10)(r1)
0415     lfd f11, PPC_EXC_MIN_FR_OFFSET(11)(r1)
0416     lfd f12, PPC_EXC_MIN_FR_OFFSET(12)(r1)
0417     lfd f13, PPC_EXC_MIN_FR_OFFSET(13)(r1)
0418 #endif
0419 
0420 #ifdef __SPE__
0421     /* Load SPEFSCR and ACC */
0422     lwz SCRATCH_3_REGISTER, PPC_EXC_SPEFSCR_OFFSET(r1)
0423     evldd   SCRATCH_4_REGISTER, PPC_EXC_ACC_OFFSET(r1)
0424 #endif
0425 
0426     /*
0427      * We must clear reservations here, since otherwise compare-and-swap
0428      * atomic operations with interrupts enabled may yield wrong results.
0429      * A compare-and-swap atomic operation is generated by the compiler
0430      * like this:
0431      *
0432      *   .L1:
0433      *     lwarx  r9, r0, r3
0434      *     cmpw   r9, r4
0435      *     bne-   .L2
0436      *     stwcx. r5, r0, r3
0437      *     bne-   .L1
0438      *   .L2:
0439      *
0440      * Consider the following scenario.  A thread is interrupted right
0441      * before the stwcx.  The interrupt updates the value using a
0442      * compare-and-swap sequence.  Everything is fine up to this point.
0443      * The interrupt performs now a compare-and-swap sequence which fails
0444      * with a branch to .L2.  The current processor has now a reservation.
0445      * The interrupt returns without further stwcx.  The thread updates the
0446      * value using the unrelated reservation of the interrupt.
0447      */
     /*
      * The store target is the FRAME_REGISTER save slot, which is a dead
      * value here (FRAME_REGISTER was already reloaded above).  Whether the
      * conditional store succeeds is irrelevant; its only purpose is to
      * clear the reservation.
      */
0448     li  SCRATCH_0_REGISTER, FRAME_OFFSET
0449     stwcx.  SCRATCH_0_REGISTER, r1, SCRATCH_0_REGISTER
0450 
0451     /* Load SRR0, SRR1, CR, XER, CTR, and LR */
0452     PPC_REG_LOAD    SCRATCH_5_REGISTER, SRR0_FRAME_OFFSET(r1)
0453     PPC_REG_LOAD    SCRATCH_6_REGISTER, SRR1_FRAME_OFFSET(r1)
0454     lwz SCRATCH_7_REGISTER, EXC_CR_OFFSET(r1)
0455     lwz SCRATCH_8_REGISTER, EXC_XER_OFFSET(r1)
0456     PPC_REG_LOAD    SCRATCH_9_REGISTER, EXC_CTR_OFFSET(r1)
0457     PPC_REG_LOAD    SCRATCH_10_REGISTER, EXC_LR_OFFSET(r1)
0458 
0459     /* Restore volatile registers */
0460     PPC_GPR_LOAD    SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
0461 #ifdef __powerpc64__
0462     PPC_GPR_LOAD    r2, GPR2_OFFSET(r1)
0463 #endif
0464     PPC_GPR_LOAD    SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
0465     PPC_GPR_LOAD    SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
0466 
0467 #ifdef __SPE__
0468     /* Restore SPEFSCR and ACC */
0469     mtspr   FSL_EIS_SPEFSCR, SCRATCH_3_REGISTER
0470     evmra   SCRATCH_4_REGISTER, SCRATCH_4_REGISTER
0471 #endif
0472 
0473     /* Restore volatile registers */
0474     PPC_GPR_LOAD    SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
0475     PPC_GPR_LOAD    SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
0476 
0477     /* Restore SRR0, SRR1, CR, CTR, XER, and LR plus volatile registers */
0478     mtsrr0  SCRATCH_5_REGISTER
0479     PPC_GPR_LOAD    SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
0480     mtsrr1  SCRATCH_6_REGISTER
0481     PPC_GPR_LOAD    SCRATCH_6_REGISTER, SCRATCH_6_OFFSET(r1)
0482     mtcr    SCRATCH_7_REGISTER
0483     PPC_GPR_LOAD    SCRATCH_7_REGISTER, SCRATCH_7_OFFSET(r1)
0484     mtxer   SCRATCH_8_REGISTER
0485     PPC_GPR_LOAD    SCRATCH_8_REGISTER, SCRATCH_8_OFFSET(r1)
0486     mtctr   SCRATCH_9_REGISTER
0487     PPC_GPR_LOAD    SCRATCH_9_REGISTER, SCRATCH_9_OFFSET(r1)
0488     mtlr    SCRATCH_10_REGISTER
0489     PPC_GPR_LOAD    SCRATCH_10_REGISTER, SCRATCH_10_OFFSET(r1)
0490 
0491     /* Pop stack */
0492     addi    r1, r1, PPC_EXC_INTERRUPT_FRAME_SIZE
0493 
0494     /* Return */
0495     rfi
0496 
0497 #if defined(PPC_MULTILIB_ALTIVEC) && defined(__PPC_VRSAVE__)
/*
 * Out-of-line save path used when VRSAVE is non-zero: v0..v3 and the VSCR
 * are saved unconditionally, the remaining groups of four VRs are saved
 * only if VRSAVE marks at least one register of the group as live.
 * Returns to .Laltivec_save_continue.
 */
0498 .Laltivec_save:
0499 
0500     /*
0501      * Let X be VRSAVE, calculate:
0502      *
0503      * Y = 0x77777777
0504      * Z = X & Y
0505      * Z = Z + Y
0506      * X = X | Z
0507      *
0508      * Afterwards, we have in X for each group of four VR registers:
0509      *
0510      * 0111b, if VRSAVE group of four registers == 0
0511      * 1XXXb, if VRSAVE group of four registers != 0
0512      */
0513     lis SCRATCH_5_REGISTER, 0x7777
0514     ori SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, 0x7777
0515     and SCRATCH_6_REGISTER, SCRATCH_0_REGISTER, SCRATCH_5_REGISTER
0516     add SCRATCH_6_REGISTER, SCRATCH_5_REGISTER, SCRATCH_6_REGISTER
0517     or  SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_6_REGISTER
     /*
      * After the mtcr, CR bit 4/8/12/16 is set if and only if the VRSAVE
      * group v4-v7/v8-v11/v12-v15/v16-v19 contains a live register; the
      * "bf" branches below skip a group whose bit is clear.
      */
0518     mtcr    SCRATCH_0_REGISTER
0519 
0520     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
0521     stvx    v0, r1, SCRATCH_0_REGISTER
0522 
0523     /* Move VSCR to v0 */
0524     mfvscr  v0
0525 
0526     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
0527     stvx    v1, r1, SCRATCH_0_REGISTER
0528     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
0529     stvx    v2, r1, SCRATCH_0_REGISTER
0530     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
0531     stvx    v3, r1, SCRATCH_0_REGISTER
0532 
0533     /* Save VSCR using v0 */
0534     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
0535     stvewx  v0, r1, SCRATCH_0_REGISTER
0536 
0537     bf  4, .Laltivec_save_v8
0538     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
0539     stvx    v4, r1, SCRATCH_0_REGISTER
0540     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
0541     stvx    v5, r1, SCRATCH_0_REGISTER
0542     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
0543     stvx    v6, r1, SCRATCH_0_REGISTER
0544     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
0545     stvx    v7, r1, SCRATCH_0_REGISTER
0546 
0547 .Laltivec_save_v8:
0548 
0549     bf  8, .Laltivec_save_v12
0550     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
0551     stvx    v8, r1, SCRATCH_0_REGISTER
0552     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
0553     stvx    v9, r1, SCRATCH_0_REGISTER
0554     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
0555     stvx    v10, r1, SCRATCH_0_REGISTER
0556     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
0557     stvx    v11, r1, SCRATCH_0_REGISTER
0558 
0559 .Laltivec_save_v12:
0560 
0561     bf  12, .Laltivec_save_v16
0562     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
0563     stvx    v12, r1, SCRATCH_0_REGISTER
0564     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
0565     stvx    v13, r1, SCRATCH_0_REGISTER
0566     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
0567     stvx    v14, r1, SCRATCH_0_REGISTER
0568     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
0569     stvx    v15, r1, SCRATCH_0_REGISTER
0570 
0571 .Laltivec_save_v16:
0572 
0573     bf  16, .Laltivec_save_continue
0574     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
0575     stvx    v16, r1, SCRATCH_0_REGISTER
0576     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
0577     stvx    v17, r1, SCRATCH_0_REGISTER
0578     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
0579     stvx    v18, r1, SCRATCH_0_REGISTER
0580     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
0581     stvx    v19, r1, SCRATCH_0_REGISTER
0582 
0583     b   .Laltivec_save_continue
0584 
/*
 * Out-of-line restore path used when VRSAVE is non-zero: the mirror image
 * of .Laltivec_save.  Returns to .Laltivec_restore_continue.
 */
0585 .Laltivec_restore:
0586 
0587     /* Load VSCR using v0 */
0588     li  SCRATCH_5_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
0589     lvewx   v0, r1, SCRATCH_5_REGISTER
0590 
0591     /* See comment at .Laltivec_save */
0592     lis SCRATCH_5_REGISTER, 0x7777
0593     ori SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, 0x7777
0594     and SCRATCH_6_REGISTER, SCRATCH_0_REGISTER, SCRATCH_5_REGISTER
0595     add SCRATCH_6_REGISTER, SCRATCH_5_REGISTER, SCRATCH_6_REGISTER
0596     or  SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_6_REGISTER
0597     mtcr    SCRATCH_0_REGISTER
0598 
0599     /* Restore VSCR using v0 */
0600     mtvscr  v0
0601 
0602     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
0603     lvx v0, r1, SCRATCH_0_REGISTER
0604     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
0605     lvx v1, r1, SCRATCH_0_REGISTER
0606     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
0607     lvx v2, r1, SCRATCH_0_REGISTER
0608     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
0609     lvx v3, r1, SCRATCH_0_REGISTER
0610 
0611     bf  4, .Laltivec_restore_v8
0612     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
0613     lvx v4, r1, SCRATCH_0_REGISTER
0614     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
0615     lvx v5, r1, SCRATCH_0_REGISTER
0616     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
0617     lvx v6, r1, SCRATCH_0_REGISTER
0618     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
0619     lvx v7, r1, SCRATCH_0_REGISTER
0620 
0621 .Laltivec_restore_v8:
0622 
0623     bf  8, .Laltivec_restore_v12
0624     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
0625     lvx v8, r1, SCRATCH_0_REGISTER
0626     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
0627     lvx v9, r1, SCRATCH_0_REGISTER
0628     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
0629     lvx v10, r1, SCRATCH_0_REGISTER
0630     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
0631     lvx v11, r1, SCRATCH_0_REGISTER
0632 
0633 .Laltivec_restore_v12:
0634 
0635     bf  12, .Laltivec_restore_v16
0636     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
0637     lvx v12, r1, SCRATCH_0_REGISTER
0638     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
0639     lvx v13, r1, SCRATCH_0_REGISTER
0640     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
0641     lvx v14, r1, SCRATCH_0_REGISTER
0642     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
0643     lvx v15, r1, SCRATCH_0_REGISTER
0644 
0645 .Laltivec_restore_v16:
0646 
0647     bf  16, .Laltivec_restore_continue
0648     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
0649     lvx v16, r1, SCRATCH_0_REGISTER
0650     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
0651     lvx v17, r1, SCRATCH_0_REGISTER
0652     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
0653     lvx v18, r1, SCRATCH_0_REGISTER
0654     li  SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
0655     lvx v19, r1, SCRATCH_0_REGISTER
0656 
0657     b   .Laltivec_restore_continue
0658 #endif /* PPC_MULTILIB_ALTIVEC && __PPC_VRSAVE__ */
0659 
0660 /* Symbol provided for debugging and tracing */
0661 ppc_exc_interrupt_end:
0662 
0663 #endif /* PPC_EXC_CONFIG_USE_FIXED_HANDLER */