/*  cpu_asm.s   1.1 - 95/12/04
 *
 *  This file contains the assembly code for the PowerPC implementation
 *  of RTEMS.
 *
 *  Author: Andrew Bray <andy@i-cubed.co.uk>
 *
 *  COPYRIGHT (c) 1995 by i-cubed ltd.
 *
 *  To anyone who acknowledges that this file is provided "AS IS"
 *  without any express or implied warranty:
 *      permission to use, copy, modify, and distribute this file
 *      for any purpose is hereby granted without fee, provided that
 *      the above copyright notice and this notice appears in all
 *      copies, and that the name of i-cubed limited not be used in
 *      advertising or publicity pertaining to distribution of the
 *      software without specific, written prior permission.
 *      i-cubed limited makes no representations about the suitability
 *      of this software for any purpose.
 *
 *  Derived from c/src/exec/cpu/no_cpu/cpu_asm.c:
 *
 *  COPYRIGHT (c) 1989-1997.
 *  On-Line Applications Research Corporation (OAR).
 *
 *  Copyright (C) 2011, 2020 embedded brains GmbH & Co. KG
 *
 *  The license and distribution terms for this file may be found in
 *  the file LICENSE in this distribution or at
 *  http://www.rtems.org/license/LICENSE.
 */

#include <rtems/asm.h>
#include <rtems/powerpc/powerpc.h>
#include <rtems/score/percpu.h>
#include <libcpu/powerpc-utility.h>
#include <bspopts.h>

#ifdef BSP_USE_DATA_CACHE_BLOCK_TOUCH
  #define DATA_CACHE_TOUCH(rega, regb) \
    dcbt rega, regb
#else
  #define DATA_CACHE_TOUCH(rega, regb)
#endif

#if BSP_DATA_CACHE_ENABLED && PPC_DEFAULT_CACHE_LINE_SIZE == 32
  #define DATA_CACHE_ZERO_AND_TOUCH(reg, offset) \
    li reg, offset; dcbz reg, r3; DATA_CACHE_TOUCH(reg, r4)
#else
  #define DATA_CACHE_ZERO_AND_TOUCH(reg, offset)
#endif
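
/* A sketch of what DATA_CACHE_ZERO_AND_TOUCH(r10, 32) expands to when both
 * macros are active:
 *
 *     li r10, 32; dcbz r10, r3; dcbt r10, r4
 *
 * The dcbz establishes the destination cache line (r3 + 32) in the data
 * cache as zeros without fetching it from memory first, which is safe in
 * the context switch below because the whole line is overwritten anyway.
 * The dcbt merely hints that the corresponding source line (r4 + 32)
 * should be prefetched.
 */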

#define PPC_CONTEXT_CACHE_LINE_0 (1 * PPC_DEFAULT_CACHE_LINE_SIZE)
#define PPC_CONTEXT_CACHE_LINE_1 (2 * PPC_DEFAULT_CACHE_LINE_SIZE)
#define PPC_CONTEXT_CACHE_LINE_2 (3 * PPC_DEFAULT_CACHE_LINE_SIZE)
#define PPC_CONTEXT_CACHE_LINE_3 (4 * PPC_DEFAULT_CACHE_LINE_SIZE)
#define PPC_CONTEXT_CACHE_LINE_4 (5 * PPC_DEFAULT_CACHE_LINE_SIZE)
#define PPC_CONTEXT_CACHE_LINE_5 (6 * PPC_DEFAULT_CACHE_LINE_SIZE)

    BEGIN_CODE

#if PPC_HAS_FPU == 1

/*
 * Offsets for Context_Control_fp
 */

#if (PPC_HAS_DOUBLE == 1)
    .set    FP_SIZE,    8
#define LDF lfd
#define STF stfd
#else
    .set    FP_SIZE,    4
#define LDF lfs
#define STF stfs
#endif
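
/* LDF/STF select the width of each slot: lfd/stfd move 8-byte doubles,
 * lfs/stfs move 4-byte singles, so the FP_n offsets below scale with
 * FP_SIZE accordingly.
 */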

    .set    FP_0, 0
    .set    FP_1, (FP_0 + FP_SIZE)
    .set    FP_2, (FP_1 + FP_SIZE)
    .set    FP_3, (FP_2 + FP_SIZE)
    .set    FP_4, (FP_3 + FP_SIZE)
    .set    FP_5, (FP_4 + FP_SIZE)
    .set    FP_6, (FP_5 + FP_SIZE)
    .set    FP_7, (FP_6 + FP_SIZE)
    .set    FP_8, (FP_7 + FP_SIZE)
    .set    FP_9, (FP_8 + FP_SIZE)
    .set    FP_10, (FP_9 + FP_SIZE)
    .set    FP_11, (FP_10 + FP_SIZE)
    .set    FP_12, (FP_11 + FP_SIZE)
    .set    FP_13, (FP_12 + FP_SIZE)
    .set    FP_14, (FP_13 + FP_SIZE)
    .set    FP_15, (FP_14 + FP_SIZE)
    .set    FP_16, (FP_15 + FP_SIZE)
    .set    FP_17, (FP_16 + FP_SIZE)
    .set    FP_18, (FP_17 + FP_SIZE)
    .set    FP_19, (FP_18 + FP_SIZE)
    .set    FP_20, (FP_19 + FP_SIZE)
    .set    FP_21, (FP_20 + FP_SIZE)
    .set    FP_22, (FP_21 + FP_SIZE)
    .set    FP_23, (FP_22 + FP_SIZE)
    .set    FP_24, (FP_23 + FP_SIZE)
    .set    FP_25, (FP_24 + FP_SIZE)
    .set    FP_26, (FP_25 + FP_SIZE)
    .set    FP_27, (FP_26 + FP_SIZE)
    .set    FP_28, (FP_27 + FP_SIZE)
    .set    FP_29, (FP_28 + FP_SIZE)
    .set    FP_30, (FP_29 + FP_SIZE)
    .set    FP_31, (FP_30 + FP_SIZE)
    .set    FP_FPSCR, (FP_31 + FP_SIZE)

/*
 *  _CPU_Context_save_fp
 *
 *  This routine is responsible for saving the FP context
 *  at *fp_context_ptr.  If the pointer to save the FP context
 *  to is changed, then the pointer is modified by this routine.
 *
 *  Sometimes a macro implementation of this is in cpu.h which dereferences
 *  the ** and a similarly named routine in this file is passed something
 *  like a (Context_Control_fp *).  The general rule on making this decision
 *  is to avoid writing assembly language.
 */
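
/* Presumably declared in cpu.h as (likewise _CPU_Context_restore_fp):
 *
 *     void _CPU_Context_save_fp( Context_Control_fp **fp_context_ptr );
 *
 * The double indirection explains the lwz r3, 0(r3) at the start of the
 * routine body.
 */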

    ALIGN (PPC_CACHE_ALIGNMENT, PPC_CACHE_ALIGN_POWER)
    PUBLIC_PROC (_CPU_Context_save_fp)
PROC (_CPU_Context_save_fp):
/* An FP context switch may occur in an ISR or exception handler when the FPU
 * is not available.  Therefore, we must explicitly enable it here!
 */
#if !defined(PPC_DISABLE_MSR_ACCESS)
    mfmsr   r4
    andi.   r5,r4,MSR_FP
    bne 1f
    ori r5,r4,MSR_FP
    mtmsr   r5
    isync
#endif  /* END PPC_DISABLE_MSR_ACCESS */
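
/* CR0 from the andi. above is not disturbed by the FP stores below, so the
 * second bne 1f after them reuses it to skip restoring the original MSR
 * when the FPU was already enabled on entry.
 */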

1:
    lwz r3, 0(r3)
    STF f0, FP_0(r3)
    STF f1, FP_1(r3)
    STF f2, FP_2(r3)
    STF f3, FP_3(r3)
    STF f4, FP_4(r3)
    STF f5, FP_5(r3)
    STF f6, FP_6(r3)
    STF f7, FP_7(r3)
    STF f8, FP_8(r3)
    STF f9, FP_9(r3)
    STF f10, FP_10(r3)
    STF f11, FP_11(r3)
    STF f12, FP_12(r3)
    STF f13, FP_13(r3)
    STF f14, FP_14(r3)
    STF f15, FP_15(r3)
    STF f16, FP_16(r3)
    STF f17, FP_17(r3)
    STF f18, FP_18(r3)
    STF f19, FP_19(r3)
    STF f20, FP_20(r3)
    STF f21, FP_21(r3)
    STF f22, FP_22(r3)
    STF f23, FP_23(r3)
    STF f24, FP_24(r3)
    STF f25, FP_25(r3)
    STF f26, FP_26(r3)
    STF f27, FP_27(r3)
    STF f28, FP_28(r3)
    STF f29, FP_29(r3)
    STF f30, FP_30(r3)
    STF f31, FP_31(r3)
    mffs    f2
    STF f2, FP_FPSCR(r3)
#if !defined(PPC_DISABLE_MSR_ACCESS)
    bne 1f
    mtmsr   r4
    isync
#endif  /* END PPC_DISABLE_MSR_ACCESS */

1:
    blr

/*
 *  _CPU_Context_restore_fp
 *
 *  This routine is responsible for restoring the FP context
 *  at *fp_context_ptr.  If the pointer to load the FP context
 *  from is changed, then the pointer is modified by this routine.
 *
 *  Sometimes a macro implementation of this is in cpu.h which dereferences
 *  the ** and a similarly named routine in this file is passed something
 *  like a (Context_Control_fp *).  The general rule on making this decision
 *  is to avoid writing assembly language.
 */

    ALIGN (PPC_CACHE_ALIGNMENT, PPC_CACHE_ALIGN_POWER)
    PUBLIC_PROC (_CPU_Context_restore_fp)
PROC (_CPU_Context_restore_fp):
    lwz r3, 0(r3)
/* An FP context switch may occur in an ISR or exception handler when the FPU
 * is not available.  Therefore, we must explicitly enable it here!
 */
#if !defined(PPC_DISABLE_MSR_ACCESS)
    mfmsr   r4
    andi.   r5,r4,MSR_FP
    bne 1f
    ori r5,r4,MSR_FP
    mtmsr   r5
    isync
#endif  /* END PPC_DISABLE_MSR_ACCESS */

1:
    LDF f2, FP_FPSCR(r3)
    mtfsf   255, f2
    LDF f0, FP_0(r3)
    LDF f1, FP_1(r3)
    LDF f2, FP_2(r3)
    LDF f3, FP_3(r3)
    LDF f4, FP_4(r3)
    LDF f5, FP_5(r3)
    LDF f6, FP_6(r3)
    LDF f7, FP_7(r3)
    LDF f8, FP_8(r3)
    LDF f9, FP_9(r3)
    LDF f10, FP_10(r3)
    LDF f11, FP_11(r3)
    LDF f12, FP_12(r3)
    LDF f13, FP_13(r3)
    LDF f14, FP_14(r3)
    LDF f15, FP_15(r3)
    LDF f16, FP_16(r3)
    LDF f17, FP_17(r3)
    LDF f18, FP_18(r3)
    LDF f19, FP_19(r3)
    LDF f20, FP_20(r3)
    LDF f21, FP_21(r3)
    LDF f22, FP_22(r3)
    LDF f23, FP_23(r3)
    LDF f24, FP_24(r3)
    LDF f25, FP_25(r3)
    LDF f26, FP_26(r3)
    LDF f27, FP_27(r3)
    LDF f28, FP_28(r3)
    LDF f29, FP_29(r3)
    LDF f30, FP_30(r3)
    LDF f31, FP_31(r3)
#if !defined(PPC_DISABLE_MSR_ACCESS)
    bne 1f
    mtmsr   r4
    isync
#endif  /* END PPC_DISABLE_MSR_ACCESS */

1:
    blr
#endif /* PPC_HAS_FPU == 1 */

    ALIGN (PPC_CACHE_ALIGNMENT, PPC_CACHE_ALIGN_POWER)
    PUBLIC_PROC (_CPU_Context_switch)
    PUBLIC_PROC (_CPU_Context_switch_no_return)
PROC (_CPU_Context_switch):
PROC (_CPU_Context_switch_no_return):

#ifdef BSP_USE_SYNC_IN_CONTEXT_SWITCH
    sync
    isync
#endif

#if defined(PPC_MULTILIB_ALTIVEC) && defined(__PPC_VRSAVE__)
    mfvrsave    r9
#endif

    /* Align to a cache line */
    CLEAR_RIGHT_IMMEDIATE   r3, r3, PPC_DEFAULT_CACHE_LINE_POWER
    CLEAR_RIGHT_IMMEDIATE   r5, r4, PPC_DEFAULT_CACHE_LINE_POWER
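
    /* CLEAR_RIGHT_IMMEDIATE presumably expands to clrrwi/clrrdi and clears
     * the low PPC_DEFAULT_CACHE_LINE_POWER bits, so r3 and r5 become
     * cache-line-aligned base pointers for the dcbz/dcbt macros below.
     */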

    DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_0)

#if PPC_CONTEXT_CACHE_LINE_2 <= PPC_CONTEXT_VOLATILE_SIZE
    DATA_CACHE_ZERO_AND_TOUCH(r11, PPC_CONTEXT_CACHE_LINE_1)
#endif

    /* Save context to r3 */

    GET_SELF_CPU_CONTROL    r12
#if !defined(PPC_DISABLE_MSR_ACCESS)
    mfmsr   r6
#endif  /* END PPC_DISABLE_MSR_ACCESS */
    mfcr    r7
#ifdef PPC_MULTILIB_ALTIVEC
#ifdef __PPC_VRSAVE__
    /* Mark v0 as used since we need it to get the VSCR */
    oris    r8, r9, 0x8000
    mtvrsave    r8
#endif
    mfvscr  v0
#endif
    mflr    r8
    lwz r11, PER_CPU_ISR_DISPATCH_DISABLE(r12)

    /*
     * We have to clear the reservation of the executing thread.  See also
     * Book E section 6.1.6.2 "Atomic Update Primitives".  Recent GCC
     * versions use atomic operations in the C++ library for example.  On
     * SMP configurations the reservation is cleared later during the
     * context switch.
     */
#if PPC_CONTEXT_OFFSET_GPR1 != PPC_CONTEXT_CACHE_LINE_0 \
  || !BSP_DATA_CACHE_ENABLED \
  || PPC_DEFAULT_CACHE_LINE_SIZE != 32
    li  r10, PPC_CONTEXT_OFFSET_GPR1
#endif
#ifndef RTEMS_SMP
    stwcx.  r1, r3, r10
#endif
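
    /* The stwcx. above exists only to clear any reservation held from an
     * earlier lwarx; whether the conditional store itself succeeds is
     * irrelevant, since GPR1 is stored to the context again right below.
     */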

    stw r6, PPC_CONTEXT_OFFSET_MSR(r3)
    stw r7, PPC_CONTEXT_OFFSET_CR(r3)
    PPC_REG_STORE   r1, PPC_CONTEXT_OFFSET_GPR1(r3)
    PPC_REG_STORE   r8, PPC_CONTEXT_OFFSET_LR(r3)

    PPC_GPR_STORE   r14, PPC_CONTEXT_OFFSET_GPR14(r3)
    PPC_GPR_STORE   r15, PPC_CONTEXT_OFFSET_GPR15(r3)

#if PPC_CONTEXT_OFFSET_GPR20 == PPC_CONTEXT_CACHE_LINE_2
    DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_2)
#endif

    PPC_GPR_STORE   r16, PPC_CONTEXT_OFFSET_GPR16(r3)
    PPC_GPR_STORE   r17, PPC_CONTEXT_OFFSET_GPR17(r3)

#if PPC_CONTEXT_OFFSET_GPR26 == PPC_CONTEXT_CACHE_LINE_2
    DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_2)
#endif

    PPC_GPR_STORE   r18, PPC_CONTEXT_OFFSET_GPR18(r3)
    PPC_GPR_STORE   r19, PPC_CONTEXT_OFFSET_GPR19(r3)

#if PPC_CONTEXT_OFFSET_GPR24 == PPC_CONTEXT_CACHE_LINE_3
    DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_3)
#endif

    PPC_GPR_STORE   r20, PPC_CONTEXT_OFFSET_GPR20(r3)
    PPC_GPR_STORE   r21, PPC_CONTEXT_OFFSET_GPR21(r3)
    PPC_GPR_STORE   r22, PPC_CONTEXT_OFFSET_GPR22(r3)
    PPC_GPR_STORE   r23, PPC_CONTEXT_OFFSET_GPR23(r3)

#if PPC_CONTEXT_OFFSET_GPR28 == PPC_CONTEXT_CACHE_LINE_4
    DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_4)
#endif

    PPC_GPR_STORE   r24, PPC_CONTEXT_OFFSET_GPR24(r3)
    PPC_GPR_STORE   r25, PPC_CONTEXT_OFFSET_GPR25(r3)

#if PPC_CONTEXT_OFFSET_V22 == PPC_CONTEXT_CACHE_LINE_2
    DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_2)
#endif

    PPC_GPR_STORE   r26, PPC_CONTEXT_OFFSET_GPR26(r3)
    PPC_GPR_STORE   r27, PPC_CONTEXT_OFFSET_GPR27(r3)

    PPC_GPR_STORE   r28, PPC_CONTEXT_OFFSET_GPR28(r3)
    PPC_GPR_STORE   r29, PPC_CONTEXT_OFFSET_GPR29(r3)
    PPC_GPR_STORE   r30, PPC_CONTEXT_OFFSET_GPR30(r3)
    PPC_GPR_STORE   r31, PPC_CONTEXT_OFFSET_GPR31(r3)

    stw r11, PPC_CONTEXT_OFFSET_ISR_DISPATCH_DISABLE(r3)

#ifdef PPC_MULTILIB_ALTIVEC
    li  r10, PPC_CONTEXT_OFFSET_VSCR
    stvewx  v0, r3, r10

#ifdef __PPC_VRSAVE__
    stw r9, PPC_CONTEXT_OFFSET_VRSAVE(r3)
    andi.   r9, r9, 0xfff
    bne .Laltivec_save

.Laltivec_save_continue:
#else /* __PPC_VRSAVE__ */
    li  r9, PPC_CONTEXT_OFFSET_V20
    stvx    v20, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V21
    stvx    v21, r3, r9

#if PPC_CONTEXT_OFFSET_V26 == PPC_CONTEXT_CACHE_LINE_3
    DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_3)
#endif

    li  r9, PPC_CONTEXT_OFFSET_V22
    stvx    v22, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V23
    stvx    v23, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V24
    stvx    v24, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V25
    stvx    v25, r3, r9

#if PPC_CONTEXT_OFFSET_V30 == PPC_CONTEXT_CACHE_LINE_4
    DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_4)
#endif

    li  r9, PPC_CONTEXT_OFFSET_V26
    stvx    v26, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V27
    stvx    v27, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V28
    stvx    v28, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V29
    stvx    v29, r3, r9

#if PPC_CONTEXT_OFFSET_F17 == PPC_CONTEXT_CACHE_LINE_5
    DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_5)
#endif

    li  r9, PPC_CONTEXT_OFFSET_V30
    stvx    v30, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V31
    stvx    v31, r3, r9
    mfvrsave    r9
    stw r9, PPC_CONTEXT_OFFSET_VRSAVE(r3)
#endif /* __PPC_VRSAVE__ */
#endif /* PPC_MULTILIB_ALTIVEC */

#ifdef PPC_MULTILIB_FPU
    stfd    f14, PPC_CONTEXT_OFFSET_F14(r3)
    stfd    f15, PPC_CONTEXT_OFFSET_F15(r3)
    stfd    f16, PPC_CONTEXT_OFFSET_F16(r3)
    stfd    f17, PPC_CONTEXT_OFFSET_F17(r3)
    stfd    f18, PPC_CONTEXT_OFFSET_F18(r3)
    stfd    f19, PPC_CONTEXT_OFFSET_F19(r3)
    stfd    f20, PPC_CONTEXT_OFFSET_F20(r3)
    stfd    f21, PPC_CONTEXT_OFFSET_F21(r3)
    stfd    f22, PPC_CONTEXT_OFFSET_F22(r3)
    stfd    f23, PPC_CONTEXT_OFFSET_F23(r3)
    stfd    f24, PPC_CONTEXT_OFFSET_F24(r3)
    stfd    f25, PPC_CONTEXT_OFFSET_F25(r3)
    stfd    f26, PPC_CONTEXT_OFFSET_F26(r3)
    stfd    f27, PPC_CONTEXT_OFFSET_F27(r3)
    stfd    f28, PPC_CONTEXT_OFFSET_F28(r3)
    stfd    f29, PPC_CONTEXT_OFFSET_F29(r3)
    stfd    f30, PPC_CONTEXT_OFFSET_F30(r3)
    stfd    f31, PPC_CONTEXT_OFFSET_F31(r3)
#endif

#ifdef RTEMS_SMP
    /*
     * The executing thread no longer executes on this processor.  Switch
     * the stack to the temporary interrupt stack of this processor.  Mark
     * the context of the executing thread as not executing.
     */
    msync

    addi    r1, r12, PER_CPU_INTERRUPT_FRAME_AREA + CPU_INTERRUPT_FRAME_SIZE
    li  r6, 0
    stw r6, PPC_CONTEXT_OFFSET_IS_EXECUTING(r3)

.Lcheck_is_executing:

    /* Check the is executing indicator of the heir context */
    addi    r6, r5, PPC_CONTEXT_OFFSET_IS_EXECUTING
    lwarx   r7, r0, r6
    cmpwi   r7, 0
    bne .Lget_potential_new_heir

    /* Try to update the is executing indicator of the heir context */
    li  r7, 1
    stwcx.  r7, r0, r6
    bne .Lget_potential_new_heir
    isync
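
    /* The lwarx/stwcx. pair above atomically test-and-sets the indicator;
     * the isync then acts as an acquire barrier, so no access to the heir
     * context happens before its previous processor has released it.
     */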
#endif

    /* Restore context from r5 */
restore_context:

#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
    mr  r4, r5
    .extern _CPU_Context_switch_altivec
    bl  _CPU_Context_switch_altivec
#endif

    lwz r6, PPC_CONTEXT_OFFSET_MSR(r5)
    lwz r7, PPC_CONTEXT_OFFSET_CR(r5)
    PPC_REG_LOAD    r1, PPC_CONTEXT_OFFSET_GPR1(r5)
    PPC_REG_LOAD    r8, PPC_CONTEXT_OFFSET_LR(r5)

#ifdef PPC_MULTILIB_ALTIVEC
    li  r10, PPC_CONTEXT_OFFSET_VSCR
    lvewx   v0, r5, r10
#ifdef __PPC_VRSAVE__
    lwz r9, PPC_CONTEXT_OFFSET_VRSAVE(r5)
#endif
#endif

    PPC_GPR_LOAD    r14, PPC_CONTEXT_OFFSET_GPR14(r5)
    PPC_GPR_LOAD    r15, PPC_CONTEXT_OFFSET_GPR15(r5)

    DATA_CACHE_TOUCH(r0, r1)

    PPC_GPR_LOAD    r16, PPC_CONTEXT_OFFSET_GPR16(r5)
    PPC_GPR_LOAD    r17, PPC_CONTEXT_OFFSET_GPR17(r5)
    PPC_GPR_LOAD    r18, PPC_CONTEXT_OFFSET_GPR18(r5)
    PPC_GPR_LOAD    r19, PPC_CONTEXT_OFFSET_GPR19(r5)

    PPC_GPR_LOAD    r20, PPC_CONTEXT_OFFSET_GPR20(r5)
    PPC_GPR_LOAD    r21, PPC_CONTEXT_OFFSET_GPR21(r5)
    PPC_GPR_LOAD    r22, PPC_CONTEXT_OFFSET_GPR22(r5)
    PPC_GPR_LOAD    r23, PPC_CONTEXT_OFFSET_GPR23(r5)

    PPC_GPR_LOAD    r24, PPC_CONTEXT_OFFSET_GPR24(r5)
    PPC_GPR_LOAD    r25, PPC_CONTEXT_OFFSET_GPR25(r5)
    PPC_GPR_LOAD    r26, PPC_CONTEXT_OFFSET_GPR26(r5)
    PPC_GPR_LOAD    r27, PPC_CONTEXT_OFFSET_GPR27(r5)

    PPC_GPR_LOAD    r28, PPC_CONTEXT_OFFSET_GPR28(r5)
    PPC_GPR_LOAD    r29, PPC_CONTEXT_OFFSET_GPR29(r5)
    PPC_GPR_LOAD    r30, PPC_CONTEXT_OFFSET_GPR30(r5)
    PPC_GPR_LOAD    r31, PPC_CONTEXT_OFFSET_GPR31(r5)

#ifdef __powerpc64__
    ld  r13, PPC_CONTEXT_OFFSET_TP(r5)
#else
    lwz r2, PPC_CONTEXT_OFFSET_TP(r5)
#endif
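
    /* r2 (32-bit) and r13 (64-bit) are the TLS thread pointers defined by
     * the respective PowerPC ELF ABIs, so the thread pointer is switched
     * here along with the rest of the context.
     */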
    lwz r11, PPC_CONTEXT_OFFSET_ISR_DISPATCH_DISABLE(r5)

#ifdef PPC_MULTILIB_ALTIVEC
    mtvscr  v0

#ifdef __PPC_VRSAVE__
    mtvrsave    r9
    andi.   r9, r9, 0xfff
    bne .Laltivec_restore

.Laltivec_restore_continue:
#else /* __PPC_VRSAVE__ */
    li  r9, PPC_CONTEXT_OFFSET_V20
    lvx v20, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V21
    lvx v21, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V22
    lvx v22, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V23
    lvx v23, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V24
    lvx v24, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V25
    lvx v25, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V26
    lvx v26, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V27
    lvx v27, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V28
    lvx v28, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V29
    lvx v29, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V30
    lvx v30, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V31
    lvx v31, r5, r9
    lwz r9, PPC_CONTEXT_OFFSET_VRSAVE(r5)
    mtvrsave    r9
#endif /* __PPC_VRSAVE__ */
#endif /* PPC_MULTILIB_ALTIVEC */

#ifdef PPC_MULTILIB_FPU
    lfd f14, PPC_CONTEXT_OFFSET_F14(r5)
    lfd f15, PPC_CONTEXT_OFFSET_F15(r5)
    lfd f16, PPC_CONTEXT_OFFSET_F16(r5)
    lfd f17, PPC_CONTEXT_OFFSET_F17(r5)
    lfd f18, PPC_CONTEXT_OFFSET_F18(r5)
    lfd f19, PPC_CONTEXT_OFFSET_F19(r5)
    lfd f20, PPC_CONTEXT_OFFSET_F20(r5)
    lfd f21, PPC_CONTEXT_OFFSET_F21(r5)
    lfd f22, PPC_CONTEXT_OFFSET_F22(r5)
    lfd f23, PPC_CONTEXT_OFFSET_F23(r5)
    lfd f24, PPC_CONTEXT_OFFSET_F24(r5)
    lfd f25, PPC_CONTEXT_OFFSET_F25(r5)
    lfd f26, PPC_CONTEXT_OFFSET_F26(r5)
    lfd f27, PPC_CONTEXT_OFFSET_F27(r5)
    lfd f28, PPC_CONTEXT_OFFSET_F28(r5)
    lfd f29, PPC_CONTEXT_OFFSET_F29(r5)
    lfd f30, PPC_CONTEXT_OFFSET_F30(r5)
    lfd f31, PPC_CONTEXT_OFFSET_F31(r5)
#endif

    mtlr    r8
    mtcr    r7
#if !defined(PPC_DISABLE_MSR_ACCESS)
    mtmsr   r6
#endif  /* END PPC_DISABLE_MSR_ACCESS */
    stw r11, PER_CPU_ISR_DISPATCH_DISABLE(r12)

#ifdef BSP_USE_SYNC_IN_CONTEXT_SWITCH
    isync
#endif

    blr

    PUBLIC_PROC (_CPU_Context_restore)
PROC (_CPU_Context_restore):
    /* Align to a cache line */
    CLEAR_RIGHT_IMMEDIATE   r5, r3, PPC_DEFAULT_CACHE_LINE_POWER

    GET_SELF_CPU_CONTROL    r12

#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
    li  r3, 0
#endif
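
    /* With r3 == 0, _CPU_Context_switch_altivec at restore_context
     * presumably has no outgoing context to save and performs a pure
     * restore of the AltiVec state from r4 (copied from r5 there).
     */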

#if defined(PPC_MULTILIB_ALTIVEC) && defined(__PPC_VRSAVE__)
    /* Mark v0 as used since we need it to get the VSCR */
    mfvrsave    r9
    oris    r8, r9, 0x8000
    mtvrsave    r8
#endif

    b   restore_context

#ifdef RTEMS_SMP
.Lget_potential_new_heir:

    /* We may have a new heir */

    /* Read the executing and heir */
    PPC_REG_LOAD    r7, PER_CPU_OFFSET_EXECUTING(r12)
    PPC_REG_LOAD    r8, PER_CPU_OFFSET_HEIR(r12)

    /*
     * Update the executing only if necessary to avoid cache line
     * monopolization.
     */
    PPC_REG_CMP r7, r8
    beq .Lcheck_is_executing

    /* Calculate the heir context pointer */
    sub r7, r4, r7
    add r4, r8, r7
    CLEAR_RIGHT_IMMEDIATE   r5, r4, PPC_DEFAULT_CACHE_LINE_POWER
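
    /* r4 - r7 yields the offset of the context area within the thread
     * control block; adding it to the new heir thread pointer in r8
     * rebases r4 (and the aligned r5) onto the new heir context.
     */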

    /* Update the executing */
    PPC_REG_STORE   r8, PER_CPU_OFFSET_EXECUTING(r12)

    b   .Lcheck_is_executing
#endif

#if defined(PPC_MULTILIB_ALTIVEC) && defined(__PPC_VRSAVE__)
.Laltivec_save:

    /*
     * Let X be VRSAVE, calculate:
     *
     * Z = X & 0x777
     * Z = Z + 0x777
     * X = X | Z
     *
     * Afterwards, we have in X for each group of four non-volatile VR
     * registers:
     *
     * 0111b, if VRSAVE group of four registers == 0
     * 1XXXb, if VRSAVE group of four registers != 0
     */
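    /*
     * Worked example (low 12 bits only): if X == 0x021, i.e. only v26 and
     * v31 are in use, then Z == 0x021 & 0x777 == 0x021, Z + 0x777 == 0x798,
     * and X | Z == 0x7b9 == 0111 1011 1001b.  Loaded into the CR, the group
     * top bits land at CR bits 20, 24 and 28, so bf 20 skips v20-v23 while
     * the v24-v27 and v28-v31 groups are saved.
     */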
    andi.   r10, r9, 0x777
    addi    r10, r10, 0x777
    or  r9, r9, r10
    mtcr    r9

    bf  20, .Laltivec_save_v24
    li  r9, PPC_CONTEXT_OFFSET_V20
    stvx    v20, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V21
    stvx    v21, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V22
    stvx    v22, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V23
    stvx    v23, r3, r9

.Laltivec_save_v24:

    bf  24, .Laltivec_save_v28
    li  r9, PPC_CONTEXT_OFFSET_V24
    stvx    v24, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V25
    stvx    v25, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V26
    stvx    v26, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V27
    stvx    v27, r3, r9

.Laltivec_save_v28:

    bf  28, .Laltivec_save_continue
    li  r9, PPC_CONTEXT_OFFSET_V28
    stvx    v28, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V29
    stvx    v29, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V30
    stvx    v30, r3, r9
    li  r9, PPC_CONTEXT_OFFSET_V31
    stvx    v31, r3, r9

    b   .Laltivec_save_continue

.Laltivec_restore:

    /* See comment at .Laltivec_save */
    andi.   r10, r9, 0x777
    addi    r10, r10, 0x777
    or  r9, r9, r10
    mtcr    r9

    bf  20, .Laltivec_restore_v24
    li  r9, PPC_CONTEXT_OFFSET_V20
    lvx v20, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V21
    lvx v21, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V22
    lvx v22, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V23
    lvx v23, r5, r9

.Laltivec_restore_v24:

    bf  24, .Laltivec_restore_v28
    li  r9, PPC_CONTEXT_OFFSET_V24
    lvx v24, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V25
    lvx v25, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V26
    lvx v26, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V27
    lvx v27, r5, r9

.Laltivec_restore_v28:

    bf  28, .Laltivec_restore_continue
    li  r9, PPC_CONTEXT_OFFSET_V28
    lvx v28, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V29
    lvx v29, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V30
    lvx v30, r5, r9
    li  r9, PPC_CONTEXT_OFFSET_V31
    lvx v31, r5, r9

    b   .Laltivec_restore_continue
#endif /* PPC_MULTILIB_ALTIVEC && __PPC_VRSAVE__ */