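/*
 * _CPU_Context_validate: fill the PowerPC register set with values derived
 * from the pattern argument in r3 and then compare the registers against
 * these values in an endless loop.  The routine only returns if a mismatch
 * is detected, e.g. because an interrupt or context switch did not fully
 * preserve a register; in that case the saved non-volatile context is
 * restored before returning.  The expected C prototype (an assumption
 * based on how r3 is used here) is:
 *
 *   void _CPU_Context_validate( uintptr_t pattern );
 */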
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <rtems/asm.h>
#include <rtems/score/cpu.h>

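/*
 * Layout of the stack frame used below (offsets from the updated r1): the
 * back chain and the LR/CR save slots come first, GPR14..GPR31 start at
 * offset 32, followed by the optional FPU area (FPR14..FPR31, FPSCR and
 * two scratch doublewords) and the optional 16-byte aligned AltiVec area
 * (VR20..VR31, two scratch vectors and a vector slot holding VRSAVE,
 * VRSAVE2 and VSCR).  FRAME_SIZE rounds the total up to
 * CPU_STACK_ALIGNMENT.
 */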
#define LR_OFFSET 8
#define CR_OFFSET 16
#define OFFSET(i) ((i) * PPC_GPR_SIZE + 32)
#define GPR14_OFFSET OFFSET(0)
#define GPR15_OFFSET OFFSET(1)
#define GPR16_OFFSET OFFSET(2)
#define GPR17_OFFSET OFFSET(3)
#define GPR18_OFFSET OFFSET(4)
#define GPR19_OFFSET OFFSET(5)
#define GPR20_OFFSET OFFSET(6)
#define GPR21_OFFSET OFFSET(7)
#define GPR22_OFFSET OFFSET(8)
#define GPR23_OFFSET OFFSET(9)
#define GPR24_OFFSET OFFSET(10)
#define GPR25_OFFSET OFFSET(11)
#define GPR26_OFFSET OFFSET(12)
#define GPR27_OFFSET OFFSET(13)
#define GPR28_OFFSET OFFSET(14)
#define GPR29_OFFSET OFFSET(15)
#define GPR30_OFFSET OFFSET(16)
#define GPR31_OFFSET OFFSET(17)

#ifdef PPC_MULTILIB_FPU
#define FOFFSET(i) ((i) * 8 + OFFSET(18))
#define F14_OFFSET FOFFSET(0)
#define F15_OFFSET FOFFSET(1)
#define F16_OFFSET FOFFSET(2)
#define F17_OFFSET FOFFSET(3)
#define F18_OFFSET FOFFSET(4)
#define F19_OFFSET FOFFSET(5)
#define F20_OFFSET FOFFSET(6)
#define F21_OFFSET FOFFSET(7)
#define F22_OFFSET FOFFSET(8)
#define F23_OFFSET FOFFSET(9)
#define F24_OFFSET FOFFSET(10)
#define F25_OFFSET FOFFSET(11)
#define F26_OFFSET FOFFSET(12)
#define F27_OFFSET FOFFSET(13)
#define F28_OFFSET FOFFSET(14)
#define F29_OFFSET FOFFSET(15)
#define F30_OFFSET FOFFSET(16)
#define F31_OFFSET FOFFSET(17)
#define FPSCR_OFFSET FOFFSET(18)
#define FTMP_OFFSET FOFFSET(19)
#define FTMP2_OFFSET FOFFSET(20)
#define FPUEND FOFFSET(21)
#else
#define FPUEND OFFSET(18)
#endif

#ifdef PPC_MULTILIB_ALTIVEC
#define VOFFSET(i) ((i) * 16 + ((FPUEND + 16 - 1) & ~(16 - 1)))
#define V20_OFFSET VOFFSET(0)
#define V21_OFFSET VOFFSET(1)
#define V22_OFFSET VOFFSET(2)
#define V23_OFFSET VOFFSET(3)
#define V24_OFFSET VOFFSET(4)
#define V25_OFFSET VOFFSET(5)
#define V26_OFFSET VOFFSET(6)
#define V27_OFFSET VOFFSET(7)
#define V28_OFFSET VOFFSET(8)
#define V29_OFFSET VOFFSET(9)
#define V30_OFFSET VOFFSET(10)
#define V31_OFFSET VOFFSET(11)
#define VTMP_OFFSET VOFFSET(12)
#define VTMP2_OFFSET VOFFSET(13)
#define VRSAVE_OFFSET VOFFSET(14)
#define VRSAVE2_OFFSET (VOFFSET(14) + 4)
#define VSCR_OFFSET (VOFFSET(14) + 12)
#define ALTIVECEND VOFFSET(15)
#else
#define ALTIVECEND FPUEND
#endif

#define FRAME_SIZE \
  ((ALTIVECEND + CPU_STACK_ALIGNMENT - 1) & ~(CPU_STACK_ALIGNMENT - 1))

	.global _CPU_Context_validate

_CPU_Context_validate:

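	/* Create the stack frame and save the non-volatile context */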
	PPC_REG_STORE_UPDATE r1, -FRAME_SIZE(r1)
	mflr r4
	PPC_REG_STORE r4, LR_OFFSET(r1)
	mfcr r4
	stw r4, CR_OFFSET(r1)
	PPC_REG_STORE r14, GPR14_OFFSET(r1)
	PPC_REG_STORE r15, GPR15_OFFSET(r1)
	PPC_REG_STORE r16, GPR16_OFFSET(r1)
	PPC_REG_STORE r17, GPR17_OFFSET(r1)
	PPC_REG_STORE r18, GPR18_OFFSET(r1)
	PPC_REG_STORE r19, GPR19_OFFSET(r1)
	PPC_REG_STORE r20, GPR20_OFFSET(r1)
	PPC_REG_STORE r21, GPR21_OFFSET(r1)
	PPC_REG_STORE r22, GPR22_OFFSET(r1)
	PPC_REG_STORE r23, GPR23_OFFSET(r1)
	PPC_REG_STORE r24, GPR24_OFFSET(r1)
	PPC_REG_STORE r25, GPR25_OFFSET(r1)
	PPC_REG_STORE r26, GPR26_OFFSET(r1)
	PPC_REG_STORE r27, GPR27_OFFSET(r1)
	PPC_REG_STORE r28, GPR28_OFFSET(r1)
	PPC_REG_STORE r29, GPR29_OFFSET(r1)
	PPC_REG_STORE r30, GPR30_OFFSET(r1)
	PPC_REG_STORE r31, GPR31_OFFSET(r1)

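	/* Save the non-volatile FPU context (FPR14..FPR31) and the FPSCR */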
#ifdef PPC_MULTILIB_FPU
	stfd f14, F14_OFFSET(r1)
	stfd f15, F15_OFFSET(r1)
	stfd f16, F16_OFFSET(r1)
	stfd f17, F17_OFFSET(r1)
	stfd f18, F18_OFFSET(r1)
	stfd f19, F19_OFFSET(r1)
	stfd f20, F20_OFFSET(r1)
	stfd f21, F21_OFFSET(r1)
	stfd f22, F22_OFFSET(r1)
	stfd f23, F23_OFFSET(r1)
	stfd f24, F24_OFFSET(r1)
	stfd f25, F25_OFFSET(r1)
	stfd f26, F26_OFFSET(r1)
	stfd f27, F27_OFFSET(r1)
	stfd f28, F28_OFFSET(r1)
	stfd f29, F29_OFFSET(r1)
	stfd f30, F30_OFFSET(r1)
	stfd f31, F31_OFFSET(r1)
	mffs f0
	stfd f0, FPSCR_OFFSET(r1)
#endif

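	/*
	 * Save the AltiVec context: the caller's VRSAVE, the VSCR (stored
	 * via v0) and the non-volatile vector registers VR20..VR31.  VRSAVE
	 * is then set to all ones so that every vector register is marked
	 * as in use.
	 */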
#ifdef PPC_MULTILIB_ALTIVEC
	mfvrsave r0
	stw r0, VRSAVE_OFFSET(r1)
	li r0, -1	/* set all 32 VRSAVE bits */
	mtvrsave r0
	mfvscr v0
	li r0, VSCR_OFFSET
	stvewx v0, r1, r0
	li r0, V20_OFFSET
	stvx v20, r1, r0
	li r0, V21_OFFSET
	stvx v21, r1, r0
	li r0, V22_OFFSET
	stvx v22, r1, r0
	li r0, V23_OFFSET
	stvx v23, r1, r0
	li r0, V24_OFFSET
	stvx v24, r1, r0
	li r0, V25_OFFSET
	stvx v25, r1, r0
	li r0, V26_OFFSET
	stvx v26, r1, r0
	li r0, V27_OFFSET
	stvx v27, r1, r0
	li r0, V28_OFFSET
	stvx v28, r1, r0
	li r0, V29_OFFSET
	stvx v29, r1, r0
	li r0, V30_OFFSET
	stvx v30, r1, r0
	li r0, V31_OFFSET
	stvx v31, r1, r0
#endif

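	/*
	 * Fill phase: CR is loaded with pattern + 24 (GPR29 will mirror this
	 * value below), LR with pattern + 25, CTR with pattern + 26, XER
	 * with a masked copy of the pattern and GPR0 with pattern + 28.
	 */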
	addi r4, r3, 24
	mtcr r4

	addi r4, r3, 25
	mtlr r4
	addi r4, r3, 26
	mtctr r4
	rlwinm r4, r3, 0, 25, 2
	mtxer r4
	addi r0, r3, 28

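	/*
	 * Fill GPR5..GPR12 and GPR14..GPR27 with pattern + 1 .. pattern + 22.
	 * GPR2 and GPR13 are not overwritten; they are validated in the
	 * check loop below.
	 */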
	addi r5, r3, 1
	addi r6, r3, 2
	addi r7, r3, 3
	addi r8, r3, 4
	addi r9, r3, 5
	addi r10, r3, 6
	addi r11, r3, 7
	addi r12, r3, 8
	addi r14, r3, 9
	addi r15, r3, 10
	addi r16, r3, 11
	addi r17, r3, 12
	addi r18, r3, 13
	addi r19, r3, 14
	addi r20, r3, 15
	addi r21, r3, 16
	addi r22, r3, 17
	addi r23, r3, 18
	addi r24, r3, 19
	addi r25, r3, 20
	addi r26, r3, 21
	addi r27, r3, 22

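	/* GPR28 holds GPR13 (PPC64) or GPR2 (PPC32) XORed with the pattern */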
#ifdef __powerpc64__
	xor r28, r13, r3
#else
	xor r28, r2, r3
#endif

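	/* GPR29 mirrors the CR pattern set above */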
	addi r29, r3, 24

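	/* GPR30 holds the MSR XORed with the pattern */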
	mfmsr r30
	xor r30, r30, r3

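	/* GPR31 holds the current stack pointer */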
	mr r31, r1

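/*
 * FILL_F stores pattern + 0x100 + i and pattern + 0x200 + i into the two
 * words of the FTMP scratch slot and loads the resulting doubleword into
 * FPR i.
 */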
#ifdef PPC_MULTILIB_FPU
.macro FILL_F i
	addi r4, r3, 0x100 + \i
	stw r4, FTMP_OFFSET(r1)
	addi r4, r3, 0x200 + \i
	stw r4, FTMP_OFFSET + 4(r1)
	lfd \i, FTMP_OFFSET(r1)
.endm

	FILL_F 0
	FILL_F 1
	FILL_F 2
	FILL_F 3
	FILL_F 4
	FILL_F 5
	FILL_F 6
	FILL_F 7
	FILL_F 8
	FILL_F 9
	FILL_F 10
	FILL_F 11
	FILL_F 12
	FILL_F 13
	FILL_F 14
	FILL_F 15
	FILL_F 16
	FILL_F 17
	FILL_F 18
	FILL_F 19
	FILL_F 20
	FILL_F 21
	FILL_F 22
	FILL_F 23
	FILL_F 24
	FILL_F 25
	FILL_F 26
	FILL_F 27
	FILL_F 28
	FILL_F 29
	FILL_F 30
	FILL_F 31
#endif

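/*
 * FILL_V stores pattern + 0x300/0x400/0x500/0x600 + i into the four words
 * of the VTMP scratch slot and loads the result into VR i.  Without the
 * VRSAVE ABI, VRSAVE itself is loaded with pattern + 0x700 so it can be
 * checked as well.
 */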
#ifdef PPC_MULTILIB_ALTIVEC
.macro FILL_V i
	addi r4, r3, 0x300 + \i
	stw r4, VTMP_OFFSET(r1)
	addi r4, r3, 0x400 + \i
	stw r4, VTMP_OFFSET + 4(r1)
	addi r4, r3, 0x500 + \i
	stw r4, VTMP_OFFSET + 8(r1)
	addi r4, r3, 0x600 + \i
	stw r4, VTMP_OFFSET + 12(r1)
	li r4, VTMP_OFFSET
	lvx \i, r1, r4
.endm

	FILL_V 0
	FILL_V 1
	FILL_V 2
	FILL_V 3
	FILL_V 4
	FILL_V 5
	FILL_V 6
	FILL_V 7
	FILL_V 8
	FILL_V 9
	FILL_V 10
	FILL_V 11
	FILL_V 12
	FILL_V 13
	FILL_V 14
	FILL_V 15
	FILL_V 16
	FILL_V 17
	FILL_V 18
	FILL_V 19
	FILL_V 20
	FILL_V 21
	FILL_V 22
	FILL_V 23
	FILL_V 24
	FILL_V 25
	FILL_V 26
	FILL_V 27
	FILL_V 28
	FILL_V 29
	FILL_V 30
	FILL_V 31
#ifndef __PPC_VRSAVE__
	addi r4, r3, 0x700
	mtvrsave r4
#endif
#endif

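/*
 * Check loop: compare CR, the GPRs, MSR, LR, CTR, XER and the stack pointer
 * against their expected values.  Any mismatch branches to the restore
 * path.  GPR4 and GPR5 serve as scratch registers for the comparisons.
 */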
check:
	mfcr r4
	cmpw r4, r29
	bne restore
	addi r4, r3, 1
	PPC_REG_CMP r4, r5
	bne restore
	addi r4, r3, 2
	PPC_REG_CMP r4, r6
	bne restore
	addi r4, r3, 3
	PPC_REG_CMP r4, r7
	bne restore
	addi r4, r3, 4
	PPC_REG_CMP r4, r8
	bne restore
	addi r4, r3, 5
	PPC_REG_CMP r4, r9
	bne restore
	addi r4, r3, 6
	PPC_REG_CMP r4, r10
	bne restore
	addi r4, r3, 7
	PPC_REG_CMP r4, r11
	bne restore
	addi r4, r3, 8
	PPC_REG_CMP r4, r12
	bne restore
#ifdef __powerpc64__
	lis r4, .TOC.@highest
	ori r4, r4, .TOC.@higher
	rldicr r4, r4, 32, 31
	oris r4, r4, .TOC.@h
	ori r4, r4, .TOC.@l
	PPC_REG_CMP r4, r2
#else
	lis r4, _SDA_BASE_@h
	ori r4, r4, _SDA_BASE_@l
	PPC_REG_CMP r4, r13
#endif
	bne restore
	addi r4, r3, 9
	PPC_REG_CMP r4, r14
	bne restore
	addi r4, r3, 10
	PPC_REG_CMP r4, r15
	bne restore
	addi r4, r3, 11
	PPC_REG_CMP r4, r16
	bne restore
	addi r4, r3, 12
	PPC_REG_CMP r4, r17
	bne restore
	addi r4, r3, 13
	PPC_REG_CMP r4, r18
	bne restore
	addi r4, r3, 14
	PPC_REG_CMP r4, r19
	bne restore
	addi r4, r3, 15
	PPC_REG_CMP r4, r20
	bne restore
	addi r4, r3, 16
	PPC_REG_CMP r4, r21
	bne restore
	addi r4, r3, 17
	PPC_REG_CMP r4, r22
	bne restore
	addi r4, r3, 18
	PPC_REG_CMP r4, r23
	bne restore
	addi r4, r3, 19
	PPC_REG_CMP r4, r24
	bne restore
	addi r4, r3, 20
	PPC_REG_CMP r4, r25
	bne restore
	addi r4, r3, 21
	PPC_REG_CMP r4, r26
	bne restore
	addi r4, r3, 22
	PPC_REG_CMP r4, r27
	bne restore
#ifdef __powerpc64__
	xor r4, r13, r3
#else
	xor r4, r2, r3
#endif
	PPC_REG_CMP r4, r28
	bne restore
	addi r4, r3, 24
	PPC_REG_CMP r4, r29
	bne restore
	mfmsr r4
	xor r4, r4, r3
	PPC_REG_CMP r4, r30
	bne restore
	addi r4, r3, 25
	mflr r5
	PPC_REG_CMP r4, r5
	bne restore
	addi r4, r3, 26
	mfctr r5
	PPC_REG_CMP r4, r5
	bne restore
	rlwinm r4, r3, 0, 25, 2
	mfxer r5
	cmpw r4, r5
	bne restore
	addi r4, r3, 28
	PPC_REG_CMP r4, r0
	bne restore
	PPC_REG_CMP r31, r1
	bne restore

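/*
 * Check the FPSCR and FPR0..FPR31.  The FPSCR is checked first via f0,
 * which is preserved around the check.  CHECK_F spills FPR i to the FTMP
 * slot and compares both words against the fill values.
 */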
#ifdef PPC_MULTILIB_FPU
.macro CHECK_F i
	stfd \i, FTMP_OFFSET(r1)
	lwz r5, FTMP_OFFSET(r1)
	addi r4, r3, 0x100 + \i
	cmpw r5, r4
	bne restore
	lwz r5, FTMP_OFFSET + 4(r1)
	addi r4, r3, 0x200 + \i
	cmpw r5, r4
	bne restore
.endm

	stfd f0, FTMP_OFFSET(r1)
	mffs f0
	stfd f0, FTMP2_OFFSET(r1)
	lwz r4, FTMP2_OFFSET + 4(r1)
	lwz r5, FPSCR_OFFSET + 4(r1)
	cmpw r5, r4
	bne restore
	lfd f0, FTMP_OFFSET(r1)

	CHECK_F 0
	CHECK_F 1
	CHECK_F 2
	CHECK_F 3
	CHECK_F 4
	CHECK_F 5
	CHECK_F 6
	CHECK_F 7
	CHECK_F 8
	CHECK_F 9
	CHECK_F 10
	CHECK_F 11
	CHECK_F 12
	CHECK_F 13
	CHECK_F 14
	CHECK_F 15
	CHECK_F 16
	CHECK_F 17
	CHECK_F 18
	CHECK_F 19
	CHECK_F 20
	CHECK_F 21
	CHECK_F 22
	CHECK_F 23
	CHECK_F 24
	CHECK_F 25
	CHECK_F 26
	CHECK_F 27
	CHECK_F 28
	CHECK_F 29
	CHECK_F 30
	CHECK_F 31
#endif

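/*
 * Check the VSCR (via v0) and VR0..VR31.  With the VRSAVE ABI
 * (__PPC_VRSAVE__), CHECK_V only inspects vector registers whose VRSAVE
 * bit is set and clears the bit afterwards, while registers whose bit was
 * clear are refilled and their bit set, so successive passes alternate the
 * checked subset; the final VRSAVE value must then be all zeros or all
 * ones.  Without it, every register is checked and VRSAVE must still hold
 * pattern + 0x700.
 */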
#ifdef PPC_MULTILIB_ALTIVEC
.macro CHECK_V i
#ifdef __PPC_VRSAVE__
	mfvrsave r4
.if (31 - \i) > 15
	andis. r5, r4, 1 << (31 - \i - 16)
.else
	andi. r5, r4, 1 << (31 - \i)
.endif
	beq 1f
#endif
	li r4, VTMP_OFFSET
	stvx \i, r1, r4
	lwz r5, VTMP_OFFSET(r1)
	addi r4, r3, 0x300 + \i
	cmpw r5, r4
	bne restore
	lwz r5, VTMP_OFFSET + 4(r1)
	addi r4, r3, 0x400 + \i
	cmpw r5, r4
	bne restore
	lwz r5, VTMP_OFFSET + 8(r1)
	addi r4, r3, 0x500 + \i
	cmpw r5, r4
	bne restore
	lwz r5, VTMP_OFFSET + 12(r1)
	addi r4, r3, 0x600 + \i
	cmpw r5, r4
	bne restore
#ifdef __PPC_VRSAVE__
	mfvrsave r4
.if (31 - \i) > 15
	xoris r4, r4, 1 << (31 - \i - 16)
.else
	xori r4, r4, 1 << (31 - \i)
.endif
	mtvrsave r4
	b 2f
1:
.if (31 - \i) > 15
	oris r4, r4, 1 << (31 - \i - 16)
.else
	ori r4, r4, 1 << (31 - \i)
.endif
	mtvrsave r4
	addi r4, r3, 0x300 + \i
	stw r4, VTMP_OFFSET(r1)
	addi r4, r3, 0x400 + \i
	stw r4, VTMP_OFFSET + 4(r1)
	addi r4, r3, 0x500 + \i
	stw r4, VTMP_OFFSET + 8(r1)
	addi r4, r3, 0x600 + \i
	stw r4, VTMP_OFFSET + 12(r1)
	li r4, VTMP_OFFSET
	lvx \i, r1, r4
2:
#endif
.endm

#ifdef __PPC_VRSAVE__
	mfvrsave r4
	stw r4, VRSAVE2_OFFSET(r1)
	oris r4, r4, 0x8000
	mtvrsave r4
#endif
	li r4, VTMP_OFFSET
	stvx v0, r1, r4
	mfvscr v0
	li r4, VTMP2_OFFSET + 12
	stvewx v0, r1, r4
	lwz r4, VTMP2_OFFSET + 12(r1)
	lwz r5, VSCR_OFFSET(r1)
	cmpw r5, r4
	bne restore
	li r4, VTMP_OFFSET
	lvx v0, r1, r4
#ifdef __PPC_VRSAVE__
	lwz r4, VRSAVE2_OFFSET(r1)
	mtvrsave r4
#endif

	CHECK_V 0
	CHECK_V 1
	CHECK_V 2
	CHECK_V 3
	CHECK_V 4
	CHECK_V 5
	CHECK_V 6
	CHECK_V 7
	CHECK_V 8
	CHECK_V 9
	CHECK_V 10
	CHECK_V 11
	CHECK_V 12
	CHECK_V 13
	CHECK_V 14
	CHECK_V 15
	CHECK_V 16
	CHECK_V 17
	CHECK_V 18
	CHECK_V 19
	CHECK_V 20
	CHECK_V 21
	CHECK_V 22
	CHECK_V 23
	CHECK_V 24
	CHECK_V 25
	CHECK_V 26
	CHECK_V 27
	CHECK_V 28
	CHECK_V 29
	CHECK_V 30
	CHECK_V 31
	mfvrsave r5
#ifdef __PPC_VRSAVE__
	addi r5, r5, 1
	cmplwi r0, r5, 1
	bgt restore
#else
	addi r4, r3, 0x700
	cmpw r5, r4
	bne restore
#endif
#endif

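	/*
	 * All registers match: restore CR and GPR5 (clobbered by the
	 * comparisons above) and run the check again.
	 */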
	mtcr r29
	addi r5, r3, 1
	b check

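/*
 * A mismatch was detected: reload the saved non-volatile context and
 * return to the caller.
 */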
restore:

#ifdef PPC_MULTILIB_ALTIVEC
	li r0, -1	/* set all 32 VRSAVE bits */
	mtvrsave r0
	li r0, V31_OFFSET
	lvx v31, r1, r0
	li r0, V30_OFFSET
	lvx v30, r1, r0
	li r0, V29_OFFSET
	lvx v29, r1, r0
	li r0, V28_OFFSET
	lvx v28, r1, r0
	li r0, V27_OFFSET
	lvx v27, r1, r0
	li r0, V26_OFFSET
	lvx v26, r1, r0
	li r0, V25_OFFSET
	lvx v25, r1, r0
	li r0, V24_OFFSET
	lvx v24, r1, r0
	li r0, V23_OFFSET
	lvx v23, r1, r0
	li r0, V22_OFFSET
	lvx v22, r1, r0
	li r0, V21_OFFSET
	lvx v21, r1, r0
	li r0, V20_OFFSET
	lvx v20, r1, r0
	li r0, VSCR_OFFSET
	lvewx v0, r1, r0
	mtvscr v0
	lwz r0, VRSAVE_OFFSET(r1)
	mtvrsave r0
#endif

#ifdef PPC_MULTILIB_FPU
	lfd f31, F31_OFFSET(r1)
	lfd f30, F30_OFFSET(r1)
	lfd f29, F29_OFFSET(r1)
	lfd f28, F28_OFFSET(r1)
	lfd f27, F27_OFFSET(r1)
	lfd f26, F26_OFFSET(r1)
	lfd f25, F25_OFFSET(r1)
	lfd f24, F24_OFFSET(r1)
	lfd f23, F23_OFFSET(r1)
	lfd f22, F22_OFFSET(r1)
	lfd f21, F21_OFFSET(r1)
	lfd f20, F20_OFFSET(r1)
	lfd f19, F19_OFFSET(r1)
	lfd f18, F18_OFFSET(r1)
	lfd f17, F17_OFFSET(r1)
	lfd f16, F16_OFFSET(r1)
	lfd f15, F15_OFFSET(r1)
	lfd f14, F14_OFFSET(r1)
#endif

	PPC_REG_LOAD r31, GPR31_OFFSET(r1)
	PPC_REG_LOAD r30, GPR30_OFFSET(r1)
	PPC_REG_LOAD r29, GPR29_OFFSET(r1)
	PPC_REG_LOAD r28, GPR28_OFFSET(r1)
	PPC_REG_LOAD r27, GPR27_OFFSET(r1)
	PPC_REG_LOAD r26, GPR26_OFFSET(r1)
	PPC_REG_LOAD r25, GPR25_OFFSET(r1)
	PPC_REG_LOAD r24, GPR24_OFFSET(r1)
	PPC_REG_LOAD r23, GPR23_OFFSET(r1)
	PPC_REG_LOAD r22, GPR22_OFFSET(r1)
	PPC_REG_LOAD r21, GPR21_OFFSET(r1)
	PPC_REG_LOAD r20, GPR20_OFFSET(r1)
	PPC_REG_LOAD r19, GPR19_OFFSET(r1)
	PPC_REG_LOAD r18, GPR18_OFFSET(r1)
	PPC_REG_LOAD r17, GPR17_OFFSET(r1)
	PPC_REG_LOAD r16, GPR16_OFFSET(r1)
	PPC_REG_LOAD r15, GPR15_OFFSET(r1)
	PPC_REG_LOAD r14, GPR14_OFFSET(r1)
	lwz r4, CR_OFFSET(r1)
	mtcr r4
	PPC_REG_LOAD r4, LR_OFFSET(r1)
	mtlr r4
	addi r1, r1, FRAME_SIZE
	blr