Back to home page

LXR

 
 

    


File indexing completed on 2025-05-11 08:24:23

0001 /* SPDX-License-Identifier: BSD-2-Clause */
0002 
0003 /**
0004  * @file
0005  *
0006  * @ingroup RTEMSScoreCPUARM
0007  *
0008  * @brief ARM architecture support implementation.
0009  */
0010 
0011 /*
0012  *  This file contains all assembly code for the ARM implementation
0013  *  of RTEMS.
0014  *
0015  *  Copyright (c) 2007 by Ray Xu, <Rayx.cn@gmail.com>
0016  *          Thumb support added.
0017  *
0018  *  Copyright (c) 2002 by Advent Networks, Inc.
0019  *          Jay Monkman <jmonkman@adventnetworks.com>
0020  *
0021  *  COPYRIGHT (c) 2000 Canon Research Centre France SA.
0022  *  Emmanuel Raguet, mailto:raguet@crf.canon.fr
0023  *
0024  *  Copyright (C) 2013, 2017 embedded brains GmbH & Co. KG
0025  *
0026  * Redistribution and use in source and binary forms, with or without
0027  * modification, are permitted provided that the following conditions
0028  * are met:
0029  * 1. Redistributions of source code must retain the above copyright
0030  *    notice, this list of conditions and the following disclaimer.
0031  * 2. Redistributions in binary form must reproduce the above copyright
0032  *    notice, this list of conditions and the following disclaimer in the
0033  *    documentation and/or other materials provided with the distribution.
0034  *
0035  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
0036  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
0037  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
0038  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
0039  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
0040  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
0041  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
0042  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
0043  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
0044  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
0045  * POSSIBILITY OF SUCH DAMAGE.
0046  *
0047  */
0048 
0049 #ifdef HAVE_CONFIG_H
0050 #include "config.h"
0051 #endif
0052 
0053 #include <rtems/asm.h>
0054 
0055 #ifdef ARM_MULTILIB_ARCH_V4
0056 
0057         .text
0058 
0059 /*
0060  *  void _CPU_Context_switch( run_context, heir_context )
0061  *  void _CPU_Context_restore( new_context )
0062  *
0063  *  This routine performs a normal non-FP context switch.
0064  *
0065  *  R0 = run_context    R1 = heir_context
0066  *
0067  *  This function copies the current registers to where r0 points, then
0068  *  restores the ones from where r1 points.
0069  *
0070  *  Using the ldm/stm opcodes saves 2-3 us on a 100 MHz ARM9TDMI with
0071  *  a 16 bit data bus.
0072  *
0073  */
0074 
0075 DEFINE_FUNCTION_ARM(_CPU_Context_switch)
0076     .globl  _CPU_Context_switch_no_return
0077     .set    _CPU_Context_switch_no_return, _CPU_Context_switch
0078 
0079 /* Start saving context */
0080     GET_SELF_CPU_CONTROL    r2 /* r2 = per-CPU control of this processor */
0081     ldr r3, [r2, #PER_CPU_ISR_DISPATCH_DISABLE] /* r3 = ISR dispatch disable level */
0082     stm r0, {r4, r5, r6, r7, r8, r9, r10, r11, r13, r14} /* save callee-saved regs, sp, lr into run context */
0083 
0084 #ifdef ARM_MULTILIB_VFP
0085     add r5, r0, #ARM_CONTEXT_CONTROL_D8_OFFSET /* r5 = &run_context->d8 save area */
0086     vstm    r5, {d8-d15} /* save callee-saved VFP registers */
0087 #endif
0088 
0089     str r3, [r0, #ARM_CONTEXT_CONTROL_ISR_DISPATCH_DISABLE] /* persist disable level in run context */
0090 
0091 #ifdef RTEMS_SMP
0092     /*
0093      * The executing thread no longer executes on this processor.  Switch
0094      * the stack to the temporary interrupt stack of this processor.  Mark
0095      * the context of the executing thread as not executing.
0096      */
0097     dmb /* order the context save before clearing is-executing */
0098     add sp, r2, #(PER_CPU_INTERRUPT_FRAME_AREA + CPU_INTERRUPT_FRAME_SIZE)
0099     mov r3, #0
0100     strb    r3, [r0, #ARM_CONTEXT_CONTROL_IS_EXECUTING_OFFSET] /* run context: is_executing = false */
0101 
0102 .L_check_is_executing:
0103 
0104     /* Check the is executing indicator of the heir context */
0105     add r3, r1, #ARM_CONTEXT_CONTROL_IS_EXECUTING_OFFSET
0106     ldrexb  r4, [r3] /* load-exclusive: begin atomic test-and-set */
0107     cmp r4, #0
0108     bne .L_get_potential_new_heir /* heir still running elsewhere: retry */
0109 
0110     /* Try to update the is executing indicator of the heir context */
0111     mov r4, #1
0112     strexb  r5, r4, [r3] /* store-exclusive: r5 = 0 on success */
0113     cmp r5, #0
0114     bne .L_get_potential_new_heir /* exclusive store failed: retry */
0115     dmb /* barrier: heir context reads must follow the acquisition */
0116 #endif
0117 
0118 /* Start restoring context */
0119 .L_restore: /* also entered from _CPU_Context_restore with r1 = context, r2 = per-CPU control */
0120 #if !defined(RTEMS_SMP) && defined(ARM_MULTILIB_HAS_LOAD_STORE_EXCLUSIVE)
0121     clrex /* clear any outstanding exclusive monitor reservation */
0122 #endif
0123 
0124 #ifdef ARM_MULTILIB_HAS_THREAD_ID_REGISTER
0125     ldr r3, [r1, #ARM_CONTEXT_CONTROL_THREAD_ID_OFFSET] /* r3 = heir's thread ID value */
0126 #endif
0127 
0128     ldr r4, [r1, #ARM_CONTEXT_CONTROL_ISR_DISPATCH_DISABLE] /* r4 = heir's ISR dispatch disable level */
0129 
0130 #ifdef ARM_MULTILIB_VFP
0131     add r5, r1, #ARM_CONTEXT_CONTROL_D8_OFFSET
0132     vldm    r5, {d8-d15} /* restore callee-saved VFP registers */
0133 #endif
0134 
0135 #ifdef ARM_MULTILIB_HAS_THREAD_ID_REGISTER
0136     mcr p15, 0, r3, c13, c0, 3 /* write CP15 thread ID register (TPIDRURO) */
0137 #endif
0138 
0139     str r4, [r2, #PER_CPU_ISR_DISPATCH_DISABLE] /* install heir's dispatch disable level */
0140 
0141     /* In ARMv5T and above the load of PC is an interworking branch */
0142 #if __ARM_ARCH >= 5
0143     ldm r1, {r4, r5, r6, r7, r8, r9, r10, r11, r13, pc}
0144 #else
0145     ldm r1, {r4, r5, r6, r7, r8, r9, r10, r11, r13, r14}
0146     bx  lr
0147 #endif
0148 
0149 /*
0150  *  void _CPU_Context_restore( new_context )
0151  *
0152  *  This function restores the registers from where r0 points.
0153  *  It must match _CPU_Context_switch().
0154  *
0155  */
0156 DEFINE_FUNCTION_ARM(_CPU_Context_restore)
0157         mov     r1, r0 /* .L_restore expects the context to load in r1 */
0158     GET_SELF_CPU_CONTROL    r2 /* .L_restore expects the per-CPU control in r2 */
0159         b       .L_restore /* reuse the restore half of _CPU_Context_switch */
0160 
0161 #ifdef RTEMS_SMP
0162 .L_get_potential_new_heir: /* entered when the heir's is-executing test-and-set failed */
0163 
0164     /* We may have a new heir */
0165 
0166     /* Read the executing and heir */
0167     ldr r4, [r2, #PER_CPU_OFFSET_EXECUTING] /* r4 = currently registered executing thread */
0168     ldr r5, [r2, #PER_CPU_OFFSET_HEIR] /* r5 = current heir thread */
0169 
0170     /*
0171      * Update the executing only if necessary to avoid cache line
0172      * monopolization.
0173      */
0174     cmp r4, r5
0175     beq .L_check_is_executing /* heir unchanged: just retry the test-and-set */
0176 
0177     /* Calculate the heir context pointer */
0178     sub r4, r1, r4 /* r4 = offset of the context within the thread control block */
0179     add r1, r5, r4 /* r1 = context of the new heir thread */
0180 
0181     /* Update the executing */
0182     str r5, [r2, #PER_CPU_OFFSET_EXECUTING]
0183 
0184     b   .L_check_is_executing
0185 
0186 /* Start multitasking: r0 = context of the first thread to run */
0187 DEFINE_FUNCTION_ARM(_ARM_Start_multitasking)
0188     mov r1, r0 /* heir context in r1 for .L_check_is_executing */
0189     GET_SELF_CPU_CONTROL    r2 /* per-CPU control in r2, as the restore path expects */
0190 
0191     /* Switch the stack to the temporary interrupt stack of this processor */
0192     add sp, r2, #(PER_CPU_INTERRUPT_FRAME_AREA + CPU_INTERRUPT_FRAME_SIZE)
0193 
0194     /* Enable IRQ interrupts */
0195     cpsie   i
0196 
0197     b   .L_check_is_executing /* acquire and restore the heir's context */
0198 #endif
0198 
0199 #endif /* ARM_MULTILIB_ARCH_V4 */