Back to home page

LXR

 
 

    


File indexing completed on 2025-05-11 08:24:23

0001 /* SPDX-License-Identifier: BSD-2-Clause */
0002 
0003 /**
0004  * @file
0005  *
0006  * @ingroup RTEMSScoreCPUARM
0007  *
0008  * @brief This source file contains static assertions to ensure the consistency
0009  *   of interfaces used in C and assembler and it contains the ARM-specific
0010  *   implementation of _CPU_Initialize(), _CPU_ISR_Get_level(),
0011  *   _CPU_ISR_Set_level(), and _CPU_Context_Initialize().
0012  */
0013 
0014 /*
0015  *  COPYRIGHT (c) 2000 Canon Research Centre France SA.
0016  *  Emmanuel Raguet, mailto:raguet@crf.canon.fr
0017  *
0018  *  Copyright (c) 2002 Advent Networks, Inc
0019  *      Jay Monkman <jmonkman@adventnetworks.com>
0020  *
0021  *  Copyright (c) 2007 Ray xu <rayx.cn@gmail.com>
0022  *
0023  *  Copyright (C) 2009, 2017 embedded brains GmbH & Co. KG
0024  *
0025  * Redistribution and use in source and binary forms, with or without
0026  * modification, are permitted provided that the following conditions
0027  * are met:
0028  * 1. Redistributions of source code must retain the above copyright
0029  *    notice, this list of conditions and the following disclaimer.
0030  * 2. Redistributions in binary form must reproduce the above copyright
0031  *    notice, this list of conditions and the following disclaimer in the
0032  *    documentation and/or other materials provided with the distribution.
0033  *
0034  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
0035  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
0036  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
0037  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
0038  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
0039  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
0040  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
0041  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
0042  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
0043  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
0044  * POSSIBILITY OF SUCH DAMAGE.
0045  */
0046 
0047 #ifdef HAVE_CONFIG_H
0048 #include "config.h"
0049 #endif
0050 
0051 #include <rtems/score/cpuimpl.h>
0052 #include <rtems/score/thread.h>
0053 #include <rtems/score/tls.h>
0054 
/*
 * The following static assertions verify that the offsets and sizes assumed
 * by the assembler sources (context switch and exception handling code) match
 * the actual C structure layouts.  A mismatch is caught at compile time.
 */

/* VFP context starts with the callee-saved double registers at d8 */
#ifdef ARM_MULTILIB_VFP
  RTEMS_STATIC_ASSERT(
    offsetof( Context_Control, register_d8 ) == ARM_CONTEXT_CONTROL_D8_OFFSET,
    ARM_CONTEXT_CONTROL_D8_OFFSET
  );
#endif

/* Thread identifier slot used for thread-local storage addressing */
RTEMS_STATIC_ASSERT(
  offsetof( Context_Control, thread_id )
    == ARM_CONTEXT_CONTROL_THREAD_ID_OFFSET,
  ARM_CONTEXT_CONTROL_THREAD_ID_OFFSET
);

/* ISR dispatch disable flag is only part of the context on ARMv4 */
#ifdef ARM_MULTILIB_ARCH_V4
  RTEMS_STATIC_ASSERT(
    offsetof( Context_Control, isr_dispatch_disable )
      == ARM_CONTEXT_CONTROL_ISR_DISPATCH_DISABLE,
    ARM_CONTEXT_CONTROL_ISR_DISPATCH_DISABLE
  );
#endif

/* SMP configurations track whether the context is currently executing */
#ifdef RTEMS_SMP
  RTEMS_STATIC_ASSERT(
    offsetof( Context_Control, is_executing )
      == ARM_CONTEXT_CONTROL_IS_EXECUTING_OFFSET,
    ARM_CONTEXT_CONTROL_IS_EXECUTING_OFFSET
  );
#endif

/* Overall exception frame size as assumed by the exception entry code */
RTEMS_STATIC_ASSERT(
  sizeof( CPU_Exception_frame ) == ARM_EXCEPTION_FRAME_SIZE,
  ARM_EXCEPTION_FRAME_SIZE
);

/* The exception frame is pushed on the stack and must keep it aligned */
RTEMS_STATIC_ASSERT(
  sizeof( CPU_Exception_frame ) % CPU_STACK_ALIGNMENT == 0,
  CPU_Exception_frame_alignment
);

RTEMS_STATIC_ASSERT(
  offsetof( CPU_Exception_frame, register_r8 )
    == ARM_EXCEPTION_FRAME_REGISTER_R8_OFFSET,
  ARM_EXCEPTION_FRAME_REGISTER_R8_OFFSET
);

RTEMS_STATIC_ASSERT(
  offsetof( CPU_Exception_frame, register_sp )
    == ARM_EXCEPTION_FRAME_REGISTER_SP_OFFSET,
  ARM_EXCEPTION_FRAME_REGISTER_SP_OFFSET
);

RTEMS_STATIC_ASSERT(
  offsetof( CPU_Exception_frame, register_pc )
    == ARM_EXCEPTION_FRAME_REGISTER_PC_OFFSET,
  ARM_EXCEPTION_FRAME_REGISTER_PC_OFFSET
);

/* The program status register field differs between the A/R and M profiles */
#if defined(ARM_MULTILIB_ARCH_V4)
  RTEMS_STATIC_ASSERT(
    offsetof( CPU_Exception_frame, register_cpsr )
      == ARM_EXCEPTION_FRAME_REGISTER_CPSR_OFFSET,
    ARM_EXCEPTION_FRAME_REGISTER_CPSR_OFFSET
  );
#elif defined(ARM_MULTILIB_ARCH_V6M) || defined(ARM_MULTILIB_ARCH_V7M)
  RTEMS_STATIC_ASSERT(
    offsetof( CPU_Exception_frame, register_xpsr )
      == ARM_EXCEPTION_FRAME_REGISTER_XPSR_OFFSET,
    ARM_EXCEPTION_FRAME_REGISTER_XPSR_OFFSET
  );
#endif

RTEMS_STATIC_ASSERT(
  sizeof( ARM_VFP_context ) == ARM_VFP_CONTEXT_SIZE,
  ARM_VFP_CONTEXT_SIZE
);
0130 
0131 #ifdef ARM_MULTILIB_ARCH_V4
0132 
0133 void _CPU_Context_Initialize(
0134   Context_Control *the_context,
0135   void *stack_area_begin,
0136   size_t stack_area_size,
0137   uint32_t new_level,
0138   void (*entry_point)( void ),
0139   bool is_fp,
0140   void *tls_area
0141 )
0142 {
0143   (void) new_level;
0144 
0145   the_context->register_sp = (uint32_t) stack_area_begin + stack_area_size;
0146   the_context->register_lr = (uint32_t) entry_point;
0147   the_context->isr_dispatch_disable = 0;
0148   the_context->thread_id = (uint32_t) tls_area;
0149 
0150   if ( tls_area != NULL ) {
0151     the_context->thread_id = (uint32_t) _TLS_Initialize_area( tls_area );
0152   }
0153 }
0154 
0155 #if !defined(RTEMS_PARAVIRT)
/*
 * Sets the interrupt level.  On this port the level parameter is ignored and
 * interrupts are simply enabled by clearing the I bit in the CPSR.
 */
void _CPU_ISR_Set_level( uint32_t level )
{
  uint32_t arm_switch_reg;

  /* Ignore the level parameter and just enable interrupts */
  (void) level;

  /*
   * ARM_SWITCH_TO_ARM/ARM_SWITCH_BACK bracket the sequence so that the
   * CPSR-manipulating instructions execute in ARM state; the read-modify-write
   * of the CPSR clears only the IRQ disable bit (ARM_PSR_I).
   */
  __asm__ volatile (
    ARM_SWITCH_TO_ARM
    "mrs %[arm_switch_reg], cpsr\n"
    "bic %[arm_switch_reg], #" RTEMS_XSTRING( ARM_PSR_I ) "\n"
    "msr cpsr, %0\n"
    ARM_SWITCH_BACK
    : [arm_switch_reg] "=&r" (arm_switch_reg)
  );
}
0172 
/*
 * Returns the current interrupt level: 1 if interrupts are disabled (CPSR I
 * bit set), otherwise 0.
 */
uint32_t _CPU_ISR_Get_level( void )
{
  ARM_SWITCH_REGISTERS;
  uint32_t level;

  /* Read the CPSR in ARM state and isolate the IRQ disable bit */
  __asm__ volatile (
    ARM_SWITCH_TO_ARM
    "mrs %[level], cpsr\n"
    "and %[level], #" RTEMS_XSTRING( ARM_PSR_I ) "\n"
    ARM_SWITCH_BACK
    : [level] "=&r" (level) ARM_SWITCH_ADDITIONAL_OUTPUT
  );

  /* Normalize the masked bit to a boolean-style 0/1 level */
  return ( level & ARM_PSR_I ) != 0;
}
0188 #endif /* RTEMS_PARAVIRT */
0189 
/*
 * Performs CPU-dependent initialization.  Nothing is required on this port.
 */
void _CPU_Initialize( void )
{
  /* Do nothing */
}
0194 
0195 #endif /* ARM_MULTILIB_ARCH_V4 */