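/**
 * @file
 *
 * @brief This header file provides the static inline implementation of the
 *   Deterministic Priority SMP Scheduler operations.
 */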
#ifndef _RTEMS_SCORE_SCHEDULERPRIORITYSMPIMPL_H
#define _RTEMS_SCORE_SCHEDULERPRIORITYSMPIMPL_H

#include <rtems/score/schedulerprioritysmp.h>
#include <rtems/score/schedulerpriorityimpl.h>
#include <rtems/score/schedulersimpleimpl.h>
#include <rtems/score/schedulersmpimpl.h>

#ifdef __cplusplus
extern "C" {
#endif

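/* Downcasts the generic scheduler context to the priority SMP context. */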
static inline Scheduler_priority_SMP_Context *_Scheduler_priority_SMP_Get_self(
  Scheduler_Context *context
)
{
  return (Scheduler_priority_SMP_Context *) context;
}

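/* Returns the home scheduler node of the thread as a priority SMP node. */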
static inline Scheduler_priority_SMP_Node *_Scheduler_priority_SMP_Thread_get_node(
  Thread_Control *thread
)
{
  return (Scheduler_priority_SMP_Node *)
    _Thread_Scheduler_get_home_node( thread );
}

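/* Downcasts a generic scheduler node to a priority SMP node. */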
static inline Scheduler_priority_SMP_Node *
_Scheduler_priority_SMP_Node_downcast( Scheduler_Node *node )
{
  return (Scheduler_priority_SMP_Node *) node;
}

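/* Returns true if at least one node is ready, indicated by a non-empty
   priority bit map. */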
static inline bool _Scheduler_priority_SMP_Has_ready( Scheduler_Context *context )
{
  Scheduler_priority_SMP_Context *self =
    _Scheduler_priority_SMP_Get_self( context );

  return !_Priority_bit_map_Is_empty( &self->Bit_map );
}

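/* Removes the node from the chain of scheduled nodes and prepends it to the
   ready queue of its priority. */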
static inline void _Scheduler_priority_SMP_Move_from_scheduled_to_ready(
  Scheduler_Context *context,
  Scheduler_Node *scheduled_to_ready
)
{
  Scheduler_priority_SMP_Context *self =
    _Scheduler_priority_SMP_Get_self( context );
  Scheduler_priority_SMP_Node *node =
    _Scheduler_priority_SMP_Node_downcast( scheduled_to_ready );

  _Chain_Extract_unprotected( &node->Base.Base.Node.Chain );
  _Scheduler_priority_Ready_queue_enqueue_first(
    &node->Base.Base.Node.Chain,
    &node->Ready_queue,
    &self->Bit_map
  );
}

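/* Extracts the node from its ready queue and inserts it into the chain of
   scheduled nodes in priority order, appending within its priority group. */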
static inline void _Scheduler_priority_SMP_Move_from_ready_to_scheduled(
  Scheduler_Context *context,
  Scheduler_Node *ready_to_scheduled
)
{
  Scheduler_priority_SMP_Context *self;
  Scheduler_priority_SMP_Node *node;
  Priority_Control insert_priority;

  self = _Scheduler_priority_SMP_Get_self( context );
  node = _Scheduler_priority_SMP_Node_downcast( ready_to_scheduled );

  _Scheduler_priority_Ready_queue_extract(
    &node->Base.Base.Node.Chain,
    &node->Ready_queue,
    &self->Bit_map
  );
  insert_priority = _Scheduler_SMP_Node_priority( &node->Base.Base );
  insert_priority = SCHEDULER_PRIORITY_APPEND( insert_priority );
  _Chain_Insert_ordered_unprotected(
    &self->Base.Scheduled,
    &node->Base.Base.Node.Chain,
    &insert_priority,
    _Scheduler_SMP_Priority_less_equal
  );
}

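/* Enqueues the node in the ready queue of its priority, at the tail for an
   append insert priority and at the head otherwise. */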
static inline void _Scheduler_priority_SMP_Insert_ready(
  Scheduler_Context *context,
  Scheduler_Node *node_base,
  Priority_Control insert_priority
)
{
  Scheduler_priority_SMP_Context *self;
  Scheduler_priority_SMP_Node *node;

  self = _Scheduler_priority_SMP_Get_self( context );
  node = _Scheduler_priority_SMP_Node_downcast( node_base );

  if ( SCHEDULER_PRIORITY_IS_APPEND( insert_priority ) ) {
    _Scheduler_priority_Ready_queue_enqueue(
      &node->Base.Base.Node.Chain,
      &node->Ready_queue,
      &self->Bit_map
    );
  } else {
    _Scheduler_priority_Ready_queue_enqueue_first(
      &node->Base.Base.Node.Chain,
      &node->Ready_queue,
      &self->Bit_map
    );
  }
}

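/* Removes the node from its ready queue and updates the priority bit map. */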
static inline void _Scheduler_priority_SMP_Extract_from_ready(
  Scheduler_Context *context,
  Scheduler_Node *node_to_extract
)
{
  Scheduler_priority_SMP_Context *self =
    _Scheduler_priority_SMP_Get_self( context );
  Scheduler_priority_SMP_Node *node =
    _Scheduler_priority_SMP_Node_downcast( node_to_extract );

  _Scheduler_priority_Ready_queue_extract(
    &node->Base.Base.Node.Chain,
    &node->Ready_queue,
    &self->Bit_map
  );
}

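/* Removes the last node of the idle ready queue and returns it; this is the
   lowest priority ready node. */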
static inline Scheduler_Node *_Scheduler_priority_SMP_Get_idle( void *arg )
{
  Scheduler_priority_SMP_Context *self;
  Scheduler_priority_SMP_Node *lowest_ready;

  self = _Scheduler_priority_SMP_Get_self( arg );
  lowest_ready = (Scheduler_priority_SMP_Node *)
    _Chain_Last( self->idle_ready_queue );
  _Scheduler_priority_Ready_queue_extract(
    &lowest_ready->Base.Base.Node.Chain,
    &lowest_ready->Ready_queue,
    &self->Bit_map
  );

  return &lowest_ready->Base.Base;
}

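/* Enqueues a released idle node at the tail of its ready queue. */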
static inline void _Scheduler_priority_SMP_Release_idle(
  Scheduler_Node *node_base,
  void *arg
)
{
  Scheduler_priority_SMP_Context *self;
  Scheduler_priority_SMP_Node *node;

  self = _Scheduler_priority_SMP_Get_self( arg );
  node = _Scheduler_priority_SMP_Node_downcast( node_base );

  _Scheduler_priority_Ready_queue_enqueue(
    &node->Base.Base.Node.Chain,
    &node->Ready_queue,
    &self->Bit_map
  );
}

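/* Sets the new priority of the node and moves it to the ready queue that
   corresponds to the unmapped priority. */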
static inline void _Scheduler_priority_SMP_Do_update(
  Scheduler_Context *context,
  Scheduler_Node *node_to_update,
  Priority_Control new_priority
)
{
  Scheduler_priority_SMP_Context *self;
  Scheduler_priority_SMP_Node *node;

  self = _Scheduler_priority_SMP_Get_self( context );
  node = _Scheduler_priority_SMP_Node_downcast( node_to_update );

  _Scheduler_SMP_Node_update_priority( &node->Base, new_priority );
  _Scheduler_priority_Ready_queue_update(
    &node->Ready_queue,
    SCHEDULER_PRIORITY_UNMAP( new_priority ),
    &self->Bit_map,
    &self->Ready[ 0 ]
  );
}

#ifdef __cplusplus
}
#endif

#endif /* _RTEMS_SCORE_SCHEDULERPRIORITYSMPIMPL_H */