Back to home page

LXR

 
 

    


File indexing completed on 2025-05-11 08:24:22

0001 /* SPDX-License-Identifier: BSD-2-Clause */
0002 
0003 /**
0004  * @file
0005  *
0006  * @ingroup RTEMSScoreCPUAArch64
0007  *
0008  * @brief ARM AArch64 Exception API.
0009  */
0010 
0011 /*
0012  * Copyright (C) 2020 On-Line Applications Research Corporation (OAR)
0013  * Written by Kinsey Moore <kinsey.moore@oarcorp.com>
0014  *
0015  * Redistribution and use in source and binary forms, with or without
0016  * modification, are permitted provided that the following conditions
0017  * are met:
0018  * 1. Redistributions of source code must retain the above copyright
0019  *    notice, this list of conditions and the following disclaimer.
0020  * 2. Redistributions in binary form must reproduce the above copyright
0021  *    notice, this list of conditions and the following disclaimer in the
0022  *    documentation and/or other materials provided with the distribution.
0023  *
0024  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
0025  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
0026  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
0027  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
0028  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
0029  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
0030  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
0031  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
0032  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
0033  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
0034  * POSSIBILITY OF SUCH DAMAGE.
0035  */
0036 
0037 #ifndef LIBCPU_AARCH64_VECTORS_H
0038 #define LIBCPU_AARCH64_VECTORS_H
0039 
0040 #ifndef ASM
0041 
0042 #ifdef __cplusplus
0043 extern "C" {
0044 #endif /* __cplusplus */
0045 
0046 /* VBAR, Vector Base Address Register, Security Extensions */
0047 
/**
 * @brief Reads the EL1 exception vector table base address.
 *
 * Returns the current value of VBAR_EL1, the Vector Base Address
 * Register for exception level 1.
 *
 * @return The EL1 vector table base address.
 */
static inline void *AArch64_get_vector_base_address( void )
{
  void *vbar;

  __asm__ volatile (
    "mrs %0, VBAR_EL1"
    : "=&r" ( vbar )
  );

  return vbar;
}
0060 
/**
 * @brief Sets the EL1 exception vector table base address.
 *
 * Writes the given address into VBAR_EL1, the Vector Base Address
 * Register for exception level 1.
 *
 * @param base The vector table base address to install.
 */
static inline void AArch64_set_vector_base_address( void *base )
{
  __asm__ volatile (
    "msr VBAR_EL1, %0"
    :
    : "r" ( base )
  );
}
0069 
/**
 * @brief Reads the EL2 (hypervisor) exception vector table base address.
 *
 * Returns the current value of VBAR_EL2, the Vector Base Address
 * Register for exception level 2.
 *
 * @return The EL2 vector table base address.
 */
static inline void *AArch64_get_hyp_vector_base_address( void )
{
  void *vbar;

  __asm__ volatile (
    "mrs %0, VBAR_EL2"
    : "=&r" ( vbar )
  );

  return vbar;
}
0082 
0083 static inline void
0084 AArch64_set_hyp_vector_base_address(void *base)
0085 {
0086   __asm__ volatile (
0087     "msr VBAR_EL2, %[base]\n"
0088     : : [base] "r" (base)
0089   );
0090 }
0091 
0092 /** @} */
0093 
0094 #ifdef __cplusplus
0095 }
0096 #endif /* __cplusplus */
0097 
0098 #endif /* ASM */
0099 
0100 #endif /* LIBCPU_AARCH64_VECTORS_H */