#ifndef __CMSIS_ARMCLANG_H
#define __CMSIS_ARMCLANG_H

+ #pragma clang system_header   /* treat file as system include file */
+

#ifndef __ARM_COMPAT_H
#include <arm_compat.h>    /* Compatibility header for ARM Compiler 5 intrinsics */
#endif

/**
  \brief   Reverse byte order (32 bit)
+ \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
- #define __REV          __builtin_bswap32
+ #define __REV(value)   __builtin_bswap32(value)
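A quick usage sketch (illustrative only, not part of the header), matching the example in the new \details text:

  uint32_t swapped = __REV(0x12345678U);   /* swapped == 0x78563412U */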


/**
  \brief   Reverse byte order (16 bit)
+ \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
- #ifndef __NO_EMBEDDED_ASM
- __attribute__((section(".rev16_text"))) __STATIC_INLINE uint32_t __REV16(uint32_t value)
- {
-   uint32_t result;
-   __ASM volatile ("rev16 %0, %1" : "=r" (result) : "r" (value));
-   return result;
- }
- #endif
+ #define __REV16(value)   __ROR(__REV(value), 16)
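__REV16 is now composed from __REV and __ROR; since both are macros, __ROR only needs to be defined before __REV16 is expanded at a call site, not before this line. Illustrative usage (not part of the header), matching the \details example:

  uint32_t x = __REV16(0x12345678U);   /* each halfword byte-reversed: 0x34127856U */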
+


/**
- \brief   Reverse byte order in signed short value
+ \brief   Reverse byte order (16 bit)
+ \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
- #ifndef __NO_EMBEDDED_ASM
- __attribute__((section(".revsh_text"))) __STATIC_INLINE int32_t __REVSH(int32_t value)
- {
-   int32_t result;
-   __ASM volatile ("revsh %0, %1" : "=r" (result) : "r" (value));
-   return result;
- }
- #endif
+ #define __REVSH(value)   (int16_t)__builtin_bswap16(value)
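A sketch of the sign extension provided by the (int16_t) cast, using the values from the \details example (illustrative only):

  int16_t r = __REVSH(0x0080);   /* bytes swapped to 0x8000; as a signed 16-bit value this is -32768 */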
+


/**
  \brief   Rotate Right in unsigned value (32 bit)
@@ -188,31 +181,37 @@ __attribute__((section(".revsh_text"))) __STATIC_INLINE int32_t __REVSH(int32_t
  \param [in]    op2  Number of Bits to rotate
  \return             Rotated value
 */
- __attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
+ __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
{
+   op2 %= 32U;
+   if (op2 == 0U)
+   {
+     return op1;
+   }
  return (op1 >> op2) | (op1 << (32U - op2));
}
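The added guard matters because a shift of a 32-bit operand by 32 bits is undefined behaviour in C: reducing op2 modulo 32 and returning op1 unchanged for a zero count avoids evaluating op1 << 32. Illustrative usage (not part of the header):

  uint32_t a = __ROR(0x000000FFU, 8U);    /* 0xFF000000U */
  uint32_t b = __ROR(0x12345678U, 32U);   /* 32 % 32 == 0, so the value is returned unchanged */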
+

/**
  \brief   Breakpoint
  \param [in]    value  is ignored by the processor.
                 If required, a debugger can use it to store additional information about the breakpoint.
 */
- #define __BKPT(value)   __ASM volatile ("bkpt "#value)
+ #define __BKPT(value)   __ASM volatile ("bkpt "#value)
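Because the macro stringizes its argument directly into the instruction with #value, the argument must be a literal constant. A minimal usage sketch (illustrative only):

  __BKPT(0);   /* emits a "bkpt 0" instruction */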


/**
  \brief   Reverse bit order of value
  \param [in]    value  Value to reverse
  \return               Reversed value
 */
- #define __RBIT   __builtin_arm_rbit
+ #define __RBIT   __builtin_arm_rbit


/**
  \brief   Count leading zeros
  \param [in]    value  Value to count the leading zeros
  \return               number of leading zeros in value
 */
- #define __CLZ   __builtin_clz
+ #define __CLZ   (uint8_t)__builtin_clz
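Usage sketch (illustrative only); note that __builtin_clz is documented as undefined for an argument of 0, so the example uses a non-zero value:

  uint8_t n = __CLZ(0x00010000U);   /* 15 leading zero bits in the 32-bit value */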


/**
  \brief   LDR Exclusive (8 bit)
@@ -313,7 +312,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint
/** \brief  Get CPSR Register
    \return               CPSR Register value
 */
- __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_CPSR(void)
+ __STATIC_FORCEINLINE uint32_t __get_CPSR(void)
{
  uint32_t result;
  __ASM volatile ("MRS %0, cpsr" : "=r" (result) );
@@ -323,31 +322,31 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_CPSR(void)
/** \brief  Set CPSR Register
    \param [in]    cpsr  CPSR value to set
 */
- __attribute__((always_inline)) __STATIC_INLINE void __set_CPSR(uint32_t cpsr)
+ __STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
{
  __ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "memory");
}

/** \brief  Get Mode
    \return               Processor Mode
 */
- __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_mode(void)
+ __STATIC_FORCEINLINE uint32_t __get_mode(void)
{
  return (__get_CPSR() & 0x1FU);
}
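A usage sketch (illustrative only; the 5-bit mode encodings come from the ARM architecture, e.g. 0x13 is Supervisor and 0x1F is System):

  if (__get_mode() == 0x13U)   /* currently executing in Supervisor (SVC) mode */
  {
      /* ... */
  }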

/** \brief  Set Mode
    \param [in]    mode  Mode value to set
 */
- __attribute__((always_inline)) __STATIC_INLINE void __set_mode(uint32_t mode)
+ __STATIC_FORCEINLINE void __set_mode(uint32_t mode)
{
  __ASM volatile ("MSR cpsr_c, %0" : : "r" (mode) : "memory");
}

/** \brief  Get Stack Pointer
    \return               Stack Pointer value
 */
- __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_SP()
+ __STATIC_FORCEINLINE uint32_t __get_SP()
{
  uint32_t result;
  __ASM volatile ("MOV %0, sp" : "=r" (result) : : "memory");
@@ -357,15 +356,15 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_SP()
/** \brief  Set Stack Pointer
    \param [in]    stack  Stack Pointer value to set
 */
- __attribute__((always_inline)) __STATIC_INLINE void __set_SP(uint32_t stack)
+ __STATIC_FORCEINLINE void __set_SP(uint32_t stack)
{
  __ASM volatile ("MOV sp, %0" : : "r" (stack) : "memory");
}

/** \brief  Get USR/SYS Stack Pointer
    \return               USR/SYS Stack Pointer value
 */
- __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_SP_usr()
+ __STATIC_FORCEINLINE uint32_t __get_SP_usr()
{
  uint32_t cpsr;
  uint32_t result;
@@ -382,7 +381,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_SP_usr()
/** \brief  Set USR/SYS Stack Pointer
    \param [in]    topOfProcStack  USR/SYS Stack Pointer value to set
 */
- __attribute__((always_inline)) __STATIC_INLINE void __set_SP_usr(uint32_t topOfProcStack)
+ __STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
{
  uint32_t cpsr;
  __ASM volatile (
@@ -397,7 +396,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_SP_usr(uint32_t topOfP
/** \brief  Get FPEXC
    \return               Floating Point Exception Control register value
 */
- __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPEXC(void)
+ __STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
{
#if (__FPU_PRESENT == 1)
  uint32_t result;
@@ -411,7 +410,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPEXC(void)
/** \brief  Set FPEXC
    \param [in]    fpexc  Floating Point Exception Control value to set
 */
- __attribute__((always_inline)) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
+ __STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
{
#if (__FPU_PRESENT == 1)
  __ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
@@ -424,6 +423,8 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)

#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
+ #define __get_CP64(cp, op1, Rt, CRm) __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
+ #define __set_CP64(cp, op1, Rt, CRm) __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
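The new 64-bit accessors pair the %Q0/%R0 operand modifiers with a 64-bit variable, so its low and high words map onto the two registers of MRRC/MCRR. A hedged usage sketch (illustrative only, assuming a Generic Timer is implemented; its virtual counter CNTVCT is read via MRRC p15, opc1=1, CRm=c14):

  uint64_t cntvct;
  __get_CP64(15, 1, cntvct, 14);   /* read CNTVCT, the 64-bit virtual counter */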

#include "cmsis_cp15.h"
@@ -494,24 +495,23 @@ __STATIC_INLINE void __L1C_CleanInvalidateCache(uint32_t op)
__STATIC_INLINE void __FPU_Enable(void)
{
  __ASM volatile(
-   //Permit access to VFP/NEON, registers by modifying CPACR
+   //Permit access to VFP/NEON, registers by modifying CPACR
    " MRC p15,0,R1,c1,c0,2  \n"
    " ORR R1,R1,#0x00F00000 \n"
    " MCR p15,0,R1,c1,c0,2  \n"

-   //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
+   //Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
    " ISB \n"

-   //Enable VFP/NEON
+   //Enable VFP/NEON
    " VMRS R1,FPEXC \n"
    " ORR R1,R1,#0x40000000 \n"
    " VMSR FPEXC,R1 \n"

-   //Initialise VFP/NEON registers to 0
+   //Initialise VFP/NEON registers to 0
    " MOV R2,#0 \n"

-   #if TARGET_FEATURE_EXTENSION_REGISTER_COUNT >= 16
-   //Initialise D16 registers to 0
+   //Initialise D16 registers to 0
    " VMOV D0, R2,R2 \n"
    " VMOV D1, R2,R2 \n"
    " VMOV D2, R2,R2 \n"
@@ -528,10 +528,9 @@ __STATIC_INLINE void __FPU_Enable(void)
    " VMOV D13,R2,R2 \n"
    " VMOV D14,R2,R2 \n"
    " VMOV D15,R2,R2 \n"
-   #endif

-   #if TARGET_FEATURE_EXTENSION_REGISTER_COUNT == 32
-   //Initialise D32 registers to 0
+   #if __ARM_NEON == 1
+   //Initialise D32 registers to 0
    " VMOV D16,R2,R2 \n"
    " VMOV D17,R2,R2 \n"
    " VMOV D18,R2,R2 \n"
@@ -548,9 +547,9 @@ __STATIC_INLINE void __FPU_Enable(void)
    " VMOV D29,R2,R2 \n"
    " VMOV D30,R2,R2 \n"
    " VMOV D31,R2,R2 \n"
-   ".endif \n"
#endif
-   //Initialise FPSCR to a known state
+
+   //Initialise FPSCR to a known state
    " VMRS R2,FPSCR \n"
    " LDR R3,=0x00086060 \n"  //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
    " AND R2,R2,R3 \n"