/* CMSIS compiler specific defines */
#ifndef __ASM
#define __ASM __asm
- #endif
- #ifndef __INLINE
+ #endif
+ #ifndef __INLINE
#define __INLINE __inline
- #endif
- #ifndef __FORCEINLINE
+ #endif
+ #ifndef __FORCEINLINE
#define __FORCEINLINE __forceinline
- #endif
- #ifndef __STATIC_INLINE
+ #endif
+ #ifndef __STATIC_INLINE
#define __STATIC_INLINE static __inline
- #endif
- #ifndef __STATIC_FORCEINLINE
+ #endif
+ #ifndef __STATIC_FORCEINLINE
#define __STATIC_FORCEINLINE static __forceinline
- #endif
- #ifndef __NO_RETURN
+ #endif
+ #ifndef __NO_RETURN
#define __NO_RETURN __declspec(noreturn)
- #endif
- #ifndef __USED
+ #endif
+ #ifndef CMSIS_DEPRECATED
+ #define CMSIS_DEPRECATED __attribute__((deprecated))
+ #endif
+ #ifndef __USED
#define __USED __attribute__((used))
- #endif
- #ifndef __WEAK
+ #endif
+ #ifndef __WEAK
#define __WEAK __attribute__((weak))
#endif
#ifndef __PACKED
#endif
#ifndef __ALIGNED
#define __ALIGNED(x) __attribute__((aligned(x)))
- #endif
- #ifndef __PACKED
+ #endif
+ #ifndef __PACKED
#define __PACKED __attribute__((packed))
#endif
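
Taken together, these guards give the rest of the header (and application code) a compiler-neutral vocabulary. A minimal usage sketch, assuming a build with this header under Arm Compiler; the type, buffer, and function names are illustrative, not part of CMSIS:

#include <stdint.h>

/* Packed wire-format record and an aligned DMA buffer, using the wrappers defined above. */
typedef struct {
  uint8_t  id;
  uint16_t payload;
} __PACKED sensor_frame_t;

static uint8_t dma_buffer[64] __ALIGNED(32);

/* Kept by the linker even if unreferenced, and overridable by the application. */
__USED __WEAK void sensor_default_handler(void)
{
}

/* Marked so callers get a diagnostic; the declaration itself is hypothetical. */
CMSIS_DEPRECATED void legacy_init(void);

__STATIC_FORCEINLINE uint16_t frame_payload(const sensor_frame_t *frame)
{
  return frame->payload;
}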
/**
\brief Reverse byte order (32 bit)
+ \details Reverses the byte order in an unsigned integer value. For example, 0x12345678 becomes 0x78563412.
\param [in] value Value to reverse
\return Reversed value
*/
#define __REV __rev

/**
\brief Reverse byte order (16 bit)
+ \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
\param [in] value Value to reverse
\return Reversed value
*/
@@ -153,12 +158,13 @@ __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(u
#endif

/**
- \brief Reverse byte order in signed short value
+ \brief Reverse byte order (16 bit)
+ \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
\param [in] value Value to reverse
\return Reversed value
*/
#ifndef __NO_EMBEDDED_ASM
- __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
+ __attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
{
revsh r0, r0
bx lr
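
The three reversal intrinsics above are easiest to read side by side. A minimal sketch, assuming the surrounding header is included in an Arm Compiler build; the function name and the expected values (taken from the \details text) are for illustration only:

#include <stdint.h>

void byte_swap_demo(void)
{
  uint32_t word = 0x12345678U;

  uint32_t r32 = __REV(word);               /* 0x78563412: all four bytes reversed          */
  uint32_t r16 = __REV16(word);             /* 0x34127856: bytes reversed within halfwords  */
  int16_t  rsh = __REVSH((int16_t)0x0080);  /* 0x8000 as a signed 16-bit result              */

  (void)r32; (void)r16; (void)rsh;
}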
@@ -351,14 +357,16 @@ __STATIC_INLINE void __set_CPSR(uint32_t cpsr)
/** \brief Get Mode
\return Processor Mode
*/
- __STATIC_INLINE uint32_t __get_mode(void) {
+ __STATIC_INLINE uint32_t __get_mode(void)
+ {
return (__get_CPSR() & 0x1FU);
}
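
Since __get_mode just masks CPSR.M, a hedged sketch of how calling code might use it; the helper name is illustrative, and the raw mode values cited in the comments are the architectural CPSR.M encodings rather than constants defined in this hunk:

#include <stdint.h>

/* Returns non-zero when executing in a privileged mode. */
static inline int is_privileged_example(void)
{
  uint32_t mode = __get_mode();   /* CPSR & 0x1FU, as defined above                     */
  return (mode != 0x10U);         /* 0x10 is User mode; all other modes are privileged  */
}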

/** \brief Set Mode
\param [in] mode Mode value to set
*/
- __STATIC_INLINE __ASM void __set_mode(uint32_t mode) {
+ __STATIC_INLINE __ASM void __set_mode(uint32_t mode)
+ {
MOV r1, lr
MSR CPSR_C, r0
BX r1
@@ -373,7 +381,7 @@ __STATIC_INLINE __ASM uint32_t __get_SP(void)
BX lr
}

- /** \brief Set Stack Pointer
+ /** \brief Set Stack Pointer
\param [in] stack Stack Pointer value to set
*/
__STATIC_INLINE __ASM void __set_SP(uint32_t stack)
@@ -442,75 +450,32 @@ __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
/*
* Include common core functions to access Coprocessor 15 registers
*/
-
- #define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); Rt = tmp; } while(0)
- #define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = Rt; } while(0)

- #include "cmsis_cp15.h"
+ #define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); (Rt) = tmp; } while(0)
+ #define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = (Rt); } while(0)
+ #define __get_CP64(cp, op1, Rt, CRm) \
+ do { \
+ uint32_t ltmp, htmp; \
+ __ASM volatile("MRRC p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
+ (Rt) = ((((uint64_t)htmp) << 32U) | ((uint64_t)ltmp)); \
+ } while(0)

- /** \brief Clean and Invalidate the entire data or unified cache
- * \param [in] op 0 - invalidate, 1 - clean, otherwise - invalidate and clean
- */
- __STATIC_INLINE __ASM void __L1C_CleanInvalidateCache(uint32_t op) {
- ARM
+ #define __set_CP64(cp, op1, Rt, CRm) \
+ do { \
+ const uint64_t tmp = (Rt); \
+ const uint32_t ltmp = (uint32_t)(tmp); \
+ const uint32_t htmp = (uint32_t)(tmp >> 32U); \
+ __ASM volatile("MCRR p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
+ } while(0)

- PUSH {R4-R11}
-
- MRC p15, 1, R6, c0, c0, 1 // Read CLIDR
- ANDS R3, R6, #0x07000000 // Extract coherency level
- MOV R3, R3, LSR #23 // Total cache levels << 1
- BEQ Finished // If 0, no need to clean
-
- MOV R10, #0 // R10 holds current cache level << 1
- Loop1 ADD R2, R10, R10, LSR #1 // R2 holds cache "Set" position
- MOV R1, R6, LSR R2 // Bottom 3 bits are the Cache-type for this level
- AND R1, R1, #7 // Isolate those lower 3 bits
- CMP R1, #2
- BLT Skip // No cache or only instruction cache at this level
-
- MCR p15, 2, R10, c0, c0, 0 // Write the Cache Size selection register
- ISB // ISB to sync the change to the CacheSizeID reg
- MRC p15, 1, R1, c0, c0, 0 // Reads current Cache Size ID register
- AND R2, R1, #7 // Extract the line length field
- ADD R2, R2, #4 // Add 4 for the line length offset (log2 16 bytes)
- LDR R4, =0x3FF
- ANDS R4, R4, R1, LSR #3 // R4 is the max number on the way size (right aligned)
- CLZ R5, R4 // R5 is the bit position of the way size increment
- LDR R7, =0x7FFF
- ANDS R7, R7, R1, LSR #13 // R7 is the max number of the index size (right aligned)
-
- Loop2 MOV R9, R4 // R9 working copy of the max way size (right aligned)
-
- Loop3 ORR R11, R10, R9, LSL R5 // Factor in the Way number and cache number into R11
- ORR R11, R11, R7, LSL R2 // Factor in the Set number
- CMP R0, #0
- BNE Dccsw
- MCR p15, 0, R11, c7, c6, 2 // DCISW. Invalidate by Set/Way
- B cont
- Dccsw CMP R0, #1
- BNE Dccisw
- MCR p15, 0, R11, c7, c10, 2 // DCCSW. Clean by Set/Way
- B cont
- Dccisw MCR p15, 0, R11, c7, c14, 2 // DCCISW. Clean and Invalidate by Set/Way
- cont SUBS R9, R9, #1 // Decrement the Way number
- BGE Loop3
- SUBS R7, R7, #1 // Decrement the Set number
- BGE Loop2
- Skip ADD R10, R10, #2 // Increment the cache number
- CMP R3, R10
- BGT Loop1
-
- Finished
- DSB
- POP {R4-R11}
- BX lr
- }
+ #include "cmsis_cp15.h"
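
The __get_CP/__set_CP family wraps Arm Compiler's named-register syntax for coprocessor accesses, and cmsis_cp15.h builds its register accessors on top of them. A sketch of the pattern, assuming an Arm Compiler build; the wrapper name is illustrative and reads the Main ID Register (MRC p15, 0, Rt, c0, c0, 0):

#include <stdint.h>

static inline uint32_t read_midr_example(void)
{
  uint32_t midr;
  __get_CP(15, 0, midr, 0, 0, 0);   /* expands to a "cp15:0:c0:c0:0" named-register read */
  return midr;
}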
/** \brief Enable Floating Point Unit

Critical section, called from undef handler, so systick is disabled
*/
- __STATIC_INLINE __ASM void __FPU_Enable(void) {
+ __STATIC_INLINE __ASM void __FPU_Enable(void)
+ {
ARM

//Permit access to VFP/NEON, registers by modifying CPACR
@@ -528,7 +493,7 @@ __STATIC_INLINE __ASM void __FPU_Enable(void) {

//Initialise VFP/NEON registers to 0
MOV R2, #0
- IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} >= 16
+
//Initialise D16 registers to 0
VMOV D0, R2, R2
VMOV D1, R2, R2
@@ -546,7 +511,7 @@ __STATIC_INLINE __ASM void __FPU_Enable(void) {
VMOV D13, R2, R2
VMOV D14, R2, R2
VMOV D15, R2, R2
- ENDIF
+
IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} == 32
//Initialise D32 registers to 0
VMOV D16, R2, R2