Skip to content

Commit 5660256

Browse files
RobertRostohar and TomoYamanaka
authored and committed
Core(A): Updated __FPU_Enable function (VFP register count detection)
1 parent 461c215 commit 5660256

File tree

6 files changed

+169
-118
lines changed

6 files changed

+169
-118
lines changed

cmsis/TARGET_CORTEX_A/cmsis_armcc.h

Lines changed: 31 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -134,13 +134,15 @@
134134

135135
/**
136136
\brief Reverse byte order (32 bit)
137+
\details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
137138
\param [in] value Value to reverse
138139
\return Reversed value
139140
*/
140141
#define __REV __rev
141142

142143
/**
143144
\brief Reverse byte order (16 bit)
145+
\details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
144146
\param [in] value Value to reverse
145147
\return Reversed value
146148
*/
@@ -153,12 +155,13 @@ __attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(u
153155
#endif
154156

155157
/**
156-
\brief Reverse byte order in signed short value
158+
\brief Reverse byte order (16 bit)
159+
\details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
157160
\param [in] value Value to reverse
158161
\return Reversed value
159162
*/
160163
#ifndef __NO_EMBEDDED_ASM
161-
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int32_t __REVSH(int32_t value)
164+
__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
162165
{
163166
revsh r0, r0
164167
bx lr
@@ -351,14 +354,16 @@ __STATIC_INLINE void __set_CPSR(uint32_t cpsr)
351354
/** \brief Get Mode
352355
\return Processor Mode
353356
*/
354-
__STATIC_INLINE uint32_t __get_mode(void) {
357+
__STATIC_INLINE uint32_t __get_mode(void)
358+
{
355359
return (__get_CPSR() & 0x1FU);
356360
}
357361

358362
/** \brief Set Mode
359363
\param [in] mode Mode value to set
360364
*/
361-
__STATIC_INLINE __ASM void __set_mode(uint32_t mode) {
365+
__STATIC_INLINE __ASM void __set_mode(uint32_t mode)
366+
{
362367
MOV r1, lr
363368
MSR CPSR_C, r0
364369
BX r1
@@ -443,15 +448,30 @@ __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
443448
* Include common core functions to access Coprocessor 15 registers
444449
*/
445450

446-
#define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); Rt = tmp; } while(0)
447-
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = Rt; } while(0)
451+
#define __get_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); (Rt) = tmp; } while(0)
452+
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) do { register uint32_t tmp __ASM("cp" # cp ":" # op1 ":c" # CRn ":c" # CRm ":" # op2); tmp = (Rt); } while(0)
453+
#define __get_CP64(cp, op1, Rt, CRm) \
454+
do { \
455+
uint32_t ltmp, htmp; \
456+
__ASM volatile("MRRC p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
457+
(Rt) = ((((uint64_t)htmp) << 32U) | ((uint64_t)ltmp)); \
458+
} while(0)
459+
460+
#define __set_CP64(cp, op1, Rt, CRm) \
461+
do { \
462+
const uint64_t tmp = (Rt); \
463+
const uint32_t ltmp = (uint32_t)(tmp); \
464+
const uint32_t htmp = (uint32_t)(tmp >> 32U); \
465+
__ASM volatile("MCRR p" # cp ", " # op1 ", ltmp, htmp, c" # CRm); \
466+
} while(0)
448467

449468
#include "cmsis_cp15.h"
450469

451470
/** \brief Clean and Invalidate the entire data or unified cache
452471
* \param [in] op 0 - invalidate, 1 - clean, otherwise - invalidate and clean
453472
*/
454-
__STATIC_INLINE __ASM void __L1C_CleanInvalidateCache(uint32_t op) {
473+
__STATIC_INLINE __ASM void __L1C_CleanInvalidateCache(uint32_t op)
474+
{
455475
ARM
456476

457477
PUSH {R4-R11}
@@ -510,7 +530,8 @@ Finished
510530
511531
Critical section, called from undef handler, so systick is disabled
512532
*/
513-
__STATIC_INLINE __ASM void __FPU_Enable(void) {
533+
__STATIC_INLINE __ASM void __FPU_Enable(void)
534+
{
514535
ARM
515536

516537
//Permit access to VFP/NEON, registers by modifying CPACR
@@ -528,7 +549,7 @@ __STATIC_INLINE __ASM void __FPU_Enable(void) {
528549

529550
//Initialise VFP/NEON registers to 0
530551
MOV R2,#0
531-
IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} >= 16
552+
532553
//Initialise D16 registers to 0
533554
VMOV D0, R2,R2
534555
VMOV D1, R2,R2
@@ -546,7 +567,7 @@ __STATIC_INLINE __ASM void __FPU_Enable(void) {
546567
VMOV D13,R2,R2
547568
VMOV D14,R2,R2
548569
VMOV D15,R2,R2
549-
ENDIF
570+
550571
IF {TARGET_FEATURE_EXTENSION_REGISTER_COUNT} == 32
551572
//Initialise D32 registers to 0
552573
VMOV D16,R2,R2

cmsis/TARGET_CORTEX_A/cmsis_armclang.h

Lines changed: 42 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,8 @@
2525
#ifndef __CMSIS_ARMCLANG_H
2626
#define __CMSIS_ARMCLANG_H
2727

28+
#pragma clang system_header /* treat file as system include file */
29+
2830
#ifndef __ARM_COMPAT_H
2931
#include <arm_compat.h> /* Compatibility header for ARM Compiler 5 intrinsics */
3032
#endif
@@ -148,38 +150,29 @@
148150

149151
/**
150152
\brief Reverse byte order (32 bit)
153+
\details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
151154
\param [in] value Value to reverse
152155
\return Reversed value
153156
*/
154-
#define __REV __builtin_bswap32
157+
#define __REV(value) __builtin_bswap32(value)
155158

156159
/**
157160
\brief Reverse byte order (16 bit)
161+
\details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
158162
\param [in] value Value to reverse
159163
\return Reversed value
160164
*/
161-
#ifndef __NO_EMBEDDED_ASM
162-
__attribute__((section(".rev16_text"))) __STATIC_INLINE uint32_t __REV16(uint32_t value)
163-
{
164-
uint32_t result;
165-
__ASM volatile("rev16 %0, %1" : "=r" (result) : "r" (value));
166-
return result;
167-
}
168-
#endif
165+
#define __REV16(value) __ROR(__REV(value), 16)
166+
169167

170168
/**
171-
\brief Reverse byte order in signed short value
169+
\brief Reverse byte order (16 bit)
170+
\details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
172171
\param [in] value Value to reverse
173172
\return Reversed value
174173
*/
175-
#ifndef __NO_EMBEDDED_ASM
176-
__attribute__((section(".revsh_text"))) __STATIC_INLINE int32_t __REVSH(int32_t value)
177-
{
178-
int32_t result;
179-
__ASM volatile("revsh %0, %1" : "=r" (result) : "r" (value));
180-
return result;
181-
}
182-
#endif
174+
#define __REVSH(value) (int16_t)__builtin_bswap16(value)
175+
183176

184177
/**
185178
\brief Rotate Right in unsigned value (32 bit)
@@ -188,31 +181,37 @@ __attribute__((section(".revsh_text"))) __STATIC_INLINE int32_t __REVSH(int32_t
188181
\param [in] op2 Number of Bits to rotate
189182
\return Rotated value
190183
*/
191-
__attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
184+
__STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2)
192185
{
186+
op2 %= 32U;
187+
if (op2 == 0U)
188+
{
189+
return op1;
190+
}
193191
return (op1 >> op2) | (op1 << (32U - op2));
194192
}
195193

194+
196195
/**
197196
\brief Breakpoint
198197
\param [in] value is ignored by the processor.
199198
If required, a debugger can use it to store additional information about the breakpoint.
200199
*/
201-
#define __BKPT(value) __ASM volatile ("bkpt "#value)
200+
#define __BKPT(value) __ASM volatile ("bkpt "#value)
202201

203202
/**
204203
\brief Reverse bit order of value
205204
\param [in] value Value to reverse
206205
\return Reversed value
207206
*/
208-
#define __RBIT __builtin_arm_rbit
207+
#define __RBIT __builtin_arm_rbit
209208

210209
/**
211210
\brief Count leading zeros
212211
\param [in] value Value to count the leading zeros
213212
\return number of leading zeros in value
214213
*/
215-
#define __CLZ __builtin_clz
214+
#define __CLZ (uint8_t)__builtin_clz
216215

217216
/**
218217
\brief LDR Exclusive (8 bit)
@@ -313,7 +312,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __ROR(uint32_t op1, uint
313312
/** \brief Get CPSR Register
314313
\return CPSR Register value
315314
*/
316-
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_CPSR(void)
315+
__STATIC_FORCEINLINE uint32_t __get_CPSR(void)
317316
{
318317
uint32_t result;
319318
__ASM volatile("MRS %0, cpsr" : "=r" (result) );
@@ -323,31 +322,31 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_CPSR(void)
323322
/** \brief Set CPSR Register
324323
\param [in] cpsr CPSR value to set
325324
*/
326-
__attribute__((always_inline)) __STATIC_INLINE void __set_CPSR(uint32_t cpsr)
325+
__STATIC_FORCEINLINE void __set_CPSR(uint32_t cpsr)
327326
{
328327
__ASM volatile ("MSR cpsr, %0" : : "r" (cpsr) : "memory");
329328
}
330329

331330
/** \brief Get Mode
332331
\return Processor Mode
333332
*/
334-
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_mode(void)
333+
__STATIC_FORCEINLINE uint32_t __get_mode(void)
335334
{
336335
return (__get_CPSR() & 0x1FU);
337336
}
338337

339338
/** \brief Set Mode
340339
\param [in] mode Mode value to set
341340
*/
342-
__attribute__((always_inline)) __STATIC_INLINE void __set_mode(uint32_t mode)
341+
__STATIC_FORCEINLINE void __set_mode(uint32_t mode)
343342
{
344343
__ASM volatile("MSR cpsr_c, %0" : : "r" (mode) : "memory");
345344
}
346345

347346
/** \brief Get Stack Pointer
348347
\return Stack Pointer value
349348
*/
350-
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_SP()
349+
__STATIC_FORCEINLINE uint32_t __get_SP()
351350
{
352351
uint32_t result;
353352
__ASM volatile("MOV %0, sp" : "=r" (result) : : "memory");
@@ -357,15 +356,15 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_SP()
357356
/** \brief Set Stack Pointer
358357
\param [in] stack Stack Pointer value to set
359358
*/
360-
__attribute__((always_inline)) __STATIC_INLINE void __set_SP(uint32_t stack)
359+
__STATIC_FORCEINLINE void __set_SP(uint32_t stack)
361360
{
362361
__ASM volatile("MOV sp, %0" : : "r" (stack) : "memory");
363362
}
364363

365364
/** \brief Get USR/SYS Stack Pointer
366365
\return USR/SYS Stack Pointer value
367366
*/
368-
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_SP_usr()
367+
__STATIC_FORCEINLINE uint32_t __get_SP_usr()
369368
{
370369
uint32_t cpsr;
371370
uint32_t result;
@@ -382,7 +381,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_SP_usr()
382381
/** \brief Set USR/SYS Stack Pointer
383382
\param [in] topOfProcStack USR/SYS Stack Pointer value to set
384383
*/
385-
__attribute__((always_inline)) __STATIC_INLINE void __set_SP_usr(uint32_t topOfProcStack)
384+
__STATIC_FORCEINLINE void __set_SP_usr(uint32_t topOfProcStack)
386385
{
387386
uint32_t cpsr;
388387
__ASM volatile(
@@ -397,7 +396,7 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_SP_usr(uint32_t topOfP
397396
/** \brief Get FPEXC
398397
\return Floating Point Exception Control register value
399398
*/
400-
__attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPEXC(void)
399+
__STATIC_FORCEINLINE uint32_t __get_FPEXC(void)
401400
{
402401
#if (__FPU_PRESENT == 1)
403402
uint32_t result;
@@ -411,7 +410,7 @@ __attribute__((always_inline)) __STATIC_INLINE uint32_t __get_FPEXC(void)
411410
/** \brief Set FPEXC
412411
\param [in] fpexc Floating Point Exception Control value to set
413412
*/
414-
__attribute__((always_inline)) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
413+
__STATIC_FORCEINLINE void __set_FPEXC(uint32_t fpexc)
415414
{
416415
#if (__FPU_PRESENT == 1)
417416
__ASM volatile ("VMSR fpexc, %0" : : "r" (fpexc) : "memory");
@@ -424,6 +423,8 @@ __attribute__((always_inline)) __STATIC_INLINE void __set_FPEXC(uint32_t fpexc)
424423

425424
#define __get_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MRC p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : "=r" (Rt) : : "memory" )
426425
#define __set_CP(cp, op1, Rt, CRn, CRm, op2) __ASM volatile("MCR p" # cp ", " # op1 ", %0, c" # CRn ", c" # CRm ", " # op2 : : "r" (Rt) : "memory" )
426+
#define __get_CP64(cp, op1, Rt, CRm) __ASM volatile("MRRC p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : "=r" (Rt) : : "memory" )
427+
#define __set_CP64(cp, op1, Rt, CRm) __ASM volatile("MCRR p" # cp ", " # op1 ", %Q0, %R0, c" # CRm : : "r" (Rt) : "memory" )
427428

428429
#include "cmsis_cp15.h"
429430

@@ -494,24 +495,23 @@ __STATIC_INLINE void __L1C_CleanInvalidateCache(uint32_t op)
494495
__STATIC_INLINE void __FPU_Enable(void)
495496
{
496497
__ASM volatile(
497-
//Permit access to VFP/NEON, registers by modifying CPACR
498+
//Permit access to VFP/NEON, registers by modifying CPACR
498499
" MRC p15,0,R1,c1,c0,2 \n"
499500
" ORR R1,R1,#0x00F00000 \n"
500501
" MCR p15,0,R1,c1,c0,2 \n"
501502

502-
//Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
503+
//Ensure that subsequent instructions occur in the context of VFP/NEON access permitted
503504
" ISB \n"
504505

505-
//Enable VFP/NEON
506+
//Enable VFP/NEON
506507
" VMRS R1,FPEXC \n"
507508
" ORR R1,R1,#0x40000000 \n"
508509
" VMSR FPEXC,R1 \n"
509510

510-
//Initialise VFP/NEON registers to 0
511+
//Initialise VFP/NEON registers to 0
511512
" MOV R2,#0 \n"
512513

513-
#if TARGET_FEATURE_EXTENSION_REGISTER_COUNT >= 16
514-
//Initialise D16 registers to 0
514+
//Initialise D16 registers to 0
515515
" VMOV D0, R2,R2 \n"
516516
" VMOV D1, R2,R2 \n"
517517
" VMOV D2, R2,R2 \n"
@@ -528,10 +528,9 @@ __STATIC_INLINE void __FPU_Enable(void)
528528
" VMOV D13,R2,R2 \n"
529529
" VMOV D14,R2,R2 \n"
530530
" VMOV D15,R2,R2 \n"
531-
#endif
532531

533-
#if TARGET_FEATURE_EXTENSION_REGISTER_COUNT == 32
534-
//Initialise D32 registers to 0
532+
#if __ARM_NEON == 1
533+
//Initialise D32 registers to 0
535534
" VMOV D16,R2,R2 \n"
536535
" VMOV D17,R2,R2 \n"
537536
" VMOV D18,R2,R2 \n"
@@ -548,9 +547,9 @@ __STATIC_INLINE void __FPU_Enable(void)
548547
" VMOV D29,R2,R2 \n"
549548
" VMOV D30,R2,R2 \n"
550549
" VMOV D31,R2,R2 \n"
551-
".endif \n"
552550
#endif
553-
//Initialise FPSCR to a known state
551+
552+
//Initialise FPSCR to a known state
554553
" VMRS R2,FPSCR \n"
555554
" LDR R3,=0x00086060 \n" //Mask off all bits that do not have to be preserved. Non-preserved bits can/should be zero.
556555
" AND R2,R2,R3 \n"

0 commit comments

Comments
 (0)