@@ -100,6 +100,9 @@ void core_util_critical_section_exit(void)
100
100
}
101
101
}
102
102
103
+ /* Inline bool implementations in the header use uint8_t versions to manipulate the bool */
104
+ MBED_STATIC_ASSERT (sizeof (bool ) == sizeof (uint8_t ), "Surely bool is a byte" );
105
+
103
106
#if MBED_EXCLUSIVE_ACCESS
104
107
105
108
/* Supress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
@@ -109,8 +112,8 @@ void core_util_critical_section_exit(void)
109
112
110
113
bool core_util_atomic_flag_test_and_set (volatile core_util_atomic_flag * flagPtr )
111
114
{
112
- uint8_t currentValue ;
113
115
MBED_BARRIER ();
116
+ uint8_t currentValue ;
114
117
do {
115
118
currentValue = __LDREXB (& flagPtr -> _flag );
116
119
} while (__STREXB (true, & flagPtr -> _flag ));
@@ -164,6 +167,39 @@ bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentV
164
167
return true;
165
168
}
166
169
170
/** Atomically replace an 8-bit value, returning the value it held before.
 *
 *  Lock-free variant: retries an exclusive load/store (LDREXB/STREXB) pair
 *  until the store succeeds, with barriers on entry and exit for ordering.
 *
 *  @param valuePtr     location to modify
 *  @param desiredValue value to store
 *  @return             the previous contents of *valuePtr
 */
uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t desiredValue)
{
    MBED_BARRIER();
    uint8_t oldValue;
    do {
        oldValue = __LDREXB(valuePtr);
    } while (__STREXB(desiredValue, valuePtr));
    MBED_BARRIER();
    return oldValue;
}
180
+
181
/** Atomically replace a 16-bit value, returning the value it held before.
 *
 *  Lock-free variant: retries an exclusive load/store (LDREXH/STREXH) pair
 *  until the store succeeds, with barriers on entry and exit for ordering.
 *
 *  @param valuePtr     location to modify
 *  @param desiredValue value to store
 *  @return             the previous contents of *valuePtr
 */
uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t desiredValue)
{
    MBED_BARRIER();
    uint16_t oldValue;
    do {
        oldValue = __LDREXH(valuePtr);
    } while (__STREXH(desiredValue, valuePtr));
    MBED_BARRIER();
    return oldValue;
}
191
+
192
/** Atomically replace a 32-bit value, returning the value it held before.
 *
 *  Lock-free variant: retries an exclusive load/store (LDREXW/STREXW) pair
 *  until the store succeeds, with barriers on entry and exit for ordering.
 *
 *  @param valuePtr     location to modify
 *  @param desiredValue value to store
 *  @return             the previous contents of *valuePtr
 */
uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t desiredValue)
{
    MBED_BARRIER();
    uint32_t oldValue;
    do {
        oldValue = __LDREXW(valuePtr);
    } while (__STREXW(desiredValue, valuePtr));
    MBED_BARRIER();
    return oldValue;
}
202
+
167
203
uint8_t core_util_atomic_incr_u8 (volatile uint8_t * valuePtr , uint8_t delta )
168
204
{
169
205
MBED_BARRIER ();
@@ -188,8 +224,8 @@ uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
188
224
189
225
uint32_t core_util_atomic_incr_u32 (volatile uint32_t * valuePtr , uint32_t delta )
190
226
{
191
- uint32_t newValue ;
192
227
MBED_BARRIER ();
228
+ uint32_t newValue ;
193
229
do {
194
230
newValue = __LDREXW (valuePtr ) + delta ;
195
231
} while (__STREXW (newValue , valuePtr ));
@@ -200,8 +236,8 @@ uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
200
236
201
237
uint8_t core_util_atomic_decr_u8 (volatile uint8_t * valuePtr , uint8_t delta )
202
238
{
203
- uint8_t newValue ;
204
239
MBED_BARRIER ();
240
+ uint8_t newValue ;
205
241
do {
206
242
newValue = __LDREXB (valuePtr ) - delta ;
207
243
} while (__STREXB (newValue , valuePtr ));
@@ -211,8 +247,8 @@ uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
211
247
212
248
uint16_t core_util_atomic_decr_u16 (volatile uint16_t * valuePtr , uint16_t delta )
213
249
{
214
- uint16_t newValue ;
215
250
MBED_BARRIER ();
251
+ uint16_t newValue ;
216
252
do {
217
253
newValue = __LDREXH (valuePtr ) - delta ;
218
254
} while (__STREXH (newValue , valuePtr ));
@@ -222,8 +258,8 @@ uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
222
258
223
259
uint32_t core_util_atomic_decr_u32 (volatile uint32_t * valuePtr , uint32_t delta )
224
260
{
225
- uint32_t newValue ;
226
261
MBED_BARRIER ();
262
+ uint32_t newValue ;
227
263
do {
228
264
newValue = __LDREXW (valuePtr ) - delta ;
229
265
} while (__STREXW (newValue , valuePtr ));
@@ -295,6 +331,34 @@ bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentV
295
331
}
296
332
297
333
334
/** Atomically replace an 8-bit value, returning the value it held before.
 *
 *  Fallback variant for targets without exclusive access: the read-modify
 *  sequence is serialised by the global critical section.
 *
 *  @param ptr          location to modify
 *  @param desiredValue value to store
 *  @return             the previous contents of *ptr
 */
uint8_t core_util_atomic_exchange_u8(volatile uint8_t *ptr, uint8_t desiredValue)
{
    core_util_critical_section_enter();
    uint8_t oldValue = *ptr;
    *ptr = desiredValue;
    core_util_critical_section_exit();
    return oldValue;
}
342
+
343
/** Atomically replace a 16-bit value, returning the value it held before.
 *
 *  Fallback variant for targets without exclusive access: the read-modify
 *  sequence is serialised by the global critical section.
 *
 *  @param ptr          location to modify
 *  @param desiredValue value to store
 *  @return             the previous contents of *ptr
 */
uint16_t core_util_atomic_exchange_u16(volatile uint16_t *ptr, uint16_t desiredValue)
{
    core_util_critical_section_enter();
    uint16_t oldValue = *ptr;
    *ptr = desiredValue;
    core_util_critical_section_exit();
    return oldValue;
}
351
+
352
/** Atomically replace a 32-bit value, returning the value it held before.
 *
 *  Fallback variant for targets without exclusive access: the read-modify
 *  sequence is serialised by the global critical section.
 *
 *  @param ptr          location to modify
 *  @param desiredValue value to store
 *  @return             the previous contents of *ptr
 */
uint32_t core_util_atomic_exchange_u32(volatile uint32_t *ptr, uint32_t desiredValue)
{
    core_util_critical_section_enter();
    uint32_t oldValue = *ptr;
    *ptr = desiredValue;
    core_util_critical_section_exit();
    return oldValue;
}
360
+
361
+
298
362
uint8_t core_util_atomic_incr_u8 (volatile uint8_t * valuePtr , uint8_t delta )
299
363
{
300
364
uint8_t newValue ;
@@ -358,6 +422,69 @@ uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
358
422
359
423
#endif
360
424
425
/* No architecture we support has LDREXD/STREXD, so must always disable IRQs for 64-bit operations */

/** Atomically read a 64-bit value.
 *
 *  @param valuePtr location to read
 *  @return         the value read under the critical section
 */
uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr)
{
    core_util_critical_section_enter();
    uint64_t value = *valuePtr;
    core_util_critical_section_exit();
    return value;
}
433
+
434
/** Atomically write a 64-bit value.
 *
 *  @param valuePtr     location to write
 *  @param desiredValue value to store under the critical section
 */
void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
    core_util_critical_section_enter();
    *valuePtr = desiredValue;
    core_util_critical_section_exit();
}
440
+
441
/** Atomically replace a 64-bit value, returning the value it held before.
 *
 *  @param valuePtr     location to modify
 *  @param desiredValue value to store
 *  @return             the previous contents of *valuePtr
 */
uint64_t core_util_atomic_exchange_u64(volatile uint64_t *valuePtr, uint64_t desiredValue)
{
    core_util_critical_section_enter();
    uint64_t oldValue = *valuePtr;
    *valuePtr = desiredValue;
    core_util_critical_section_exit();
    return oldValue;
}
449
+
450
/** Atomic 64-bit compare-and-swap.
 *
 *  If *ptr equals *expectedCurrentValue, store desiredValue and return true.
 *  Otherwise write the observed value back into *expectedCurrentValue and
 *  return false, so the caller can retry with the refreshed expectation.
 *
 *  @param ptr                  location to modify
 *  @param expectedCurrentValue in: expected value; out: observed value on failure
 *  @param desiredValue         value to store on a successful compare
 *  @return                     true if the swap was performed
 */
bool core_util_atomic_cas_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue)
{
    core_util_critical_section_enter();
    uint64_t readValue = *ptr;
    bool matched = (readValue == *expectedCurrentValue);
    if (matched) {
        *ptr = desiredValue;
    } else {
        *expectedCurrentValue = readValue;
    }
    core_util_critical_section_exit();
    return matched;
}
466
+
467
/** Atomically add to a 64-bit value.
 *
 *  @param valuePtr location to modify
 *  @param delta    amount to add
 *  @return         the new (post-increment) value
 */
uint64_t core_util_atomic_incr_u64(volatile uint64_t *valuePtr, uint64_t delta)
{
    core_util_critical_section_enter();
    uint64_t updated = *valuePtr + delta;
    *valuePtr = updated;
    core_util_critical_section_exit();
    return updated;
}
476
+
477
/** Atomically subtract from a 64-bit value.
 *
 *  @param valuePtr location to modify
 *  @param delta    amount to subtract
 *  @return         the new (post-decrement) value
 */
uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta)
{
    core_util_critical_section_enter();
    uint64_t updated = *valuePtr - delta;
    *valuePtr = updated;
    core_util_critical_section_exit();
    return updated;
}
486
+
487
+ MBED_STATIC_ASSERT (sizeof (void * ) == sizeof (uint32_t ), "Alas, pointers must be 32-bit" );
361
488
362
489
bool core_util_atomic_cas_ptr (void * volatile * ptr , void * * expectedCurrentValue , void * desiredValue )
363
490
{
@@ -367,6 +494,11 @@ bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue,
367
494
(uint32_t )desiredValue );
368
495
}
369
496
497
/** Atomically replace a pointer value, returning the pointer it held before.
 *
 *  Delegates to the 32-bit integer exchange; valid because pointers are
 *  32-bit on the supported targets (enforced by a static assertion).
 *
 *  @param valuePtr     location to modify
 *  @param desiredValue pointer to store
 *  @return             the previous contents of *valuePtr
 */
void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue)
{
    return (void *)core_util_atomic_exchange_u32((volatile uint32_t *)valuePtr, (uint32_t)desiredValue);
}
501
+
370
502
void * core_util_atomic_incr_ptr (void * volatile * valuePtr , ptrdiff_t delta )
371
503
{
372
504
return (void * )core_util_atomic_incr_u32 ((volatile uint32_t * )valuePtr , (uint32_t )delta );
0 commit comments