@@ -211,6 +211,29 @@ bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentV
211
211
/** \copydoc core_util_atomic_cas_u8 */
212
212
bool core_util_atomic_cas_u64 (volatile uint64_t * ptr , uint64_t * expectedCurrentValue , uint64_t desiredValue );
213
213
214
+ /** \copydoc core_util_atomic_cas_u8 */
215
+ MBED_FORCEINLINE int8_t core_util_atomic_cas_s8 (volatile int8_t * ptr , int8_t * expectedCurrentValue , int8_t desiredValue )
216
+ {
217
+ return (int8_t )core_util_atomic_cas_u8 ((volatile uint8_t * )ptr , (uint8_t * )expectedCurrentValue , (uint8_t )desiredValue );
218
+ }
219
+
220
+ /** \copydoc core_util_atomic_cas_u8 */
221
+ MBED_FORCEINLINE int16_t core_util_atomic_cas_s16 (volatile int16_t * ptr , int16_t * expectedCurrentValue , int16_t desiredValue )
222
+ {
223
+ return (int16_t )core_util_atomic_cas_u16 ((volatile uint16_t * )ptr , (uint16_t * )expectedCurrentValue , (uint16_t )desiredValue );
224
+ }
225
+ /** \copydoc core_util_atomic_cas_u8 */
226
+ MBED_FORCEINLINE int32_t core_util_atomic_cas_s32 (volatile int32_t * ptr , int32_t * expectedCurrentValue , int32_t desiredValue )
227
+ {
228
+ return (int32_t )core_util_atomic_cas_u32 ((volatile uint32_t * )ptr , (uint32_t * )expectedCurrentValue , (uint32_t )desiredValue );
229
+ }
230
+
231
+ /** \copydoc core_util_atomic_cas_u8 */
232
+ MBED_FORCEINLINE int64_t core_util_atomic_cas_s64 (volatile int64_t * ptr , int64_t * expectedCurrentValue , int64_t desiredValue )
233
+ {
234
+ return (int64_t )core_util_atomic_cas_u64 ((volatile uint64_t * )ptr , (uint64_t * )expectedCurrentValue , (uint64_t )desiredValue );
235
+ }
236
+
214
237
/** \copydoc core_util_atomic_cas_u8 */
215
238
MBED_FORCEINLINE bool core_util_atomic_cas_bool (volatile bool * ptr , bool * expectedCurrentValue , bool desiredValue )
216
239
{
@@ -263,6 +286,52 @@ MBED_FORCEINLINE uint32_t core_util_atomic_load_u32(const volatile uint32_t *val
263
286
*/
264
287
uint64_t core_util_atomic_load_u64 (const volatile uint64_t * valuePtr );
265
288
289
+ /**
290
+ * Atomic load.
291
+ * @param valuePtr Target memory location.
292
+ * @return The loaded value.
293
+ */
294
+ MBED_FORCEINLINE int8_t core_util_atomic_load_s8 (const volatile int8_t * valuePtr )
295
+ {
296
+ int8_t value = * valuePtr ;
297
+ MBED_BARRIER ();
298
+ return value ;
299
+ }
300
+
301
+ /**
302
+ * Atomic load.
303
+ * @param valuePtr Target memory location.
304
+ * @return The loaded value.
305
+ */
306
+ MBED_FORCEINLINE int16_t core_util_atomic_load_s16 (const volatile int16_t * valuePtr )
307
+ {
308
+ int16_t value = * valuePtr ;
309
+ MBED_BARRIER ();
310
+ return value ;
311
+ }
312
+
313
+ /**
314
+ * Atomic load.
315
+ * @param valuePtr Target memory location.
316
+ * @return The loaded value.
317
+ */
318
+ MBED_FORCEINLINE int32_t core_util_atomic_load_s32 (const volatile int32_t * valuePtr )
319
+ {
320
+ int32_t value = * valuePtr ;
321
+ MBED_BARRIER ();
322
+ return value ;
323
+ }
324
+
325
+ /**
326
+ * Atomic load.
327
+ * @param valuePtr Target memory location.
328
+ * @return The loaded value.
329
+ */
330
+ MBED_FORCEINLINE int64_t core_util_atomic_load_s64 (const volatile int64_t * valuePtr )
331
+ {
332
+ return (int64_t )core_util_atomic_load_u64 ((const volatile uint64_t * )valuePtr );
333
+ }
334
+
266
335
/**
267
336
* Atomic load.
268
337
* @param valuePtr Target memory location.
@@ -330,6 +399,52 @@ MBED_FORCEINLINE void core_util_atomic_store_u32(volatile uint32_t *valuePtr, ui
330
399
*/
331
400
void core_util_atomic_store_u64 (volatile uint64_t * valuePtr , uint64_t desiredValue );
332
401
402
/**
 * Atomic store.
 * @param valuePtr Target memory location.
 * @param desiredValue The value to store.
 */
MBED_FORCEINLINE void core_util_atomic_store_s8(volatile int8_t *valuePtr, int8_t desiredValue)
{
    /* Barriers either side of the plain store keep the compiler from moving
     * other memory accesses across it. NOTE(review): assumes an aligned 8-bit
     * store is single-copy atomic on the supported cores - confirm. */
    MBED_BARRIER();
    *valuePtr = desiredValue;
    MBED_BARRIER();
}
413
+
414
/**
 * Atomic store.
 * @param valuePtr Target memory location.
 * @param desiredValue The value to store.
 */
MBED_FORCEINLINE void core_util_atomic_store_s16(volatile int16_t *valuePtr, int16_t desiredValue)
{
    /* Barriers either side of the plain store keep the compiler from moving
     * other memory accesses across it. NOTE(review): assumes an aligned 16-bit
     * store is single-copy atomic on the supported cores - confirm. */
    MBED_BARRIER();
    *valuePtr = desiredValue;
    MBED_BARRIER();
}
425
+
426
/**
 * Atomic store.
 * @param valuePtr Target memory location.
 * @param desiredValue The value to store.
 */
MBED_FORCEINLINE void core_util_atomic_store_s32(volatile int32_t *valuePtr, int32_t desiredValue)
{
    /* Barriers either side of the plain store keep the compiler from moving
     * other memory accesses across it. NOTE(review): assumes an aligned 32-bit
     * store is single-copy atomic on the supported cores - confirm. */
    MBED_BARRIER();
    *valuePtr = desiredValue;
    MBED_BARRIER();
}
437
+
438
+ /**
439
+ * Atomic store.
440
+ * @param valuePtr Target memory location.
441
+ * @param desiredValue The value to store.
442
+ */
443
+ MBED_FORCEINLINE void core_util_atomic_store_s64 (volatile int64_t * valuePtr , int64_t desiredValue )
444
+ {
445
+ core_util_atomic_store_u64 ((volatile uint64_t * )valuePtr , (uint64_t )desiredValue );
446
+ }
447
+
333
448
/**
334
449
* Atomic store.
335
450
* @param valuePtr Target memory location.
@@ -386,6 +501,50 @@ uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t des
386
501
*/
387
502
uint64_t core_util_atomic_exchange_u64 (volatile uint64_t * valuePtr , uint64_t desiredValue );
388
503
504
+ /**
505
+ * Atomic exchange.
506
+ * @param valuePtr Target memory location.
507
+ * @param desiredValue The value to store.
508
+ * @return The previous value.
509
+ */
510
+ MBED_FORCEINLINE int8_t core_util_atomic_exchange_s8 (volatile int8_t * valuePtr , int8_t desiredValue )
511
+ {
512
+ return (int8_t )core_util_atomic_exchange_u8 ((volatile uint8_t * )valuePtr , (uint8_t )desiredValue );
513
+ }
514
+
515
+ /**
516
+ * Atomic exchange.
517
+ * @param valuePtr Target memory location.
518
+ * @param desiredValue The value to store.
519
+ * @return The previous value.
520
+ */
521
+ MBED_FORCEINLINE int16_t core_util_atomic_exchange_s16 (volatile int16_t * valuePtr , int16_t desiredValue )
522
+ {
523
+ return (int16_t )core_util_atomic_exchange_u16 ((volatile uint16_t * )valuePtr , (uint16_t )desiredValue );
524
+ }
525
+
526
+ /**
527
+ * Atomic exchange.
528
+ * @param valuePtr Target memory location.
529
+ * @param desiredValue The value to store.
530
+ * @return The previous value.
531
+ */
532
+ MBED_FORCEINLINE int32_t core_util_atomic_exchange_s32 (volatile int32_t * valuePtr , int32_t desiredValue )
533
+ {
534
+ return (int32_t )core_util_atomic_exchange_u32 ((volatile uint32_t * )valuePtr , (uint32_t )desiredValue );
535
+ }
536
+
537
+ /**
538
+ * Atomic exchange.
539
+ * @param valuePtr Target memory location.
540
+ * @param desiredValue The value to store.
541
+ * @return The previous value.
542
+ */
543
+ MBED_FORCEINLINE int64_t core_util_atomic_exchange_s64 (volatile int64_t * valuePtr , int64_t desiredValue )
544
+ {
545
+ return (int64_t )core_util_atomic_exchange_u64 ((volatile uint64_t * )valuePtr , (uint64_t )desiredValue );
546
+ }
547
+
389
548
/**
390
549
* Atomic exchange.
391
550
* @param valuePtr Target memory location.
@@ -437,6 +596,50 @@ uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta);
437
596
*/
438
597
uint64_t core_util_atomic_incr_u64 (volatile uint64_t * valuePtr , uint64_t delta );
439
598
599
+ /**
600
+ * Atomic increment.
601
+ * @param valuePtr Target memory location being incremented.
602
+ * @param delta The amount being incremented.
603
+ * @return The new incremented value.
604
+ */
605
+ MBED_FORCEINLINE int8_t core_util_atomic_incr_s8 (volatile int8_t * valuePtr , int8_t delta )
606
+ {
607
+ return (int8_t )core_util_atomic_incr_u8 ((volatile uint8_t * )valuePtr , (uint8_t )delta );
608
+ }
609
+
610
+ /**
611
+ * Atomic increment.
612
+ * @param valuePtr Target memory location being incremented.
613
+ * @param delta The amount being incremented.
614
+ * @return The new incremented value.
615
+ */
616
+ MBED_FORCEINLINE int16_t core_util_atomic_incr_s16 (volatile int16_t * valuePtr , int16_t delta )
617
+ {
618
+ return (int16_t )core_util_atomic_incr_u16 ((volatile uint16_t * )valuePtr , (uint16_t )delta );
619
+ }
620
+
621
+ /**
622
+ * Atomic increment.
623
+ * @param valuePtr Target memory location being incremented.
624
+ * @param delta The amount being incremented.
625
+ * @return The new incremented value.
626
+ */
627
+ MBED_FORCEINLINE int32_t core_util_atomic_incr_s32 (volatile int32_t * valuePtr , int32_t delta )
628
+ {
629
+ return (int32_t )core_util_atomic_incr_u32 ((volatile uint32_t * )valuePtr , (uint32_t )delta );
630
+ }
631
+
632
+ /**
633
+ * Atomic increment.
634
+ * @param valuePtr Target memory location being incremented.
635
+ * @param delta The amount being incremented.
636
+ * @return The new incremented value.
637
+ */
638
+ MBED_FORCEINLINE int64_t core_util_atomic_incr_s64 (volatile int64_t * valuePtr , int64_t delta )
639
+ {
640
+ return (int64_t )core_util_atomic_incr_u64 ((volatile uint64_t * )valuePtr , (uint64_t )delta );
641
+ }
642
+
440
643
/**
441
644
* Atomic increment.
442
645
* @param valuePtr Target memory location being incremented.
@@ -480,6 +683,50 @@ uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta);
480
683
*/
481
684
uint64_t core_util_atomic_decr_u64 (volatile uint64_t * valuePtr , uint64_t delta );
482
685
686
+ /**
687
+ * Atomic decrement.
688
+ * @param valuePtr Target memory location being decremented.
689
+ * @param delta The amount being decremented.
690
+ * @return The new decremented value.
691
+ */
692
+ MBED_FORCEINLINE int8_t core_util_atomic_decr_s8 (volatile int8_t * valuePtr , int8_t delta )
693
+ {
694
+ return (int8_t )core_util_atomic_decr_u8 ((volatile uint8_t * )valuePtr , (uint8_t )delta );
695
+ }
696
+
697
+ /**
698
+ * Atomic decrement.
699
+ * @param valuePtr Target memory location being decremented.
700
+ * @param delta The amount being decremented.
701
+ * @return The new decremented value.
702
+ */
703
+ MBED_FORCEINLINE int16_t core_util_atomic_decr_s16 (volatile int16_t * valuePtr , int16_t delta )
704
+ {
705
+ return (int16_t )core_util_atomic_decr_u16 ((volatile uint16_t * )valuePtr , (uint16_t )delta );
706
+ }
707
+
708
+ /**
709
+ * Atomic decrement.
710
+ * @param valuePtr Target memory location being decremented.
711
+ * @param delta The amount being decremented.
712
+ * @return The new decremented value.
713
+ */
714
+ MBED_FORCEINLINE int32_t core_util_atomic_decr_s32 (volatile int32_t * valuePtr , int32_t delta )
715
+ {
716
+ return (int32_t )core_util_atomic_decr_u32 ((volatile uint32_t * )valuePtr , (uint32_t )delta );
717
+ }
718
+
719
+ /**
720
+ * Atomic decrement.
721
+ * @param valuePtr Target memory location being decremented.
722
+ * @param delta The amount being decremented.
723
+ * @return The new decremented value.
724
+ */
725
+ MBED_FORCEINLINE int64_t core_util_atomic_decr_s64 (volatile int64_t * valuePtr , int64_t delta )
726
+ {
727
+ return (int64_t )core_util_atomic_decr_u64 ((volatile uint64_t * )valuePtr , (uint64_t )delta );
728
+ }
729
+
483
730
/**
484
731
* Atomic decrement.
485
732
* @param valuePtr Target memory location being decremented.
0 commit comments