@@ -267,6 +267,104 @@ uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
     return newValue;
 }
 
+uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg)
+{
+    MBED_BARRIER();
+    uint8_t oldValue;
+    do {
+        oldValue = __LDREXB(valuePtr);
+    } while (__STREXB(oldValue & arg, valuePtr));
+    MBED_BARRIER();
+    return oldValue;
+}
+
+uint16_t core_util_atomic_fetch_and_u16(volatile uint16_t *valuePtr, uint16_t arg)
+{
+    MBED_BARRIER();
+    uint16_t oldValue;
+    do {
+        oldValue = __LDREXH(valuePtr);
+    } while (__STREXH(oldValue & arg, valuePtr));
+    MBED_BARRIER();
+    return oldValue;
+}
+
+uint32_t core_util_atomic_fetch_and_u32(volatile uint32_t *valuePtr, uint32_t arg)
+{
+    MBED_BARRIER();
+    uint32_t oldValue;
+    do {
+        oldValue = __LDREXW(valuePtr);
+    } while (__STREXW(oldValue & arg, valuePtr));
+    MBED_BARRIER();
+    return oldValue;
+}
+
+uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg)
+{
+    MBED_BARRIER();
+    uint8_t oldValue;
+    do {
+        oldValue = __LDREXB(valuePtr);
+    } while (__STREXB(oldValue | arg, valuePtr));
+    MBED_BARRIER();
+    return oldValue;
+}
+
+uint16_t core_util_atomic_fetch_or_u16(volatile uint16_t *valuePtr, uint16_t arg)
+{
+    MBED_BARRIER();
+    uint16_t oldValue;
+    do {
+        oldValue = __LDREXH(valuePtr);
+    } while (__STREXH(oldValue | arg, valuePtr));
+    MBED_BARRIER();
+    return oldValue;
+}
+
+uint32_t core_util_atomic_fetch_or_u32(volatile uint32_t *valuePtr, uint32_t arg)
+{
+    MBED_BARRIER();
+    uint32_t oldValue;
+    do {
+        oldValue = __LDREXW(valuePtr);
+    } while (__STREXW(oldValue | arg, valuePtr));
+    MBED_BARRIER();
+    return oldValue;
+}
+
+uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg)
+{
+    MBED_BARRIER();
+    uint8_t oldValue;
+    do {
+        oldValue = __LDREXB(valuePtr);
+    } while (__STREXB(oldValue ^ arg, valuePtr));
+    MBED_BARRIER();
+    return oldValue;
+}
+
+uint16_t core_util_atomic_fetch_xor_u16(volatile uint16_t *valuePtr, uint16_t arg)
+{
+    MBED_BARRIER();
+    uint16_t oldValue;
+    do {
+        oldValue = __LDREXH(valuePtr);
+    } while (__STREXH(oldValue ^ arg, valuePtr));
+    MBED_BARRIER();
+    return oldValue;
+}
+
+uint32_t core_util_atomic_fetch_xor_u32(volatile uint32_t *valuePtr, uint32_t arg)
+{
+    MBED_BARRIER();
+    uint32_t oldValue;
+    do {
+        oldValue = __LDREXW(valuePtr);
+    } while (__STREXW(oldValue ^ arg, valuePtr));
+    MBED_BARRIER();
+    return oldValue;
+}
 #else
 
 bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr)
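For context on the pattern above: __LDREXB/__STREXB (and their halfword/word counterparts) are ARM's exclusive load/store pair. __STREX returns non-zero when the exclusive reservation is lost between the load and the store (for instance, to an interrupt), so the do/while loop retries until the read-modify-write completes atomically, and the MBED_BARRIER() calls keep the compiler from reordering memory accesses across the operation. A minimal usage sketch of the new fetch functions; the flag value and variable names are hypothetical, not part of this patch:

#include <stdint.h>
/* declarations of core_util_atomic_* come from the header this patch extends */

#define UART_TX_BUSY 0x02u              /* hypothetical flag bit */

static volatile uint8_t driver_flags;   /* hypothetical shared state */

/* Atomically set the busy bit; report whether it was already set. */
static inline int claim_tx(void)
{
    uint8_t old = core_util_atomic_fetch_or_u8(&driver_flags, UART_TX_BUSY);
    return (old & UART_TX_BUSY) != 0;
}

/* Atomically clear the busy bit by ANDing with the inverted mask. */
static inline void release_tx(void)
{
    core_util_atomic_fetch_and_u8(&driver_flags, (uint8_t)~UART_TX_BUSY);
}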
@@ -420,6 +518,99 @@ uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
     return newValue;
 }
 
+
+uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg)
+{
+    uint8_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue & arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+uint16_t core_util_atomic_fetch_and_u16(volatile uint16_t *valuePtr, uint16_t arg)
+{
+    uint16_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue & arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+uint32_t core_util_atomic_fetch_and_u32(volatile uint32_t *valuePtr, uint32_t arg)
+{
+    uint32_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue & arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+
+uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg)
+{
+    uint8_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue | arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+uint16_t core_util_atomic_fetch_or_u16(volatile uint16_t *valuePtr, uint16_t arg)
+{
+    uint16_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue | arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+uint32_t core_util_atomic_fetch_or_u32(volatile uint32_t *valuePtr, uint32_t arg)
+{
+    uint32_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue | arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+
+uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg)
+{
+    uint8_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue ^ arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+uint16_t core_util_atomic_fetch_xor_u16(volatile uint16_t *valuePtr, uint16_t arg)
+{
+    uint16_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue ^ arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+uint32_t core_util_atomic_fetch_xor_u32(volatile uint32_t *valuePtr, uint32_t arg)
+{
+    uint32_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue ^ arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
 
 #endif
 
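In this #else branch, for cores without exclusive-access instructions (e.g. ARMv6-M), atomicity comes instead from disabling interrupts around the read-modify-write. All nine fallbacks share one shape; purely as an illustration (this macro is not proposed by the patch), the common pattern could be factored as:

/* Illustrative sketch only: the shared critical-section fetch-op shape. */
#define FETCH_OP_VIA_CRITICAL(T, valuePtr, arg, op)  \
    do {                                             \
        T oldValue_;                                 \
        core_util_critical_section_enter();         \
        oldValue_ = *(valuePtr);                     \
        *(valuePtr) = oldValue_ op (arg);            \
        core_util_critical_section_exit();          \
        return oldValue_;                            \
    } while (0)

/* For example, the u16 OR variant would reduce to: */
uint16_t core_util_atomic_fetch_or_u16(volatile uint16_t *valuePtr, uint16_t arg)
{
    FETCH_OP_VIA_CRITICAL(uint16_t, valuePtr, arg, |);
}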
 /* No architecture we support has LDREXD/STREXD, so must always disable IRQs for 64-bit operations */
@@ -484,6 +675,36 @@ uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta)
     return newValue;
 }
 
+uint64_t core_util_atomic_fetch_and_u64(volatile uint64_t *valuePtr, uint64_t arg)
+{
+    uint64_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue & arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+uint64_t core_util_atomic_fetch_or_u64(volatile uint64_t *valuePtr, uint64_t arg)
+{
+    uint64_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue | arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
+uint64_t core_util_atomic_fetch_xor_u64(volatile uint64_t *valuePtr, uint64_t arg)
+{
+    uint64_t oldValue;
+    core_util_critical_section_enter();
+    oldValue = *valuePtr;
+    *valuePtr = oldValue ^ arg;
+    core_util_critical_section_exit();
+    return oldValue;
+}
+
 MBED_STATIC_ASSERT(sizeof(void *) == sizeof(uint32_t), "Alas, pointers must be 32-bit");
 
 bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
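Because no supported core provides LDREXD/STREXD, these 64-bit variants always take the critical-section path, as the comment above the hunk notes. Usage mirrors the narrower widths; a sketch (the mask variable and bit index are hypothetical, not part of this patch):

#include <stdint.h>

static volatile uint64_t event_mask;   /* hypothetical 64-bit flag set */

/* Atomically toggle one bit and return its previous state. */
static inline int toggle_event(unsigned bit)
{
    uint64_t old = core_util_atomic_fetch_xor_u64(&event_mask, 1ULL << bit);
    return (int)((old >> bit) & 1u);
}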