Skip to content

Commit b8bf0f7

Browse files
committed
Atomic bitwise operations
1 parent 6b84b14 commit b8bf0f7

File tree

2 files changed

+317
-0
lines changed

2 files changed

+317
-0
lines changed

platform/mbed_critical.c

Lines changed: 221 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -267,6 +267,104 @@ uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
267267
return newValue;
268268
}
269269

270+
/* Lock-free atomic bitwise operations (exclusive-access path).
 *
 * Each core_util_atomic_fetch_<op>_<width> atomically applies the bitwise
 * operation to *valuePtr and returns the value the location held before
 * the update.  The LDREX/STREX pair retries until the store succeeds
 * without the exclusive reservation being broken; MBED_BARRIER() on entry
 * and exit orders the operation with surrounding memory accesses, matching
 * the other atomics in this file.
 *
 * The nine functions differ only in operand width (byte/halfword/word
 * exclusives) and operator (& | ^), so they are stamped out from a single
 * template to keep the implementations consistent and reviewable.
 *
 * @param valuePtr Target memory location being modified.
 * @param arg      The argument for the bitwise operation.
 * @return         The original value.
 */
#define DO_MBED_LOCKFREE_FETCH_OP(fname, T, ldrex, strex, OP)       \
T core_util_atomic_##fname(volatile T *valuePtr, T arg)             \
{                                                                   \
    MBED_BARRIER();                                                 \
    T oldValue;                                                     \
    do {                                                            \
        oldValue = ldrex(valuePtr);                                 \
    } while (strex(oldValue OP arg, valuePtr));                     \
    MBED_BARRIER();                                                 \
    return oldValue;                                                \
}

DO_MBED_LOCKFREE_FETCH_OP(fetch_and_u8,  uint8_t,  __LDREXB, __STREXB, &)
DO_MBED_LOCKFREE_FETCH_OP(fetch_and_u16, uint16_t, __LDREXH, __STREXH, &)
DO_MBED_LOCKFREE_FETCH_OP(fetch_and_u32, uint32_t, __LDREXW, __STREXW, &)

DO_MBED_LOCKFREE_FETCH_OP(fetch_or_u8,   uint8_t,  __LDREXB, __STREXB, |)
DO_MBED_LOCKFREE_FETCH_OP(fetch_or_u16,  uint16_t, __LDREXH, __STREXH, |)
DO_MBED_LOCKFREE_FETCH_OP(fetch_or_u32,  uint32_t, __LDREXW, __STREXW, |)

DO_MBED_LOCKFREE_FETCH_OP(fetch_xor_u8,  uint8_t,  __LDREXB, __STREXB, ^)
DO_MBED_LOCKFREE_FETCH_OP(fetch_xor_u16, uint16_t, __LDREXH, __STREXH, ^)
DO_MBED_LOCKFREE_FETCH_OP(fetch_xor_u32, uint32_t, __LDREXW, __STREXW, ^)

#undef DO_MBED_LOCKFREE_FETCH_OP
270368
#else
271369

272370
bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr)
@@ -420,6 +518,99 @@ uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
420518
return newValue;
421519
}
422520

521+
522+
/* Critical-section fallback for the atomic bitwise operations, used when
 * exclusive-access instructions are not available.
 *
 * Each core_util_atomic_fetch_<op>_<width> performs the read-modify-write
 * of *valuePtr inside a critical section, so no other execution context
 * can interleave with the update, and returns the value the location held
 * before the update.
 *
 * The nine functions differ only in operand width and operator (& | ^),
 * so they are generated from a single template for consistency with the
 * lock-free implementations.
 *
 * @param valuePtr Target memory location being modified.
 * @param arg      The argument for the bitwise operation.
 * @return         The original value.
 */
#define DO_MBED_CRITICAL_FETCH_OP(fname, T, OP)             \
T core_util_atomic_##fname(volatile T *valuePtr, T arg)     \
{                                                           \
    T oldValue;                                             \
    core_util_critical_section_enter();                     \
    oldValue = *valuePtr;                                   \
    *valuePtr = oldValue OP arg;                            \
    core_util_critical_section_exit();                      \
    return oldValue;                                        \
}

DO_MBED_CRITICAL_FETCH_OP(fetch_and_u8,  uint8_t,  &)
DO_MBED_CRITICAL_FETCH_OP(fetch_and_u16, uint16_t, &)
DO_MBED_CRITICAL_FETCH_OP(fetch_and_u32, uint32_t, &)

DO_MBED_CRITICAL_FETCH_OP(fetch_or_u8,   uint8_t,  |)
DO_MBED_CRITICAL_FETCH_OP(fetch_or_u16,  uint16_t, |)
DO_MBED_CRITICAL_FETCH_OP(fetch_or_u32,  uint32_t, |)

DO_MBED_CRITICAL_FETCH_OP(fetch_xor_u8,  uint8_t,  ^)
DO_MBED_CRITICAL_FETCH_OP(fetch_xor_u16, uint16_t, ^)
DO_MBED_CRITICAL_FETCH_OP(fetch_xor_u32, uint32_t, ^)

#undef DO_MBED_CRITICAL_FETCH_OP
613+
423614
#endif
424615

425616
/* No architecture we support has LDREXD/STREXD, so must always disable IRQs for 64-bit operations */
@@ -484,6 +675,36 @@ uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta)
484675
return newValue;
485676
}
486677

678+
/* 64-bit atomic bitwise operations.
 *
 * Per the note above, no supported architecture has LDREXD/STREXD, so the
 * 64-bit operations always perform the read-modify-write inside a critical
 * section.  Each function atomically applies the bitwise operation to
 * *valuePtr and returns the value the location held before the update.
 *
 * The three functions differ only in the operator (& | ^), so they are
 * generated from a single template.
 *
 * @param valuePtr Target memory location being modified.
 * @param arg      The argument for the bitwise operation.
 * @return         The original value.
 */
#define DO_MBED_CRITICAL_FETCH_OP_U64(fname, OP)                            \
uint64_t core_util_atomic_##fname(volatile uint64_t *valuePtr, uint64_t arg) \
{                                                                           \
    uint64_t oldValue;                                                      \
    core_util_critical_section_enter();                                     \
    oldValue = *valuePtr;                                                   \
    *valuePtr = oldValue OP arg;                                            \
    core_util_critical_section_exit();                                      \
    return oldValue;                                                        \
}

DO_MBED_CRITICAL_FETCH_OP_U64(fetch_and_u64, &)
DO_MBED_CRITICAL_FETCH_OP_U64(fetch_or_u64,  |)
DO_MBED_CRITICAL_FETCH_OP_U64(fetch_xor_u64, ^)

#undef DO_MBED_CRITICAL_FETCH_OP_U64
707+
487708
MBED_STATIC_ASSERT(sizeof(void *) == sizeof(uint32_t), "Alas, pointers must be 32-bit");
488709

489710
bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)

platform/mbed_critical.h

Lines changed: 96 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -738,6 +738,102 @@ MBED_FORCEINLINE int64_t core_util_atomic_decr_s64(volatile int64_t *valuePtr, i
738738
*/
739739
void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
740740

741+
/**
 * Atomic bitwise AND, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg);

/**
 * Atomic bitwise AND, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint16_t core_util_atomic_fetch_and_u16(volatile uint16_t *valuePtr, uint16_t arg);

/**
 * Atomic bitwise AND, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint32_t core_util_atomic_fetch_and_u32(volatile uint32_t *valuePtr, uint32_t arg);

/**
 * Atomic bitwise AND, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint64_t core_util_atomic_fetch_and_u64(volatile uint64_t *valuePtr, uint64_t arg);

/**
 * Atomic bitwise inclusive OR, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg);

/**
 * Atomic bitwise inclusive OR, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint16_t core_util_atomic_fetch_or_u16(volatile uint16_t *valuePtr, uint16_t arg);

/**
 * Atomic bitwise inclusive OR, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint32_t core_util_atomic_fetch_or_u32(volatile uint32_t *valuePtr, uint32_t arg);

/**
 * Atomic bitwise inclusive OR, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint64_t core_util_atomic_fetch_or_u64(volatile uint64_t *valuePtr, uint64_t arg);

/**
 * Atomic bitwise exclusive OR, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg);

/**
 * Atomic bitwise exclusive OR, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint16_t core_util_atomic_fetch_xor_u16(volatile uint16_t *valuePtr, uint16_t arg);

/**
 * Atomic bitwise exclusive OR, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint32_t core_util_atomic_fetch_xor_u32(volatile uint32_t *valuePtr, uint32_t arg);

/**
 * Atomic bitwise exclusive OR, returning the pre-operation value.
 * @param  valuePtr Target memory location being modified.
 * @param  arg      The argument for the bitwise operation.
 * @return          The original value.
 */
uint64_t core_util_atomic_fetch_xor_u64(volatile uint64_t *valuePtr, uint64_t arg);
836+
741837
#ifdef __cplusplus
742838
} // extern "C"
743839
#endif

0 commit comments

Comments
 (0)