@@ -138,8 +138,8 @@ typedef struct {
     uint8_t buffer[NUMBER_OF_BANKS][DMA_BUFFER_SIZE];
     uint32_t usage_counter;
     uint8_t tx_data;
-    volatile uint8_t tx_in_progress;
-    volatile uint8_t rx_in_progress;
+    bool tx_in_progress;
+    bool rx_in_progress;
     bool tx_asynch;
     bool rx_asynch;
     bool callback_posted;
@@ -252,7 +252,7 @@ static void nordic_nrf5_uart_callback_handler(uint32_t instance)
 static void nordic_nrf5_uart_event_handler_endtx(int instance)
 {
     /* Release mutex. As the owner this call is safe. */
-    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
+    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, false);

     /* Check if callback handler and Tx event mask is set. */
     uart_irq_handler callback = (uart_irq_handler) nordic_nrf5_uart_state[instance].owner->handler;
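The ENDTX handler can release the flag with a plain atomic store rather than another exchange because, as its comment says, the handler is the current owner: only the winner of the acquire exchange ever writes false, so there is no competing writer to detect. A minimal sketch of the release and read sides of this pattern, assuming a free-standing flag in place of the per-instance nordic_nrf5_uart_state[instance].tx_in_progress field:

#include <stdbool.h>
#include "platform/mbed_atomic.h"

static bool tx_busy;   /* hypothetical stand-in for the per-instance tx_in_progress flag */

/* Owner-side release: a read-modify-write is unnecessary here, since only
 * the owner that won the acquire exchange ever stores false. */
static void tx_release(void)
{
    core_util_atomic_store_bool(&tx_busy, false);
}

/* Reader-side check, mirroring serial_writable(): an atomic load observes
 * the flag without taking part in the ownership handover. */
static bool tx_idle(void)
{
    return !core_util_atomic_load_bool(&tx_busy);
}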
@@ -275,8 +275,8 @@ static void nordic_nrf5_uart_event_handler_endtx(int instance)
 static void nordic_nrf5_uart_event_handler_endtx_asynch(int instance)
 {
     /* Set Tx done and reset Tx mode to be not asynchronous. */
-    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
     nordic_nrf5_uart_state[instance].tx_asynch = false;
+    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, false);

     /* Cast handler to callback function pointer. */
     void (*callback)(void) = (void (*)(void)) nordic_nrf5_uart_state[instance].owner->tx_handler;
@@ -482,8 +482,8 @@ static void nordic_nrf5_uart_event_handler_rxstarted(int instance)
 static void nordic_nrf5_uart_event_handler_endrx_asynch(int instance)
 {
     /* Set Rx done and reset Rx mode to be not asynchronous. */
-    nordic_nrf5_uart_state[instance].rx_in_progress = 0;
     nordic_nrf5_uart_state[instance].rx_asynch = false;
+    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].rx_in_progress, false);

     /* Cast handler to callback function pointer. */
     void (*callback)(void) = (void (*)(void)) nordic_nrf5_uart_state[instance].owner->rx_handler;
@@ -1410,7 +1410,7 @@ int serial_writable(serial_t *obj)

     int instance = uart_object->instance;

-    return ((nordic_nrf5_uart_state[instance].tx_in_progress == 0) &&
+    return (!core_util_atomic_load_bool(&nordic_nrf5_uart_state[instance].tx_in_progress) &&
            (nrf_uarte_event_extra_check(nordic_nrf5_uart_register[instance], NRF_UARTE_EVENT_TXDRDY)));
 }

@@ -1449,16 +1449,14 @@ int serial_tx_asynch(serial_t *obj, const void *tx, size_t tx_length, uint8_t tx

     /**
      * tx_in_progress acts like a mutex to ensure only one transmission can be active at a time.
-     * The flag is modified using the atomic compare-and-set function.
+     * The flag is modified using the atomic exchange function - only proceed when we see the
+     * flag clear and we set it to true.
      */
-    bool mutex = false;
+    bool old_mutex;

     do {
-        uint8_t expected = 0;
-        uint8_t desired = 1;
-
-        mutex = core_util_atomic_cas_u8((uint8_t *) &nordic_nrf5_uart_state[instance].tx_in_progress, &expected, desired);
-    } while (mutex == false);
+        old_mutex = core_util_atomic_exchange_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, true);
+    } while (old_mutex == true);

     /* State variables. */
     int result = 0;
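The acquire loop above works by unconditional exchange: store true and inspect the previous value. Seeing false means the flag was clear and the caller now owns it; seeing true means another transfer holds it, so the loop retries. A minimal sketch under the same assumption (a free-standing tx_busy flag standing in for the per-instance field):

#include <stdbool.h>
#include "platform/mbed_atomic.h"

static bool tx_busy;   /* hypothetical stand-in for the per-instance tx_in_progress flag */

/* Acquire by atomic exchange: store true and examine what was there before.
 * A previous value of false means we took ownership; true means another
 * transmission already holds the flag, so spin until it is released. */
static void tx_acquire(void)
{
    bool was_busy;

    do {
        was_busy = core_util_atomic_exchange_bool(&tx_busy, true);
    } while (was_busy);
}

Compared with the old core_util_atomic_cas_u8 loop, the exchange form needs no expected/desired bookkeeping and its loop condition reads directly as "retry while someone else held it"; the rx path applies the identical pattern to rx_in_progress.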
@@ -1575,16 +1573,14 @@ void serial_rx_asynch(serial_t *obj, void *rx, size_t rx_length, uint8_t rx_widt

     /**
      * rx_in_progress acts like a mutex to ensure only one asynchronous reception can be active at a time.
-     * The flag is modified using the atomic compare-and-set function.
+     * The flag is modified using the atomic exchange function - only proceed when we see the
+     * flag clear and we set it to true.
      */
-    bool mutex = false;
+    bool old_mutex;

     do {
-        uint8_t expected = 0;
-        uint8_t desired = 1;
-
-        mutex = core_util_atomic_cas_u8((uint8_t *) &nordic_nrf5_uart_state[instance].rx_in_progress, &expected, desired);
-    } while (mutex == false);
+        old_mutex = core_util_atomic_exchange_bool(&nordic_nrf5_uart_state[instance].rx_in_progress, true);
+    } while (old_mutex == true);

     /* Store callback handler, mask and reset event value. */
     obj->serial.rx_handler = handler;
@@ -1663,8 +1659,8 @@ void serial_tx_abort_asynch(serial_t *obj)
     nrf_uarte_event_clear(nordic_nrf5_uart_register[instance], NRF_UARTE_EVENT_ENDTX);

     /* Reset Tx flags. */
-    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
     nordic_nrf5_uart_state[instance].tx_asynch = false;
+    nordic_nrf5_uart_state[instance].tx_in_progress = false;

     /* Force reconfiguration. */
     obj->serial.update = true;
@@ -1691,8 +1687,8 @@ void serial_rx_abort_asynch(serial_t *obj)
     core_util_critical_section_enter();

     /* Reset Rx flags. */
-    nordic_nrf5_uart_state[obj->serial.instance].rx_in_progress = 0;
     nordic_nrf5_uart_state[obj->serial.instance].rx_asynch = false;
+    nordic_nrf5_uart_state[obj->serial.instance].rx_in_progress = false;
     obj->serial.rx_asynch = false;

     /* Force reconfiguration. */