@@ -138,8 +138,8 @@ typedef struct {
     uint8_t buffer[NUMBER_OF_BANKS][DMA_BUFFER_SIZE];
     uint32_t usage_counter;
     uint8_t tx_data;
-    bool tx_in_progress;
-    bool rx_in_progress;
+    volatile uint8_t tx_in_progress;
+    volatile uint8_t rx_in_progress;
     bool tx_asynch;
     bool rx_asynch;
     bool callback_posted;
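Why the type change: mbed's byte-wide compare-and-set primitive, core_util_atomic_cas_u8(), operates on uint8_t rather than bool, and volatile keeps the compiler from caching the flag across the interrupt/thread boundary. A minimal sketch of how the new flag is meant to be claimed; demo_uart_state_t and demo_try_claim_tx() are hypothetical names for illustration, not part of the patch:

#include <stdint.h>
#include <stdbool.h>
#include "mbed_critical.h"   /* core_util_atomic_cas_u8() */

/* Hypothetical, cut-down model of the driver state for illustration. */
typedef struct {
    volatile uint8_t tx_in_progress;   /* 0 = idle, 1 = claimed */
} demo_uart_state_t;

static demo_uart_state_t demo_state;

/* Try to claim the Tx path once: succeeds only if the flag was 0 and we
 * atomically raised it to 1. The (uint8_t *) cast mirrors the driver code. */
static bool demo_try_claim_tx(void)
{
    uint8_t expected = 0;
    return core_util_atomic_cas_u8((uint8_t *) &demo_state.tx_in_progress, &expected, 1);
}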
@@ -252,7 +252,7 @@ static void nordic_nrf5_uart_callback_handler(uint32_t instance)
 static void nordic_nrf5_uart_event_handler_endtx(int instance)
 {
     /* Release mutex. As the owner this call is safe. */
-    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, false);
+    nordic_nrf5_uart_state[instance].tx_in_progress = 0;

     /* Check if callback handler and Tx event mask is set. */
     uart_irq_handler callback = (uart_irq_handler) nordic_nrf5_uart_state[instance].owner->handler;
@@ -275,8 +275,8 @@ static void nordic_nrf5_uart_event_handler_endtx(int instance)
 static void nordic_nrf5_uart_event_handler_endtx_asynch(int instance)
 {
     /* Set Tx done and reset Tx mode to be not asynchronous. */
+    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
     nordic_nrf5_uart_state[instance].tx_asynch = false;
-    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, false);

     /* Cast handler to callback function pointer. */
     void (*callback)(void) = (void (*)(void)) nordic_nrf5_uart_state[instance].owner->tx_handler;
@@ -482,8 +482,8 @@ static void nordic_nrf5_uart_event_handler_rxstarted(int instance)
 static void nordic_nrf5_uart_event_handler_endrx_asynch(int instance)
 {
     /* Set Rx done and reset Rx mode to be not asynchronous. */
+    nordic_nrf5_uart_state[instance].rx_in_progress = 0;
     nordic_nrf5_uart_state[instance].rx_asynch = false;
-    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].rx_in_progress, false);

     /* Cast handler to callback function pointer. */
     void (*callback)(void) = (void (*)(void)) nordic_nrf5_uart_state[instance].owner->rx_handler;
@@ -1410,7 +1410,7 @@ int serial_writable(serial_t *obj)

     int instance = uart_object->instance;

-    return (!core_util_atomic_load_bool(&nordic_nrf5_uart_state[instance].tx_in_progress) &&
+    return ((nordic_nrf5_uart_state[instance].tx_in_progress == 0) &&
             (nrf_uarte_event_extra_check(nordic_nrf5_uart_register[instance], NRF_UARTE_EVENT_TXDRDY)));
 }

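serial_writable() now reports ready only when the flag is clear and the hardware TXDRDY event is set, so a caller can poll it before pushing each byte. A usage sketch, assuming an already-initialized serial_t; the helper uart_write_blocking() is hypothetical, not part of the HAL:

#include <stddef.h>
#include <stdint.h>
#include "serial_api.h"   /* mbed HAL: serial_t, serial_writable(), serial_putc() */

/* Hypothetical helper: busy-wait until the UART accepts each byte. */
static void uart_write_blocking(serial_t *obj, const uint8_t *data, size_t length)
{
    for (size_t i = 0; i < length; i++) {
        while (!serial_writable(obj)) {
            /* spin: Tx still in progress or TXDRDY not set yet */
        }
        serial_putc(obj, data[i]);
    }
}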
@@ -1449,14 +1449,16 @@ int serial_tx_asynch(serial_t *obj, const void *tx, size_t tx_length, uint8_t tx

     /**
      * tx_in_progress acts like a mutex to ensure only one transmission can be active at a time.
-     * The flag is modified using the atomic exchange function - only proceed when we see the
-     * flag clear and we set it to true.
+     * The flag is modified using the atomic compare-and-set function - only proceed once we
+     * have observed the flag clear and atomically set it to 1.
      */
-    bool old_mutex;
+    bool mutex = false;

     do {
-        old_mutex = core_util_atomic_exchange_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, true);
-    } while (old_mutex == true);
+        uint8_t expected = 0;
+        uint8_t desired = 1;
+
+        mutex = core_util_atomic_cas_u8((uint8_t *) &nordic_nrf5_uart_state[instance].tx_in_progress, &expected, desired);
+    } while (mutex == false);

     /* State variables. */
     int result = 0;
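The loop above is the acquire half of a simple spin protocol: retry the 0-to-1 compare-and-set until it succeeds, noting that 'expected' must be reset to 0 on every iteration because a failed core_util_atomic_cas_u8() writes the observed value back into it. The release half, as in the ENDTX/ENDRX handlers, is a plain store by the owner. A self-contained sketch of the pattern; flag_acquire() and flag_release() are hypothetical helper names:

#include <stdint.h>
#include <stdbool.h>
#include "mbed_critical.h"   /* core_util_atomic_cas_u8() */

/* Acquire: loop until we are the context that flips the flag 0 -> 1.
 * 'expected' is re-initialized each iteration because a failed CAS
 * stores the value it observed (1) back into it. */
static void flag_acquire(volatile uint8_t *flag)
{
    bool mutex = false;

    do {
        uint8_t expected = 0;
        mutex = core_util_atomic_cas_u8((uint8_t *) flag, &expected, 1);
    } while (mutex == false);
}

/* Release: only the current owner ever clears the flag, so a plain
 * store to the volatile byte suffices - no atomic primitive needed. */
static void flag_release(volatile uint8_t *flag)
{
    *flag = 0;
}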
@@ -1573,14 +1575,16 @@ void serial_rx_asynch(serial_t *obj, void *rx, size_t rx_length, uint8_t rx_widt

     /**
      * rx_in_progress acts like a mutex to ensure only one asynchronous reception can be active at a time.
-     * The flag is modified using the atomic exchange function - only proceed when we see the
-     * flag clear and we set it to true.
+     * The flag is modified using the atomic compare-and-set function - only proceed once we
+     * have observed the flag clear and atomically set it to 1.
      */
-    bool old_mutex;
+    bool mutex = false;

     do {
-        old_mutex = core_util_atomic_exchange_bool(&nordic_nrf5_uart_state[instance].rx_in_progress, true);
-    } while (old_mutex == true);
+        uint8_t expected = 0;
+        uint8_t desired = 1;
+
+        mutex = core_util_atomic_cas_u8((uint8_t *) &nordic_nrf5_uart_state[instance].rx_in_progress, &expected, desired);
+    } while (mutex == false);

     /* Store callback handler, mask and reset event value. */
     obj->serial.rx_handler = handler;
@@ -1659,8 +1663,8 @@ void serial_tx_abort_asynch(serial_t *obj)
     nrf_uarte_event_clear(nordic_nrf5_uart_register[instance], NRF_UARTE_EVENT_ENDTX);

     /* Reset Tx flags. */
+    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
     nordic_nrf5_uart_state[instance].tx_asynch = false;
-    nordic_nrf5_uart_state[instance].tx_in_progress = false;

     /* Force reconfiguration. */
     obj->serial.update = true;
@@ -1687,8 +1691,8 @@ void serial_rx_abort_asynch(serial_t *obj)
     core_util_critical_section_enter();

     /* Reset Rx flags. */
+    nordic_nrf5_uart_state[obj->serial.instance].rx_in_progress = 0;
     nordic_nrf5_uart_state[obj->serial.instance].rx_asynch = false;
-    nordic_nrf5_uart_state[obj->serial.instance].rx_in_progress = false;
     obj->serial.rx_asynch = false;

     /* Force reconfiguration. */