@@ -186,27 +186,27 @@ struct AtomicBaseRaw {
186
186
T val;
187
187
} ret;
188
188
{
189
- CriticalSectionLock lock;
189
+ mbed:: CriticalSectionLock lock;
190
190
memcpy(std::addressof(ret.val), const_cast<const T *>(std::addressof(data)), sizeof(T));
191
191
}
192
192
return std::move(ret.val);
193
193
}
194
194
/** Atomically load the stored value.
 *
 * @param order memory order for the load; validated by
 *              MBED_CHECK_LOAD_ORDER before the access.
 * @return a copy of the stored value.
 */
T load(memory_order order = memory_order_seq_cst) const noexcept
{
    MBED_CHECK_LOAD_ORDER(order);
    // Arbitrary T cannot use hardware atomics, so atomicity is
    // provided by a critical section around the read.
    mbed::CriticalSectionLock lock;
    return data;
}
200
200
/** Atomically store a value (volatile overload).
 *
 * @param desired value to store.
 * @param order memory order for the store; validated by
 *              MBED_CHECK_STORE_ORDER before the access.
 */
void store(T desired, memory_order order = memory_order_seq_cst) volatile noexcept
{
    MBED_CHECK_STORE_ORDER(order);
    mbed::CriticalSectionLock lock;
    // Raw byte copy into the volatile object; const_cast strips the
    // volatile qualifier so memcpy can take the address.
    memcpy(const_cast<T *>(std::addressof(data)), std::addressof(desired), sizeof(T));
}
206
206
/** Atomically store a value.
 *
 * @param desired value to store; moved into place.
 * @param order memory order for the store; validated by
 *              MBED_CHECK_STORE_ORDER before the access.
 */
void store(T desired, memory_order order = memory_order_seq_cst) noexcept
{
    MBED_CHECK_STORE_ORDER(order);
    mbed::CriticalSectionLock lock;
    data = std::move(desired); // MoveAssignable
}
212
212
T exchange(T desired, memory_order = memory_order_seq_cst) volatile noexcept
@@ -217,23 +217,23 @@ struct AtomicBaseRaw {
217
217
T val;
218
218
} old;
219
219
{
220
- CriticalSectionLock lock;
220
+ mbed:: CriticalSectionLock lock;
221
221
memcpy(std::addressof(old.val), const_cast<const T *>(std::addressof(data)), sizeof(T));
222
222
memcpy(const_cast<T *>(std::addressof(data)), std::addressof(desired), sizeof(T));
223
223
}
224
224
return old.val;
225
225
}
226
226
/** Atomically replace the stored value and return the previous one.
 *
 * @param desired new value; moved into place.
 * @return the value held before the exchange.
 *
 * The unnamed memory_order parameter is accepted for interface
 * compatibility; the critical section provides full ordering.
 */
T exchange(T desired, memory_order = memory_order_seq_cst) noexcept
{
    mbed::CriticalSectionLock lock;
    T old = std::move(data); // MoveConstructible
    data = std::move(desired); // MoveAssignable
    return old;
}
233
233
bool compare_exchange_strong(T &expected, T desired, memory_order success, memory_order failure) volatile noexcept
234
234
{
235
235
MBED_CHECK_CAS_ORDER(success, failure);
236
- CriticalSectionLock lock;
236
+ mbed:: CriticalSectionLock lock;
237
237
if (memcmp(const_cast<const T *>(std::addressof(data)), std::addressof(expected), sizeof(T)) == 0) {
238
238
memcpy(const_cast<T *>(std::addressof(data)), std::addressof(desired), sizeof(T));
239
239
return true;
@@ -245,7 +245,7 @@ struct AtomicBaseRaw {
245
245
bool compare_exchange_strong(T &expected, T desired, memory_order success, memory_order failure) noexcept
246
246
{
247
247
MBED_CHECK_CAS_ORDER(success, failure);
248
- CriticalSectionLock lock;
248
+ mbed:: CriticalSectionLock lock;
249
249
if (memcmp(std::addressof(data), std::addressof(expected), sizeof(T)) == 0) {
250
250
data = std::move(desired); // MoveAssignable
251
251
return true;
0 commit comments