|
30 | 30 | //! detect recursive locks.
|
31 | 31 |
|
use cell::UnsafeCell;
use mem::{self, MaybeUninit};
use sync::atomic::{AtomicUsize, Ordering};
use sys::c;
use sys::compat;
|
@@ -157,34 +157,34 @@ fn kind() -> Kind {
|
157 | 157 | return ret;
|
158 | 158 | }
|
159 | 159 |
|
160 |
| -pub struct ReentrantMutex { inner: UnsafeCell<c::CRITICAL_SECTION> } |
| 160 | +pub struct ReentrantMutex { inner: MaybeUninit<UnsafeCell<c::CRITICAL_SECTION>> } |
161 | 161 |
|
162 | 162 | unsafe impl Send for ReentrantMutex {}
|
163 | 163 | unsafe impl Sync for ReentrantMutex {}
|
164 | 164 |
|
165 | 165 | impl ReentrantMutex {
|
166 |
| - pub unsafe fn uninitialized() -> ReentrantMutex { |
167 |
| - mem::uninitialized() |
| 166 | + pub fn uninitialized() -> ReentrantMutex { |
| 167 | + MaybeUninit::uninitialized() |
168 | 168 | }
|
169 | 169 |
|
170 | 170 | pub unsafe fn init(&mut self) {
|
171 |
| - c::InitializeCriticalSection(self.inner.get()); |
| 171 | + c::InitializeCriticalSection(self.inner.get_ref().get()); |
172 | 172 | }
|
173 | 173 |
|
174 | 174 | pub unsafe fn lock(&self) {
|
175 |
| - c::EnterCriticalSection(self.inner.get()); |
| 175 | + c::EnterCriticalSection(self.inner.get_ref().get()); |
176 | 176 | }
|
177 | 177 |
|
178 | 178 | #[inline]
|
179 | 179 | pub unsafe fn try_lock(&self) -> bool {
|
180 |
| - c::TryEnterCriticalSection(self.inner.get()) != 0 |
| 180 | + c::TryEnterCriticalSection(self.inner.get_ref().get()) != 0 |
181 | 181 | }
|
182 | 182 |
|
183 | 183 | pub unsafe fn unlock(&self) {
|
184 |
| - c::LeaveCriticalSection(self.inner.get()); |
| 184 | + c::LeaveCriticalSection(self.inner.get_ref().get()); |
185 | 185 | }
|
186 | 186 |
|
187 | 187 | pub unsafe fn destroy(&self) {
|
188 |
| - c::DeleteCriticalSection(self.inner.get()); |
| 188 | + c::DeleteCriticalSection(self.inner.get_ref().get()); |
189 | 189 | }
|
190 | 190 | }
|
0 commit comments