@@ -43,43 +43,43 @@ impl Condvar {
}

thread_local! {
-	/// We track the set of locks currently held by a reference to their `MutexMetadata`
-	static MUTEXES_HELD: RefCell<HashSet<Arc<MutexMetadata>>> = RefCell::new(HashSet::new());
+	/// We track the set of locks currently held by a reference to their `LockMetadata`
+	static LOCKS_HELD: RefCell<HashSet<Arc<LockMetadata>>> = RefCell::new(HashSet::new());
}
-static MUTEX_IDX: AtomicUsize = AtomicUsize::new(0);
+static LOCK_IDX: AtomicUsize = AtomicUsize::new(0);

-/// Metadata about a single mutex, by id, the set of things locked-before it, and the backtrace of
+/// Metadata about a single lock, by id, the set of things locked-before it, and the backtrace of
/// when the Mutex itself was constructed.
-struct MutexMetadata {
-	mutex_idx: u64,
-	locked_before: StdMutex<HashSet<Arc<MutexMetadata>>>,
+struct LockMetadata {
+	lock_idx: u64,
+	locked_before: StdMutex<HashSet<Arc<LockMetadata>>>,
	#[cfg(feature = "backtrace")]
-	mutex_construction_bt: Backtrace,
+	lock_construction_bt: Backtrace,
}
-impl PartialEq for MutexMetadata {
-	fn eq(&self, o: &MutexMetadata) -> bool { self.mutex_idx == o.mutex_idx }
+impl PartialEq for LockMetadata {
+	fn eq(&self, o: &LockMetadata) -> bool { self.lock_idx == o.lock_idx }
}
-impl Eq for MutexMetadata {}
-impl std::hash::Hash for MutexMetadata {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.mutex_idx); }
+impl Eq for LockMetadata {}
+impl std::hash::Hash for LockMetadata {
+	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock_idx); }
}

-impl MutexMetadata {
-	fn new() -> MutexMetadata {
-		MutexMetadata {
+impl LockMetadata {
+	fn new() -> LockMetadata {
+		LockMetadata {
			locked_before: StdMutex::new(HashSet::new()),
-			mutex_idx: MUTEX_IDX.fetch_add(1, Ordering::Relaxed) as u64,
+			lock_idx: LOCK_IDX.fetch_add(1, Ordering::Relaxed) as u64,
			#[cfg(feature = "backtrace")]
-			mutex_construction_bt: Backtrace::new(),
+			lock_construction_bt: Backtrace::new(),
		}
	}

	// Returns whether we were a recursive lock (only relevant for read)
-	fn _pre_lock(this: &Arc<MutexMetadata>, read: bool) -> bool {
+	fn _pre_lock(this: &Arc<LockMetadata>, read: bool) -> bool {
		let mut inserted = false;
-		MUTEXES_HELD.with(|held| {
-			// For each mutex which is currently locked, check that no mutex's locked-before
-			// set includes the mutex we're about to lock, which would imply a lockorder
+		LOCKS_HELD.with(|held| {
+			// For each lock which is currently locked, check that no lock's locked-before
+			// set includes the lock we're about to lock, which would imply a lockorder
			// inversion.
			for locked in held.borrow().iter() {
				if read && *locked == *this {
@@ -89,17 +89,17 @@ impl MutexMetadata {
			}
			for locked in held.borrow().iter() {
				if !read && *locked == *this {
-					panic!("Tried to lock a mutex while it was held!");
+					panic!("Tried to lock a lock while it was held!");
				}
				for locked_dep in locked.locked_before.lock().unwrap().iter() {
					if *locked_dep == *this {
						#[cfg(feature = "backtrace")]
-						panic!("Tried to violate existing lockorder.\nMutex that should be locked after the current lock was created at the following backtrace.\nNote that to get a backtrace for the lockorder violation, you should set RUST_BACKTRACE=1\n{:?}", locked.mutex_construction_bt);
+						panic!("Tried to violate existing lockorder.\nMutex that should be locked after the current lock was created at the following backtrace.\nNote that to get a backtrace for the lockorder violation, you should set RUST_BACKTRACE=1\n{:?}", locked.lock_construction_bt);
						#[cfg(not(feature = "backtrace"))]
						panic!("Tried to violate existing lockorder. Build with the backtrace feature for more info.");
					}
				}
-				// Insert any already-held mutexes in our locked-before set.
+				// Insert any already-held locks in our locked-before set.
				this.locked_before.lock().unwrap().insert(Arc::clone(locked));
			}
			held.borrow_mut().insert(Arc::clone(this));
@@ -108,11 +108,11 @@ impl MutexMetadata {
		inserted
	}

-	fn pre_lock(this: &Arc<MutexMetadata>) { Self::_pre_lock(this, false); }
-	fn pre_read_lock(this: &Arc<MutexMetadata>) -> bool { Self::_pre_lock(this, true) }
+	fn pre_lock(this: &Arc<LockMetadata>) { Self::_pre_lock(this, false); }
+	fn pre_read_lock(this: &Arc<LockMetadata>) -> bool { Self::_pre_lock(this, true) }

-	fn try_locked(this: &Arc<MutexMetadata>) {
-		MUTEXES_HELD.with(|held| {
+	fn try_locked(this: &Arc<LockMetadata>) {
+		LOCKS_HELD.with(|held| {
			// Since a try-lock will simply fail if the lock is held already, we do not
			// consider try-locks to ever generate lockorder inversions. However, if a try-lock
			// succeeds, we do consider it to have created lockorder dependencies.
@@ -126,7 +126,7 @@ impl MutexMetadata {

pub struct Mutex<T: Sized> {
	inner: StdMutex<T>,
-	deps: Arc<MutexMetadata>,
+	deps: Arc<LockMetadata>,
}

#[must_use = "if unused the Mutex will immediately unlock"]
@@ -148,7 +148,7 @@ impl<'a, T: Sized> MutexGuard<'a, T> {

impl<T: Sized> Drop for MutexGuard<'_, T> {
	fn drop(&mut self) {
-		MUTEXES_HELD.with(|held| {
+		LOCKS_HELD.with(|held| {
			held.borrow_mut().remove(&self.mutex.deps);
		});
	}
@@ -170,44 +170,44 @@ impl<T: Sized> DerefMut for MutexGuard<'_, T> {

impl<T> Mutex<T> {
	pub fn new(inner: T) -> Mutex<T> {
-		Mutex { inner: StdMutex::new(inner), deps: Arc::new(MutexMetadata::new()) }
+		Mutex { inner: StdMutex::new(inner), deps: Arc::new(LockMetadata::new()) }
	}

	pub fn lock<'a>(&'a self) -> LockResult<MutexGuard<'a, T>> {
-		MutexMetadata::pre_lock(&self.deps);
+		LockMetadata::pre_lock(&self.deps);
		self.inner.lock().map(|lock| MutexGuard { mutex: self, lock }).map_err(|_| ())
	}

	pub fn try_lock<'a>(&'a self) -> LockResult<MutexGuard<'a, T>> {
		let res = self.inner.try_lock().map(|lock| MutexGuard { mutex: self, lock }).map_err(|_| ());
		if res.is_ok() {
-			MutexMetadata::try_locked(&self.deps);
+			LockMetadata::try_locked(&self.deps);
		}
		res
	}
}

pub struct RwLock<T: Sized> {
	inner: StdRwLock<T>,
-	deps: Arc<MutexMetadata>,
+	deps: Arc<LockMetadata>,
}

pub struct RwLockReadGuard<'a, T: Sized + 'a> {
-	mutex: &'a RwLock<T>,
+	lock: &'a RwLock<T>,
	first_lock: bool,
-	lock: StdRwLockReadGuard<'a, T>,
+	guard: StdRwLockReadGuard<'a, T>,
}

pub struct RwLockWriteGuard<'a, T: Sized + 'a> {
-	mutex: &'a RwLock<T>,
-	lock: StdRwLockWriteGuard<'a, T>,
+	lock: &'a RwLock<T>,
+	guard: StdRwLockWriteGuard<'a, T>,
}

impl<T: Sized> Deref for RwLockReadGuard<'_, T> {
	type Target = T;

	fn deref(&self) -> &T {
-		&self.lock.deref()
+		&self.guard.deref()
	}
}

@@ -219,8 +219,8 @@ impl<T: Sized> Drop for RwLockReadGuard<'_, T> {
			// always be true.
			return;
		}
-		MUTEXES_HELD.with(|held| {
-			held.borrow_mut().remove(&self.mutex.deps);
+		LOCKS_HELD.with(|held| {
+			held.borrow_mut().remove(&self.lock.deps);
		});
	}
}
@@ -229,43 +229,43 @@ impl<T: Sized> Deref for RwLockWriteGuard<'_, T> {
	type Target = T;

	fn deref(&self) -> &T {
-		&self.lock.deref()
+		&self.guard.deref()
	}
}

impl<T: Sized> Drop for RwLockWriteGuard<'_, T> {
	fn drop(&mut self) {
-		MUTEXES_HELD.with(|held| {
-			held.borrow_mut().remove(&self.mutex.deps);
+		LOCKS_HELD.with(|held| {
+			held.borrow_mut().remove(&self.lock.deps);
		});
	}
}

impl<T: Sized> DerefMut for RwLockWriteGuard<'_, T> {
	fn deref_mut(&mut self) -> &mut T {
-		self.lock.deref_mut()
+		self.guard.deref_mut()
	}
}

impl<T> RwLock<T> {
	pub fn new(inner: T) -> RwLock<T> {
-		RwLock { inner: StdRwLock::new(inner), deps: Arc::new(MutexMetadata::new()) }
+		RwLock { inner: StdRwLock::new(inner), deps: Arc::new(LockMetadata::new()) }
	}

	pub fn read<'a>(&'a self) -> LockResult<RwLockReadGuard<'a, T>> {
-		let first_lock = MutexMetadata::pre_read_lock(&self.deps);
-		self.inner.read().map(|lock| RwLockReadGuard { mutex: self, lock, first_lock }).map_err(|_| ())
+		let first_lock = LockMetadata::pre_read_lock(&self.deps);
+		self.inner.read().map(|guard| RwLockReadGuard { lock: self, guard, first_lock }).map_err(|_| ())
	}

	pub fn write<'a>(&'a self) -> LockResult<RwLockWriteGuard<'a, T>> {
-		MutexMetadata::pre_lock(&self.deps);
-		self.inner.write().map(|lock| RwLockWriteGuard { mutex: self, lock }).map_err(|_| ())
+		LockMetadata::pre_lock(&self.deps);
+		self.inner.write().map(|guard| RwLockWriteGuard { lock: self, guard }).map_err(|_| ())
	}

	pub fn try_write<'a>(&'a self) -> LockResult<RwLockWriteGuard<'a, T>> {
-		let res = self.inner.try_write().map(|lock| RwLockWriteGuard { mutex: self, lock }).map_err(|_| ());
+		let res = self.inner.try_write().map(|guard| RwLockWriteGuard { lock: self, guard }).map_err(|_| ());
		if res.is_ok() {
-			LockMetadata::try_locked(&self.deps);
+			LockMetadata::try_locked(&self.deps);
		}
		res
	}
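
For reference, a minimal sketch of the kind of inversion `_pre_lock` is written to catch, assuming it sits in the same module as the `Mutex` wrapper above; the test name and the locks `a`/`b` are illustrative only and not part of this change. Taking `a` then `b` once records `a` in `b`'s locked-before set, so a later `b`-then-`a` ordering should panic.

#[test]
#[should_panic]
fn lockorder_inversion_sketch() {
	let a = Mutex::new(());
	let b = Mutex::new(());
	{
		// Take a then b: b's locked-before set now contains a's metadata.
		let _a = a.lock().unwrap();
		let _b = b.lock().unwrap();
	}
	{
		// Take b then a: _pre_lock finds a in b's locked-before set and panics.
		let _b = b.lock().unwrap();
		let _a = a.lock().unwrap();
	}
}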