@@ -2,7 +2,6 @@ pub use ::alloc::sync::Arc;
 use core::ops::{Deref, DerefMut};
 use core::time::Duration;
 
-use std::collections::HashSet;
 use std::cell::RefCell;
 
 use std::sync::atomic::{AtomicUsize, Ordering};
@@ -48,7 +47,7 @@ impl Condvar {
 
 thread_local! {
 	/// We track the set of locks currently held by a reference to their `LockMetadata`
-	static LOCKS_HELD: RefCell<HashSet<Arc<LockMetadata>>> = RefCell::new(HashSet::new());
+	static LOCKS_HELD: RefCell<HashMap<u64, Arc<LockMetadata>>> = RefCell::new(HashMap::new());
 }
 static LOCK_IDX: AtomicUsize = AtomicUsize::new(0);
 
@@ -61,16 +60,9 @@ static LOCKS_INIT: Once = Once::new();
 /// when the Mutex itself was constructed.
 struct LockMetadata {
 	lock_idx: u64,
-	locked_before: StdMutex<HashSet<LockDep>>,
+	locked_before: StdMutex<HashMap<u64, LockDep>>,
 	_lock_construction_bt: Backtrace,
 }
-impl PartialEq for LockMetadata {
-	fn eq(&self, o: &LockMetadata) -> bool { self.lock_idx == o.lock_idx }
-}
-impl Eq for LockMetadata {}
-impl std::hash::Hash for LockMetadata {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock_idx); }
-}
 
 struct LockDep {
 	lock: Arc<LockMetadata>,
@@ -83,13 +75,6 @@ impl LockDep {
 		Self { lock: Arc::clone(lock), lockdep_trace: None }
 	}
 }
-impl PartialEq for LockDep {
-	fn eq(&self, o: &LockDep) -> bool { self.lock.lock_idx == o.lock.lock_idx }
-}
-impl Eq for LockDep {}
-impl std::hash::Hash for LockDep {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock.lock_idx); }
-}
 
 #[cfg(feature = "backtrace")]
 fn get_construction_location(backtrace: &Backtrace) -> String {
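The manual PartialEq/Eq/Hash impls removed above only existed so that HashSet would deduplicate by `lock_idx`; once the containers are HashMaps keyed on that u64 directly, the impls can go. A minimal standalone sketch of the before/after trade-off (the `Meta` type here is illustrative, not from this patch):

use std::collections::HashMap;
use std::sync::Arc;

// Hypothetical stand-in for LockMetadata: note no PartialEq/Eq/Hash impls.
struct Meta { id: u64 }

fn main() {
	// Keying the map on the id gives the same by-id identity semantics the
	// removed Hash/Eq impls provided, with no boilerplate on Meta itself.
	let mut held: HashMap<u64, Arc<Meta>> = HashMap::new();
	let m = Arc::new(Meta { id: 0 });
	held.insert(m.id, Arc::clone(&m));
	assert!(held.contains_key(&0));
	held.remove(&m.id);
	assert!(held.is_empty());
}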
@@ -123,7 +108,7 @@ impl LockMetadata {
 		let lock_idx = LOCK_IDX.fetch_add(1, Ordering::Relaxed) as u64;
 
 		let res = Arc::new(LockMetadata {
-			locked_before: StdMutex::new(HashSet::new()),
+			locked_before: StdMutex::new(HashMap::new()),
 			lock_idx,
 			_lock_construction_bt: backtrace,
 		});
@@ -148,20 +133,20 @@ impl LockMetadata {
 			// For each lock which is currently locked, check that no lock's locked-before
 			// set includes the lock we're about to lock, which would imply a lockorder
 			// inversion.
-			for locked in held.borrow().iter() {
-				if read && *locked == *this {
+			for (locked_idx, _locked) in held.borrow().iter() {
+				if read && *locked_idx == this.lock_idx {
 					// Recursive read locks are explicitly allowed
 					return;
 				}
 			}
-			for locked in held.borrow().iter() {
-				if !read && *locked == *this {
+			for (locked_idx, locked) in held.borrow().iter() {
+				if !read && *locked_idx == this.lock_idx {
 					// With `feature = "backtrace"` set, we may be looking at different instances
 					// of the same lock.
 					debug_assert!(cfg!(feature = "backtrace"), "Tried to acquire a lock while it was held!");
 				}
-				for locked_dep in locked.locked_before.lock().unwrap().iter() {
-					if locked_dep.lock == *this && locked_dep.lock != *locked {
+				for (locked_dep_idx, locked_dep) in locked.locked_before.lock().unwrap().iter() {
+					if *locked_dep_idx == this.lock_idx && *locked_dep_idx != locked.lock_idx {
 						#[cfg(feature = "backtrace")]
 						panic!("Tried to violate existing lockorder.\nMutex that should be locked after the current lock was created at the following backtrace.\nNote that to get a backtrace for the lockorder violation, you should set RUST_BACKTRACE=1\nLock being taken constructed at: {} ({}):\n{:?}\nLock constructed at: {} ({})\n{:?}\n\nLock dep created at:\n{:?}\n\n",
 							get_construction_location(&this._lock_construction_bt), this.lock_idx, this._lock_construction_bt,
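For reference, the inversion check this hunk rewrites reduces to: before taking lock `this`, scan every currently-held lock and fail if `this` already appears in that lock's locked-before set. A self-contained sketch under assumed types (plain u64 ids standing in for LockMetadata):

use std::collections::{HashMap, HashSet};

// locked_before maps each lock id to the set of ids it was taken after.
fn would_invert(locked_before: &HashMap<u64, HashSet<u64>>, held: &HashSet<u64>, this: u64) -> bool {
	held.iter().any(|locked| {
		locked_before.get(locked).map_or(false, |deps| deps.contains(&this) && this != *locked)
	})
}

fn main() {
	let mut locked_before: HashMap<u64, HashSet<u64>> = HashMap::new();
	// First observed order: lock 0, then lock 1 -- so 1's locked-before set gains 0.
	locked_before.entry(1).or_default().insert(0);
	// Taking 0 while 1 is held reverses that order: an inversion.
	let held: HashSet<u64> = HashSet::from([1]);
	assert!(would_invert(&locked_before, &held, 0));
	// With nothing held, any acquisition is fine.
	assert!(!would_invert(&locked_before, &HashSet::new(), 0));
}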
@@ -174,12 +159,12 @@ impl LockMetadata {
 				// Insert any already-held locks in our locked-before set.
 				let mut locked_before = this.locked_before.lock().unwrap();
 				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
+				if !locked_before.contains_key(&lockdep.lock.lock_idx) {
 					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+					locked_before.insert(lockdep.lock.lock_idx, lockdep);
 				}
 			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
 			inserted = true;
 		});
 		inserted
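The contains_key/insert pair keeps the first-recorded backtrace for a dependency edge: the relatively expensive Backtrace::new() runs only when the edge is new. With a HashMap the same insert-once pattern could also be written with the entry API, which probes the map once instead of twice; a hedged sketch with a hypothetical `Dep` stand-in for LockDep:

use std::collections::HashMap;

// Hypothetical stand-in for LockDep; `trace` plays the role of lockdep_trace.
struct Dep { trace: Option<String> }

fn record_once(locked_before: &mut HashMap<u64, Dep>, idx: u64) {
	// The closure (and hence the trace capture) only runs if the key is absent.
	locked_before.entry(idx).or_insert_with(|| Dep { trace: Some(format!("captured for {}", idx)) });
}

fn main() {
	let mut locked_before = HashMap::new();
	record_once(&mut locked_before, 42);
	record_once(&mut locked_before, 42); // no-op: edge already recorded
	assert_eq!(locked_before.len(), 1);
}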
@@ -194,14 +179,14 @@ impl LockMetadata {
 			// consider try-locks to ever generate lockorder inversions. However, if a try-lock
 			// succeeds, we do consider it to have created lockorder dependencies.
 			let mut locked_before = this.locked_before.lock().unwrap();
-			for locked in held.borrow().iter() {
+			for (locked_idx, locked) in held.borrow().iter() {
 				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
+				if !locked_before.contains_key(locked_idx) {
 					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+					locked_before.insert(*locked_idx, lockdep);
 				}
 			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
 		});
 	}
 }
@@ -231,7 +216,7 @@ impl<'a, T: Sized> MutexGuard<'a, T> {
 impl<T: Sized> Drop for MutexGuard<'_, T> {
 	fn drop(&mut self) {
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.mutex.deps);
+			held.borrow_mut().remove(&self.mutex.deps.lock_idx);
 		});
 	}
 }
@@ -302,7 +287,7 @@ impl<T: Sized> Drop for RwLockReadGuard<'_, T> {
 			return;
 		}
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps);
+			held.borrow_mut().remove(&self.lock.deps.lock_idx);
 		});
 	}
 }
@@ -318,7 +303,7 @@ impl<T: Sized> Deref for RwLockWriteGuard<'_, T> {
 impl<T: Sized> Drop for RwLockWriteGuard<'_, T> {
	fn drop(&mut self) {
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps);
+			held.borrow_mut().remove(&self.lock.deps.lock_idx);
 		});
 	}
 }
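All three guard Drop impls now remove by the u64 id rather than by value. A minimal RAII sketch of that pattern, with assumed names (`Guard`, `HELD`) rather than the real guard types:

use std::cell::RefCell;
use std::collections::HashMap;

thread_local! {
	// Per-thread map of held-lock ids, mirroring LOCKS_HELD's shape.
	static HELD: RefCell<HashMap<u64, &'static str>> = RefCell::new(HashMap::new());
}

struct Guard { idx: u64 }

impl Drop for Guard {
	fn drop(&mut self) {
		// Removing by the u64 key needs no Hash/Eq on the tracked value.
		HELD.with(|held| { held.borrow_mut().remove(&self.idx); });
	}
}

fn main() {
	HELD.with(|held| { held.borrow_mut().insert(7, "my_lock"); });
	let guard = Guard { idx: 7 };
	drop(guard); // guard's Drop clears the entry
	HELD.with(|held| assert!(held.borrow().is_empty()));
}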