@@ -2,7 +2,6 @@ pub use ::alloc::sync::Arc;
 use core::ops::{Deref, DerefMut};
 use core::time::Duration;
 
-use std::collections::HashSet;
 use std::cell::RefCell;
 
 use std::sync::atomic::{AtomicUsize, Ordering};
@@ -13,8 +12,10 @@ use std::sync::RwLockReadGuard as StdRwLockReadGuard;
 use std::sync::RwLockWriteGuard as StdRwLockWriteGuard;
 use std::sync::Condvar as StdCondvar;
 
+use prelude::HashMap;
+
 #[cfg(feature = "backtrace")]
-use {prelude::{HashMap, hash_map}, backtrace::Backtrace, std::sync::Once};
+use {prelude::hash_map, backtrace::Backtrace, std::sync::Once};
 
 #[cfg(not(feature = "backtrace"))]
 struct Backtrace{}
@@ -48,7 +49,7 @@ impl Condvar {
 
 thread_local! {
	/// We track the set of locks currently held by a reference to their `LockMetadata`
-	static LOCKS_HELD: RefCell<HashSet<Arc<LockMetadata>>> = RefCell::new(HashSet::new());
+	static LOCKS_HELD: RefCell<HashMap<u64, Arc<LockMetadata>>> = RefCell::new(HashMap::new());
 }
 static LOCK_IDX: AtomicUsize = AtomicUsize::new(0);
 
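Note on this hunk: keying `LOCKS_HELD` by each lock's pre-assigned `lock_idx`, rather than hashing `Arc<LockMetadata>` values in a `HashSet`, is what lets the manual `PartialEq`/`Eq`/`Hash` impls be deleted below. A minimal standalone sketch of the pattern, with hypothetical `Metadata`, `HELD`, `note_acquired`, and `note_released` standing in for the real types:

use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::Arc;

// Hypothetical stand-in for LockMetadata; note it needs no Hash/Eq impls.
struct Metadata { idx: u64 }

thread_local! {
	// Keyed by the unique lock index, so membership checks and removals
	// never need to hash or compare the metadata itself.
	static HELD: RefCell<HashMap<u64, Arc<Metadata>>> = RefCell::new(HashMap::new());
}

fn note_acquired(m: &Arc<Metadata>) {
	HELD.with(|held| { held.borrow_mut().insert(m.idx, Arc::clone(m)); });
}

fn note_released(m: &Arc<Metadata>) {
	HELD.with(|held| { held.borrow_mut().remove(&m.idx); });
}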
@@ -61,16 +62,9 @@ static LOCKS_INIT: Once = Once::new();
 /// when the Mutex itself was constructed.
 struct LockMetadata {
	lock_idx: u64,
-	locked_before: StdMutex<HashSet<LockDep>>,
+	locked_before: StdMutex<HashMap<u64, LockDep>>,
	_lock_construction_bt: Backtrace,
 }
-impl PartialEq for LockMetadata {
-	fn eq(&self, o: &LockMetadata) -> bool { self.lock_idx == o.lock_idx }
-}
-impl Eq for LockMetadata {}
-impl std::hash::Hash for LockMetadata {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock_idx); }
-}
 
 struct LockDep {
	lock: Arc<LockMetadata>,
@@ -83,13 +77,6 @@ impl LockDep {
		Self { lock: Arc::clone(lock), lockdep_trace: None }
	}
 }
-impl PartialEq for LockDep {
-	fn eq(&self, o: &LockDep) -> bool { self.lock.lock_idx == o.lock.lock_idx }
-}
-impl Eq for LockDep {}
-impl std::hash::Hash for LockDep {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock.lock_idx); }
-}
 
 #[cfg(feature = "backtrace")]
 fn get_construction_location(backtrace: &Backtrace) -> String {
@@ -123,7 +110,7 @@ impl LockMetadata {
		let lock_idx = LOCK_IDX.fetch_add(1, Ordering::Relaxed) as u64;
 
		let res = Arc::new(LockMetadata {
-			locked_before: StdMutex::new(HashSet::new()),
+			locked_before: StdMutex::new(HashMap::new()),
			lock_idx,
			_lock_construction_bt: backtrace,
		});
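The `lock_idx` values keying these maps come from the `LOCK_IDX` counter above. A self-contained sketch of that allocation (the hypothetical `next_lock_idx` wrapper is mine; the atomic and ordering match the source):

use std::sync::atomic::{AtomicUsize, Ordering};

static LOCK_IDX: AtomicUsize = AtomicUsize::new(0);

// Relaxed ordering is enough here: the counter only needs to hand out
// unique values, not to order any other memory operations.
fn next_lock_idx() -> u64 {
	LOCK_IDX.fetch_add(1, Ordering::Relaxed) as u64
}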
@@ -148,20 +135,20 @@ impl LockMetadata {
			// For each lock which is currently locked, check that no lock's locked-before
			// set includes the lock we're about to lock, which would imply a lockorder
			// inversion.
-			for locked in held.borrow().iter() {
-				if read && *locked == *this {
+			for (locked_idx, _locked) in held.borrow().iter() {
+				if read && *locked_idx == this.lock_idx {
					// Recursive read locks are explicitly allowed
					return;
				}
			}
-			for locked in held.borrow().iter() {
-				if !read && *locked == *this {
+			for (locked_idx, locked) in held.borrow().iter() {
+				if !read && *locked_idx == this.lock_idx {
					// With `feature = "backtrace"` set, we may be looking at different instances
					// of the same lock.
					debug_assert!(cfg!(feature = "backtrace"), "Tried to acquire a lock while it was held!");
				}
-				for locked_dep in locked.locked_before.lock().unwrap().iter() {
-					if locked_dep.lock == *this && locked_dep.lock != *locked {
+				for (locked_dep_idx, locked_dep) in locked.locked_before.lock().unwrap().iter() {
+					if *locked_dep_idx == this.lock_idx && *locked_dep_idx != locked.lock_idx {
						#[cfg(feature = "backtrace")]
						panic!("Tried to violate existing lockorder.\nMutex that should be locked after the current lock was created at the following backtrace.\nNote that to get a backtrace for the lockorder violation, you should set RUST_BACKTRACE=1\nLock being taken constructed at: {} ({}):\n{:?}\nLock constructed at: {} ({})\n{:?}\n\nLock dep created at:\n{:?}\n\n",
							get_construction_location(&this._lock_construction_bt), this.lock_idx, this._lock_construction_bt,
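For reference, the inversion check in this hunk can be modeled in isolation: the lock about to be taken must not appear in the locked-before set of any currently-held lock, unless the two are the same lock. A hedged sketch, where `would_invert`, `locked_before`, and `held` are all hypothetical simplifications of the real structures:

use std::collections::HashMap;

// `locked_before[x]` models the set of lock indices ever held while taking
// lock `x`; `held` models LOCKS_HELD for the current thread.
fn would_invert(
	locked_before: &HashMap<u64, HashMap<u64, ()>>,
	held: &HashMap<u64, ()>,
	taking: u64,
) -> bool {
	for (held_idx, _) in held.iter() {
		if let Some(deps) = locked_before.get(held_idx) {
			// A currently-held lock was previously acquired *after* `taking`
			// somewhere else; acquiring `taking` now reverses that order.
			if deps.contains_key(&taking) && *held_idx != taking {
				return true;
			}
		}
	}
	false
}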
@@ -174,12 +161,12 @@ impl LockMetadata {
				// Insert any already-held locks in our locked-before set.
				let mut locked_before = this.locked_before.lock().unwrap();
				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
+				if !locked_before.contains_key(&lockdep.lock.lock_idx) {
					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+					locked_before.insert(lockdep.lock.lock_idx, lockdep);
				}
			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
			inserted = true;
		});
		inserted
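A possible follow-up on this hunk: the `contains_key` check followed by `insert` performs two map lookups. `HashMap`'s entry API expresses "capture a backtrace only for a new edge" in one lookup. A sketch under assumed names (`Dep` and `record_dep` are hypothetical; `trace` stands in for the `Option<Backtrace>`):

use std::collections::HashMap;

// Hypothetical mirror of LockDep; the trace is only captured the first
// time a given dependency edge is recorded.
struct Dep { held_idx: u64, trace: Option<String> }

fn record_dep(locked_before: &mut HashMap<u64, Dep>, held_idx: u64) {
	// entry()/or_insert_with() folds contains_key-then-insert into a single
	// lookup; the closure (and thus the capture) runs only for new keys.
	locked_before.entry(held_idx).or_insert_with(|| Dep {
		held_idx,
		trace: Some(String::from("<captured backtrace>")),
	});
}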
@@ -194,14 +181,14 @@ impl LockMetadata {
			// consider try-locks to ever generate lockorder inversions. However, if a try-lock
			// succeeds, we do consider it to have created lockorder dependencies.
			let mut locked_before = this.locked_before.lock().unwrap();
-			for locked in held.borrow().iter() {
+			for (locked_idx, locked) in held.borrow().iter() {
				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
+				if !locked_before.contains_key(locked_idx) {
					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+					locked_before.insert(*locked_idx, lockdep);
				}
			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
		});
	}
 }
@@ -231,7 +218,7 @@ impl<'a, T: Sized> MutexGuard<'a, T> {
 impl<T: Sized> Drop for MutexGuard<'_, T> {
	fn drop(&mut self) {
		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.mutex.deps);
+			held.borrow_mut().remove(&self.mutex.deps.lock_idx);
		});
	}
 }
@@ -302,7 +289,7 @@ impl<T: Sized> Drop for RwLockReadGuard<'_, T> {
			return;
		}
		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps);
+			held.borrow_mut().remove(&self.lock.deps.lock_idx);
		});
	}
 }
@@ -318,7 +305,7 @@ impl<T: Sized> Deref for RwLockWriteGuard<'_, T> {
 impl<T: Sized> Drop for RwLockWriteGuard<'_, T> {
	fn drop(&mut self) {
		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps);
+			held.borrow_mut().remove(&self.lock.deps.lock_idx);
		});
	}
 }
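All three Drop impls now remove the held-lock entry by `lock_idx`, rather than by a probe value that had to hash and compare like the stored `Arc<LockMetadata>`. A standalone sketch of that shape (the `Guard` type and `HELD` map here are hypothetical):

use std::cell::RefCell;
use std::collections::HashMap;

thread_local! {
	static HELD: RefCell<HashMap<u64, ()>> = RefCell::new(HashMap::new());
}

// Hypothetical guard: dropping it removes the lock's entry by index, the
// same O(1) removal the three Drop impls above perform.
struct Guard { lock_idx: u64 }

impl Drop for Guard {
	fn drop(&mut self) {
		HELD.with(|held| { held.borrow_mut().remove(&self.lock_idx); });
	}
}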