@@ -2,7 +2,6 @@ pub use ::alloc::sync::Arc;
 use core::ops::{Deref, DerefMut};
 use core::time::Duration;
 
-use std::collections::HashSet;
 use std::cell::RefCell;
 
 use std::sync::atomic::{AtomicUsize, Ordering};
@@ -13,8 +12,10 @@ use std::sync::RwLockReadGuard as StdRwLockReadGuard;
 use std::sync::RwLockWriteGuard as StdRwLockWriteGuard;
 use std::sync::Condvar as StdCondvar;
 
+use prelude::HashMap;
+
 #[cfg(feature = "backtrace")]
-use {prelude::{HashMap, hash_map}, backtrace::Backtrace, std::sync::Once};
+use {prelude::hash_map, backtrace::Backtrace, std::sync::Once};
 
 #[cfg(not(feature = "backtrace"))]
 struct Backtrace{}
@@ -48,7 +49,7 @@ impl Condvar {
 
 thread_local! {
 	/// We track the set of locks currently held by a reference to their `LockMetadata`
-	static LOCKS_HELD: RefCell<HashSet<Arc<LockMetadata>>> = RefCell::new(HashSet::new());
+	static LOCKS_HELD: RefCell<HashMap<u64, Arc<LockMetadata>>> = RefCell::new(HashMap::new());
 }
 static LOCK_IDX: AtomicUsize = AtomicUsize::new(0);
 
@@ -61,34 +62,13 @@ static LOCKS_INIT: Once = Once::new();
 /// when the Mutex itself was constructed.
 struct LockMetadata {
 	lock_idx: u64,
-	locked_before: StdMutex<HashSet<LockDep>>,
+	locked_before: StdMutex<HashMap<u64, LockDep>>,
 	_lock_construction_bt: Backtrace,
 }
-impl PartialEq for LockMetadata {
-	fn eq(&self, o: &LockMetadata) -> bool { self.lock_idx == o.lock_idx }
-}
-impl Eq for LockMetadata {}
-impl std::hash::Hash for LockMetadata {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock_idx); }
-}
 
 struct LockDep {
 	lock: Arc<LockMetadata>,
-	lockdep_trace: Option<Backtrace>,
-}
-impl LockDep {
-	/// Note that `Backtrace::new()` is rather expensive so we rely on the caller to fill in the
-	/// `lockdep_backtrace` field after ensuring we need it.
-	fn new_without_bt(lock: &Arc<LockMetadata>) -> Self {
-		Self { lock: Arc::clone(lock), lockdep_trace: None }
-	}
-}
-impl PartialEq for LockDep {
-	fn eq(&self, o: &LockDep) -> bool { self.lock.lock_idx == o.lock.lock_idx }
-}
-impl Eq for LockDep {}
-impl std::hash::Hash for LockDep {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock.lock_idx); }
+	lockdep_trace: Backtrace,
 }
 
 #[cfg(feature = "backtrace")]
@@ -123,7 +103,7 @@ impl LockMetadata {
 		let lock_idx = LOCK_IDX.fetch_add(1, Ordering::Relaxed) as u64;
 
 		let res = Arc::new(LockMetadata {
-			locked_before: StdMutex::new(HashSet::new()),
+			locked_before: StdMutex::new(HashMap::new()),
 			lock_idx,
 			_lock_construction_bt: backtrace,
 		});
@@ -148,20 +128,20 @@ impl LockMetadata {
 			// For each lock which is currently locked, check that no lock's locked-before
 			// set includes the lock we're about to lock, which would imply a lockorder
 			// inversion.
-			for locked in held.borrow().iter() {
-				if read && *locked == *this {
+			for (locked_idx, _locked) in held.borrow().iter() {
+				if read && *locked_idx == this.lock_idx {
 					// Recursive read locks are explicitly allowed
 					return;
 				}
 			}
-			for locked in held.borrow().iter() {
-				if !read && *locked == *this {
+			for (locked_idx, locked) in held.borrow().iter() {
+				if !read && *locked_idx == this.lock_idx {
 					// With `feature = "backtrace"` set, we may be looking at different instances
 					// of the same lock.
 					debug_assert!(cfg!(feature = "backtrace"), "Tried to acquire a lock while it was held!");
 				}
-				for locked_dep in locked.locked_before.lock().unwrap().iter() {
-					if locked_dep.lock == *this && locked_dep.lock != *locked {
+				for (locked_dep_idx, locked_dep) in locked.locked_before.lock().unwrap().iter() {
+					if *locked_dep_idx == this.lock_idx && *locked_dep_idx != locked.lock_idx {
 						#[cfg(feature = "backtrace")]
 						panic!("Tried to violate existing lockorder.\nMutex that should be locked after the current lock was created at the following backtrace.\nNote that to get a backtrace for the lockorder violation, you should set RUST_BACKTRACE=1\nLock being taken constructed at: {} ({}):\n{:?}\nLock constructed at: {} ({})\n{:?}\n\nLock dep created at:\n{:?}\n\n",
 							get_construction_location(&this._lock_construction_bt), this.lock_idx, this._lock_construction_bt,
@@ -173,13 +153,12 @@ impl LockMetadata {
 				}
 				// Insert any already-held locks in our locked-before set.
 				let mut locked_before = this.locked_before.lock().unwrap();
-				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
-					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+				if !locked_before.contains_key(&locked.lock_idx) {
+					let lockdep = LockDep { lock: Arc::clone(locked), lockdep_trace: Backtrace::new() };
+					locked_before.insert(lockdep.lock.lock_idx, lockdep);
 				}
 			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
 			inserted = true;
 		});
 		inserted
@@ -194,14 +173,13 @@ impl LockMetadata {
 			// consider try-locks to ever generate lockorder inversions. However, if a try-lock
 			// succeeds, we do consider it to have created lockorder dependencies.
 			let mut locked_before = this.locked_before.lock().unwrap();
-			for locked in held.borrow().iter() {
-				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
-					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+			for (locked_idx, locked) in held.borrow().iter() {
+				if !locked_before.contains_key(locked_idx) {
+					let lockdep = LockDep { lock: Arc::clone(locked), lockdep_trace: Backtrace::new() };
+					locked_before.insert(*locked_idx, lockdep);
 				}
 			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
 		});
 	}
 }
@@ -231,7 +209,7 @@ impl<'a, T: Sized> MutexGuard<'a, T> {
 impl<T: Sized> Drop for MutexGuard<'_, T> {
 	fn drop(&mut self) {
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.mutex.deps);
+			held.borrow_mut().remove(&self.mutex.deps.lock_idx);
 		});
 	}
 }
@@ -302,7 +280,7 @@ impl<T: Sized> Drop for RwLockReadGuard<'_, T> {
 			return;
 		}
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps);
+			held.borrow_mut().remove(&self.lock.deps.lock_idx);
 		});
 	}
 }
@@ -318,7 +296,7 @@ impl<T: Sized> Deref for RwLockWriteGuard<'_, T> {
 impl<T: Sized> Drop for RwLockWriteGuard<'_, T> {
 	fn drop(&mut self) {
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps);
+			held.borrow_mut().remove(&self.lock.deps.lock_idx);
 		});
 	}
 }
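
The diff above replaces the HashSet-of-metadata bookkeeping with HashMaps keyed by each lock's unique lock_idx, which is why the PartialEq/Eq/Hash impls on LockMetadata and LockDep can be dropped: membership tests, inserts, and removals on guard drop become plain u64 lookups. The following is a minimal standalone sketch of that idea only, not the rust-lightning code itself; it omits the backtrace capture, read-lock, and try-lock handling, uses plain std types instead of the crate's prelude, and the helper names (new_lock_metadata, pre_lock, post_unlock) are illustrative.

// Illustrative sketch (assumed names, not the actual implementation): per-thread
// held-lock tracking and locked-before edges, both keyed by a unique lock index.
use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::{Arc, Mutex};

static LOCK_IDX: AtomicU64 = AtomicU64::new(0);

struct LockMetadata {
	lock_idx: u64,
	// Locks which were already held when this lock was taken, keyed by their index.
	locked_before: Mutex<HashMap<u64, Arc<LockMetadata>>>,
}

thread_local! {
	// Locks currently held by this thread, keyed by lock index.
	static LOCKS_HELD: RefCell<HashMap<u64, Arc<LockMetadata>>> = RefCell::new(HashMap::new());
}

fn new_lock_metadata() -> Arc<LockMetadata> {
	Arc::new(LockMetadata {
		lock_idx: LOCK_IDX.fetch_add(1, Ordering::Relaxed),
		locked_before: Mutex::new(HashMap::new()),
	})
}

// Called before acquiring `this`: if any currently-held lock lists `this` in its
// locked-before set, the two locks have been taken in both orders -- an inversion.
fn pre_lock(this: &Arc<LockMetadata>) {
	LOCKS_HELD.with(|held| {
		for (_idx, locked) in held.borrow().iter() {
			if locked.locked_before.lock().unwrap().contains_key(&this.lock_idx) {
				panic!("lockorder inversion: {} taken while {} is held", this.lock_idx, locked.lock_idx);
			}
			// Record that `locked` was held before `this` was taken.
			this.locked_before.lock().unwrap()
				.entry(locked.lock_idx).or_insert_with(|| Arc::clone(locked));
		}
		held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
	});
}

// Called when the guard for `this` is dropped.
fn post_unlock(this: &Arc<LockMetadata>) {
	LOCKS_HELD.with(|held| { held.borrow_mut().remove(&this.lock_idx); });
}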