Skip to content

Commit 25c3dba

Browse files
committed
Swap HashSets with custom Hash in debug_sync for HashMaps
1 parent 0315e26 commit 25c3dba

File tree

1 file changed: +19 additions, −34 deletions

lightning/src/debug_sync.rs

Lines changed: 19 additions & 34 deletions
Original file line number | Diff line number | Diff line change
@@ -2,7 +2,6 @@ pub use ::alloc::sync::Arc;
22
use core::ops::{Deref, DerefMut};
33
use core::time::Duration;
44

5-
use std::collections::HashSet;
65
use std::cell::RefCell;
76

87
use std::sync::atomic::{AtomicUsize, Ordering};
@@ -48,7 +47,7 @@ impl Condvar {
4847

4948
thread_local! {
5049
/// We track the set of locks currently held by a reference to their `LockMetadata`
51-
static LOCKS_HELD: RefCell<HashSet<Arc<LockMetadata>>> = RefCell::new(HashSet::new());
50+
static LOCKS_HELD: RefCell<HashMap<u64, Arc<LockMetadata>>> = RefCell::new(HashMap::new());
5251
}
5352
static LOCK_IDX: AtomicUsize = AtomicUsize::new(0);
5453

@@ -61,16 +60,9 @@ static LOCKS_INIT: Once = Once::new();
6160
/// when the Mutex itself was constructed.
6261
struct LockMetadata {
6362
lock_idx: u64,
64-
locked_before: StdMutex<HashSet<LockDep>>,
63+
locked_before: StdMutex<HashMap<u64, LockDep>>,
6564
_lock_construction_bt: Backtrace,
6665
}
67-
impl PartialEq for LockMetadata {
68-
fn eq(&self, o: &LockMetadata) -> bool { self.lock_idx == o.lock_idx }
69-
}
70-
impl Eq for LockMetadata {}
71-
impl std::hash::Hash for LockMetadata {
72-
fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock_idx); }
73-
}
7466

7567
struct LockDep {
7668
lock: Arc<LockMetadata>,
@@ -83,13 +75,6 @@ impl LockDep {
8375
Self { lock: Arc::clone(lock), lockdep_trace: None }
8476
}
8577
}
86-
impl PartialEq for LockDep {
87-
fn eq(&self, o: &LockDep) -> bool { self.lock.lock_idx == o.lock.lock_idx }
88-
}
89-
impl Eq for LockDep {}
90-
impl std::hash::Hash for LockDep {
91-
fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock.lock_idx); }
92-
}
9378

9479
#[cfg(feature = "backtrace")]
9580
fn get_construction_location(backtrace: &Backtrace) -> String {
@@ -123,7 +108,7 @@ impl LockMetadata {
123108
let lock_idx = LOCK_IDX.fetch_add(1, Ordering::Relaxed) as u64;
124109

125110
let res = Arc::new(LockMetadata {
126-
locked_before: StdMutex::new(HashSet::new()),
111+
locked_before: StdMutex::new(HashMap::new()),
127112
lock_idx,
128113
_lock_construction_bt: backtrace,
129114
});
@@ -148,20 +133,20 @@ impl LockMetadata {
148133
// For each lock which is currently locked, check that no lock's locked-before
149134
// set includes the lock we're about to lock, which would imply a lockorder
150135
// inversion.
151-
for locked in held.borrow().iter() {
152-
if read && *locked == *this {
136+
for (locked_idx, _locked) in held.borrow().iter() {
137+
if read && *locked_idx == this.lock_idx {
153138
// Recursive read locks are explicitly allowed
154139
return;
155140
}
156141
}
157-
for locked in held.borrow().iter() {
158-
if !read && *locked == *this {
142+
for (locked_idx, locked) in held.borrow().iter() {
143+
if !read && *locked_idx == this.lock_idx {
159144
// With `feature = "backtrace"` set, we may be looking at different instances
160145
// of the same lock.
161146
debug_assert!(cfg!(feature = "backtrace"), "Tried to acquire a lock while it was held!");
162147
}
163-
for locked_dep in locked.locked_before.lock().unwrap().iter() {
164-
if locked_dep.lock == *this && locked_dep.lock != *locked {
148+
for (locked_dep_idx, locked_dep) in locked.locked_before.lock().unwrap().iter() {
149+
if *locked_dep_idx == this.lock_idx && *locked_dep_idx != locked.lock_idx {
165150
#[cfg(feature = "backtrace")]
166151
panic!("Tried to violate existing lockorder.\nMutex that should be locked after the current lock was created at the following backtrace.\nNote that to get a backtrace for the lockorder violation, you should set RUST_BACKTRACE=1\nLock being taken constructed at: {} ({}):\n{:?}\nLock constructed at: {} ({})\n{:?}\n\nLock dep created at:\n{:?}\n\n",
167152
get_construction_location(&this._lock_construction_bt), this.lock_idx, this._lock_construction_bt,
@@ -174,12 +159,12 @@ impl LockMetadata {
174159
// Insert any already-held locks in our locked-before set.
175160
let mut locked_before = this.locked_before.lock().unwrap();
176161
let mut lockdep = LockDep::new_without_bt(locked);
177-
if !locked_before.contains(&lockdep) {
162+
if !locked_before.contains_key(&lockdep.lock.lock_idx) {
178163
lockdep.lockdep_trace = Some(Backtrace::new());
179-
locked_before.insert(lockdep);
164+
locked_before.insert(lockdep.lock.lock_idx, lockdep);
180165
}
181166
}
182-
held.borrow_mut().insert(Arc::clone(this));
167+
held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
183168
inserted = true;
184169
});
185170
inserted
@@ -194,14 +179,14 @@ impl LockMetadata {
194179
// consider try-locks to ever generate lockorder inversions. However, if a try-lock
195180
// succeeds, we do consider it to have created lockorder dependencies.
196181
let mut locked_before = this.locked_before.lock().unwrap();
197-
for locked in held.borrow().iter() {
182+
for (locked_idx, locked) in held.borrow().iter() {
198183
let mut lockdep = LockDep::new_without_bt(locked);
199-
if !locked_before.contains(&lockdep) {
184+
if !locked_before.contains_key(locked_idx) {
200185
lockdep.lockdep_trace = Some(Backtrace::new());
201-
locked_before.insert(lockdep);
186+
locked_before.insert(*locked_idx, lockdep);
202187
}
203188
}
204-
held.borrow_mut().insert(Arc::clone(this));
189+
held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
205190
});
206191
}
207192
}
@@ -231,7 +216,7 @@ impl<'a, T: Sized> MutexGuard<'a, T> {
231216
impl<T: Sized> Drop for MutexGuard<'_, T> {
232217
fn drop(&mut self) {
233218
LOCKS_HELD.with(|held| {
234-
held.borrow_mut().remove(&self.mutex.deps);
219+
held.borrow_mut().remove(&self.mutex.deps.lock_idx);
235220
});
236221
}
237222
}
@@ -302,7 +287,7 @@ impl<T: Sized> Drop for RwLockReadGuard<'_, T> {
302287
return;
303288
}
304289
LOCKS_HELD.with(|held| {
305-
held.borrow_mut().remove(&self.lock.deps);
290+
held.borrow_mut().remove(&self.lock.deps.lock_idx);
306291
});
307292
}
308293
}
@@ -318,7 +303,7 @@ impl<T: Sized> Deref for RwLockWriteGuard<'_, T> {
318303
impl<T: Sized> Drop for RwLockWriteGuard<'_, T> {
319304
fn drop(&mut self) {
320305
LOCKS_HELD.with(|held| {
321-
held.borrow_mut().remove(&self.lock.deps);
306+
held.borrow_mut().remove(&self.lock.deps.lock_idx);
322307
});
323308
}
324309
}

0 commit comments

Comments
 (0)