
Commit 6d54ebb (parent: 0315e26)

Swap HashSets with custom Hash in debug_sync for HashMaps

1 file changed, 22 insertions(+), 35 deletions(-)
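
The change in a nutshell: `debug_sync` previously tracked `LockMetadata` and `LockDep` values in `HashSet`s whose hand-written `PartialEq`/`Eq`/`Hash` impls considered only the `lock_idx` field. Keying a `HashMap` by that `u64` expresses the same identity directly and lets the manual impls be deleted. A minimal sketch of the before/after pattern (the `Meta` type and `main` are illustrative, not from the commit):

```rust
use std::collections::{HashMap, HashSet};
use std::sync::Arc;

struct Meta {
	id: u64,
}

// Before: set membership keyed on `id` needs three manual impls.
impl PartialEq for Meta {
	fn eq(&self, o: &Meta) -> bool { self.id == o.id }
}
impl Eq for Meta {}
impl std::hash::Hash for Meta {
	fn hash<H: std::hash::Hasher>(&self, h: &mut H) { h.write_u64(self.id); }
}

fn main() {
	let m = Arc::new(Meta { id: 7 });

	// Before: HashSet<Arc<Meta>> leans on the impls above.
	let mut set: HashSet<Arc<Meta>> = HashSet::new();
	set.insert(Arc::clone(&m));
	assert!(set.contains(&m));

	// After: HashMap<u64, Arc<Meta>> keys on the id explicitly, so the
	// value type needs no PartialEq/Eq/Hash impls at all.
	let mut map: HashMap<u64, Arc<Meta>> = HashMap::new();
	map.insert(m.id, Arc::clone(&m));
	assert!(map.contains_key(&m.id));
}
```

The map also makes removals cheap: as the `Drop` impls at the end of the diff show, an entry can be removed by its index alone, with no need to construct a value that hashes equal.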

lightning/src/debug_sync.rs

@@ -2,7 +2,6 @@ pub use ::alloc::sync::Arc;
 use core::ops::{Deref, DerefMut};
 use core::time::Duration;
 
-use std::collections::HashSet;
 use std::cell::RefCell;
 
 use std::sync::atomic::{AtomicUsize, Ordering};
@@ -13,8 +12,10 @@ use std::sync::RwLockReadGuard as StdRwLockReadGuard;
 use std::sync::RwLockWriteGuard as StdRwLockWriteGuard;
 use std::sync::Condvar as StdCondvar;
 
+use prelude::HashMap;
+
 #[cfg(feature = "backtrace")]
-use {prelude::{HashMap, hash_map}, backtrace::Backtrace, std::sync::Once};
+use {prelude::hash_map, backtrace::Backtrace, std::sync::Once};
 
 #[cfg(not(feature = "backtrace"))]
 struct Backtrace{}
@@ -48,7 +49,7 @@ impl Condvar {
 
 thread_local! {
 	/// We track the set of locks currently held by a reference to their `LockMetadata`
-	static LOCKS_HELD: RefCell<HashSet<Arc<LockMetadata>>> = RefCell::new(HashSet::new());
+	static LOCKS_HELD: RefCell<HashMap<u64, Arc<LockMetadata>>> = RefCell::new(HashMap::new());
 }
 static LOCK_IDX: AtomicUsize = AtomicUsize::new(0);
 
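
For context, here is roughly how such a thread-local held-locks map is driven; a sketch only, with invented helper names (`note_acquired`, `note_released`) standing in for the guard constructors and `Drop` impls in this file:

```rust
use std::cell::RefCell;
use std::collections::HashMap;
use std::sync::Arc;

struct LockMetadata {
	lock_idx: u64,
}

thread_local! {
	// Held locks keyed by their unique index, mirroring LOCKS_HELD above.
	static HELD: RefCell<HashMap<u64, Arc<LockMetadata>>> = RefCell::new(HashMap::new());
}

fn note_acquired(meta: &Arc<LockMetadata>) {
	HELD.with(|held| {
		held.borrow_mut().insert(meta.lock_idx, Arc::clone(meta));
	});
}

fn note_released(meta: &Arc<LockMetadata>) {
	// Removal needs only the u64 key; no value that "hashes equal"
	// has to be built, unlike with the old HashSet representation.
	HELD.with(|held| {
		held.borrow_mut().remove(&meta.lock_idx);
	});
}

fn main() {
	let m = Arc::new(LockMetadata { lock_idx: 0 });
	note_acquired(&m);
	HELD.with(|held| assert!(held.borrow().contains_key(&0)));
	note_released(&m);
	HELD.with(|held| assert!(held.borrow().is_empty()));
}
```
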
@@ -61,16 +62,9 @@ static LOCKS_INIT: Once = Once::new();
 /// when the Mutex itself was constructed.
 struct LockMetadata {
 	lock_idx: u64,
-	locked_before: StdMutex<HashSet<LockDep>>,
+	locked_before: StdMutex<HashMap<u64, LockDep>>,
 	_lock_construction_bt: Backtrace,
 }
-impl PartialEq for LockMetadata {
-	fn eq(&self, o: &LockMetadata) -> bool { self.lock_idx == o.lock_idx }
-}
-impl Eq for LockMetadata {}
-impl std::hash::Hash for LockMetadata {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock_idx); }
-}
 
 struct LockDep {
 	lock: Arc<LockMetadata>,
@@ -83,13 +77,6 @@ impl LockDep {
 		Self { lock: Arc::clone(lock), lockdep_trace: None }
 	}
 }
-impl PartialEq for LockDep {
-	fn eq(&self, o: &LockDep) -> bool { self.lock.lock_idx == o.lock.lock_idx }
-}
-impl Eq for LockDep {}
-impl std::hash::Hash for LockDep {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock.lock_idx); }
-}
 
 #[cfg(feature = "backtrace")]
 fn get_construction_location(backtrace: &Backtrace) -> String {
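
Deleting these impls also removes a standing obligation: `Hash` and `Eq` must agree (`a == b` must imply equal hashes), and a `HashSet` misbehaves silently if they drift apart. A contrived sketch of that hazard, not taken from this codebase:

```rust
use std::collections::HashSet;

struct Dep {
	idx: u64,
	order: u64,
}

// Deliberately inconsistent impls: `eq` compares `idx` while `hash`
// hashes `order`, violating the Hash/Eq contract. Nothing warns about
// this; lookups just start missing.
impl PartialEq for Dep {
	fn eq(&self, o: &Dep) -> bool { self.idx == o.idx }
}
impl Eq for Dep {}
impl std::hash::Hash for Dep {
	fn hash<H: std::hash::Hasher>(&self, h: &mut H) { h.write_u64(self.order); }
}

fn main() {
	let mut set = HashSet::new();
	set.insert(Dep { idx: 1, order: 10 });
	// Equal per `eq`, but almost certainly hashed to a different bucket,
	// so the set usually reports it absent despite the "equal" element.
	println!("found: {}", set.contains(&Dep { idx: 1, order: 20 }));
	// A HashMap<u64, Dep> keyed on idx has no such failure mode: the
	// key used for insert and lookup is the same u64.
}
```
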
@@ -123,7 +110,7 @@ impl LockMetadata {
 		let lock_idx = LOCK_IDX.fetch_add(1, Ordering::Relaxed) as u64;
 
 		let res = Arc::new(LockMetadata {
-			locked_before: StdMutex::new(HashSet::new()),
+			locked_before: StdMutex::new(HashMap::new()),
 			lock_idx,
 			_lock_construction_bt: backtrace,
 		});
@@ -148,20 +135,20 @@ impl LockMetadata {
 			// For each lock which is currently locked, check that no lock's locked-before
 			// set includes the lock we're about to lock, which would imply a lockorder
 			// inversion.
-			for locked in held.borrow().iter() {
-				if read && *locked == *this {
+			for (locked_idx, _locked) in held.borrow().iter() {
+				if read && *locked_idx == this.lock_idx {
 					// Recursive read locks are explicitly allowed
 					return;
 				}
 			}
-			for locked in held.borrow().iter() {
-				if !read && *locked == *this {
+			for (locked_idx, locked) in held.borrow().iter() {
+				if !read && *locked_idx == this.lock_idx {
 					// With `feature = "backtrace"` set, we may be looking at different instances
 					// of the same lock.
 					debug_assert!(cfg!(feature = "backtrace"), "Tried to acquire a lock while it was held!");
 				}
-				for locked_dep in locked.locked_before.lock().unwrap().iter() {
-					if locked_dep.lock == *this && locked_dep.lock != *locked {
+				for (locked_dep_idx, locked_dep) in locked.locked_before.lock().unwrap().iter() {
+					if *locked_dep_idx == this.lock_idx && *locked_dep_idx != locked.lock_idx {
 						#[cfg(feature = "backtrace")]
 						panic!("Tried to violate existing lockorder.\nMutex that should be locked after the current lock was created at the following backtrace.\nNote that to get a backtrace for the lockorder violation, you should set RUST_BACKTRACE=1\nLock being taken constructed at: {} ({}):\n{:?}\nLock constructed at: {} ({})\n{:?}\n\nLock dep created at:\n{:?}\n\n",
 							get_construction_location(&this._lock_construction_bt), this.lock_idx, this._lock_construction_bt,
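
This hunk is the heart of the detector: when lock `this` is taken, every currently held lock's locked-before set is scanned for `this.lock_idx`; a hit means `this` was previously ordered before that lock, so taking it now would invert the recorded order. A condensed, self-contained sketch of the same idea (names simplified, not the file's actual API):

```rust
use std::collections::HashMap;

// Per-lock metadata: the indices of locks that were already held the
// first time this lock was taken (its "locked-before" set).
struct Meta {
	idx: u64,
	locked_before: HashMap<u64, ()>,
}

// Taking `this` while `held` are held is an inversion if any held lock
// records `this` in its locked-before set, mirroring the check above.
fn would_invert(this: &Meta, held: &HashMap<u64, &Meta>) -> bool {
	held.values().any(|locked| locked.locked_before.contains_key(&this.idx))
}

fn main() {
	let a = Meta { idx: 0, locked_before: HashMap::new() };
	let mut b = Meta { idx: 1, locked_before: HashMap::new() };
	// First pattern: take A, then B, recording "A was held before B".
	b.locked_before.insert(a.idx, ());
	// Later: holding B, attempt to take A -- an inversion.
	let mut held: HashMap<u64, &Meta> = HashMap::new();
	held.insert(b.idx, &b);
	assert!(would_invert(&a, &held));
}
```
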
@@ -174,12 +161,12 @@ impl LockMetadata {
 				// Insert any already-held locks in our locked-before set.
 				let mut locked_before = this.locked_before.lock().unwrap();
 				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
+				if !locked_before.contains_key(&lockdep.lock.lock_idx) {
 					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+					locked_before.insert(lockdep.lock.lock_idx, lockdep);
 				}
 			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
 			inserted = true;
 		});
 		inserted
@@ -194,14 +181,14 @@ impl LockMetadata {
 			// consider try-locks to ever generate lockorder inversions. However, if a try-lock
 			// succeeds, we do consider it to have created lockorder dependencies.
 			let mut locked_before = this.locked_before.lock().unwrap();
-			for locked in held.borrow().iter() {
+			for (locked_idx, locked) in held.borrow().iter() {
 				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
+				if !locked_before.contains_key(locked_idx) {
 					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+					locked_before.insert(*locked_idx, lockdep);
 				}
 			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
 		});
 	}
 }
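
The commit keeps the two-step `contains_key` + `insert`, so the `Backtrace` is captured only for dependencies seen for the first time. With a `HashMap` the same guard could also be written as a single lookup via the entry API; a sketch of that alternative, with `String` standing in for `Backtrace` (not what the commit does):

```rust
use std::collections::hash_map::Entry;
use std::collections::HashMap;

struct LockDep {
	trace: Option<String>, // stand-in for the Option<Backtrace> field
}

fn record_dep(locked_before: &mut HashMap<u64, LockDep>, dep_idx: u64) {
	// Single-lookup equivalent of the contains_key/insert pair above:
	// the (expensive) trace is captured only when the dependency is new.
	if let Entry::Vacant(e) = locked_before.entry(dep_idx) {
		e.insert(LockDep { trace: Some(format!("captured for {}", dep_idx)) });
	}
}

fn main() {
	let mut lb = HashMap::new();
	record_dep(&mut lb, 3);
	record_dep(&mut lb, 3); // already recorded: no-op, trace not re-captured
	assert_eq!(lb.len(), 1);
	assert!(lb[&3].trace.is_some());
}
```
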
@@ -231,7 +218,7 @@ impl<'a, T: Sized> MutexGuard<'a, T> {
 impl<T: Sized> Drop for MutexGuard<'_, T> {
 	fn drop(&mut self) {
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.mutex.deps);
+			held.borrow_mut().remove(&self.mutex.deps.lock_idx);
 		});
 	}
 }
@@ -302,7 +289,7 @@ impl<T: Sized> Drop for RwLockReadGuard<'_, T> {
 			return;
 		}
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps);
+			held.borrow_mut().remove(&self.lock.deps.lock_idx);
 		});
 	}
 }
@@ -318,7 +305,7 @@ impl<T: Sized> Deref for RwLockWriteGuard<'_, T> {
 impl<T: Sized> Drop for RwLockWriteGuard<'_, T> {
 	fn drop(&mut self) {
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps);
+			held.borrow_mut().remove(&self.lock.deps.lock_idx);
 		});
 	}
 }
