
Commit 815d255

Merge pull request #3319 from tnull/2024-09-rustfmt-sync
`rustfmt`: Run on `lightning/src/sync/*`
2 parents 866cedf + 555bd75 commit 815d255

6 files changed: +88 -45 lines changed


lightning/src/sync/debug_sync.rs

Lines changed: 47 additions & 20 deletions
@@ -1,30 +1,34 @@
-pub use ::alloc::sync::Arc;
+pub use alloc::sync::Arc;
 use core::ops::{Deref, DerefMut};
 use core::time::Duration;

 use std::cell::RefCell;

 use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Condvar as StdCondvar;
 use std::sync::Mutex as StdMutex;
 use std::sync::MutexGuard as StdMutexGuard;
 use std::sync::RwLock as StdRwLock;
 use std::sync::RwLockReadGuard as StdRwLockReadGuard;
 use std::sync::RwLockWriteGuard as StdRwLockWriteGuard;
-use std::sync::Condvar as StdCondvar;

 pub use std::sync::WaitTimeoutResult;

 use crate::prelude::*;

-use super::{LockTestExt, LockHeldState};
+use super::{LockHeldState, LockTestExt};

 #[cfg(feature = "backtrace")]
 use {crate::prelude::hash_map, backtrace::Backtrace, std::sync::Once};

 #[cfg(not(feature = "backtrace"))]
-struct Backtrace{}
+struct Backtrace {}
 #[cfg(not(feature = "backtrace"))]
-impl Backtrace { fn new() -> Backtrace { Backtrace {} } }
+impl Backtrace {
+	fn new() -> Backtrace {
+		Backtrace {}
+	}
+}

 pub type LockResult<Guard> = Result<Guard, ()>;

@@ -37,22 +41,30 @@ impl Condvar {
 		Condvar { inner: StdCondvar::new() }
 	}

-	pub fn wait_while<'a, T, F: FnMut(&mut T) -> bool>(&'a self, guard: MutexGuard<'a, T>, condition: F)
-	-> LockResult<MutexGuard<'a, T>> {
+	pub fn wait_while<'a, T, F: FnMut(&mut T) -> bool>(
+		&'a self, guard: MutexGuard<'a, T>, condition: F,
+	) -> LockResult<MutexGuard<'a, T>> {
 		let mutex: &'a Mutex<T> = guard.mutex;
-		self.inner.wait_while(guard.into_inner(), condition).map(|lock| MutexGuard { mutex, lock })
+		self.inner
+			.wait_while(guard.into_inner(), condition)
+			.map(|lock| MutexGuard { mutex, lock })
 			.map_err(|_| ())
 	}

 	#[allow(unused)]
-	pub fn wait_timeout_while<'a, T, F: FnMut(&mut T) -> bool>(&'a self, guard: MutexGuard<'a, T>, dur: Duration, condition: F)
-	-> LockResult<(MutexGuard<'a, T>, WaitTimeoutResult)> {
+	pub fn wait_timeout_while<'a, T, F: FnMut(&mut T) -> bool>(
+		&'a self, guard: MutexGuard<'a, T>, dur: Duration, condition: F,
+	) -> LockResult<(MutexGuard<'a, T>, WaitTimeoutResult)> {
 		let mutex = guard.mutex;
-		self.inner.wait_timeout_while(guard.into_inner(), dur, condition).map_err(|_| ())
+		self.inner
+			.wait_timeout_while(guard.into_inner(), dur, condition)
+			.map_err(|_| ())
 			.map(|(lock, e)| (MutexGuard { mutex, lock }, e))
 	}

-	pub fn notify_all(&self) { self.inner.notify_all(); }
+	pub fn notify_all(&self) {
+		self.inner.notify_all();
+	}
 }

 thread_local! {
@@ -99,14 +111,19 @@ fn locate_call_symbol(backtrace: &Backtrace) -> (String, Option<u32>) {
 						symbol_after_latest_debug_sync = Some(symbol);
 						found_debug_sync = false;
 					}
-				} else { found_debug_sync = true; }
+				} else {
+					found_debug_sync = true;
+				}
 			}
 		}
 	}
 	let symbol = symbol_after_latest_debug_sync.unwrap_or_else(|| {
 		panic!("Couldn't find lock call symbol in trace {:?}", backtrace);
 	});
-	(format!("{}:{}", symbol.filename().unwrap().display(), symbol.lineno().unwrap()), symbol.colno())
+	(
+		format!("{}:{}", symbol.filename().unwrap().display(), symbol.lineno().unwrap()),
+		symbol.colno(),
+	)
 }

 impl LockMetadata {
@@ -124,16 +141,20 @@ impl LockMetadata {
 		{
 			let (lock_constr_location, lock_constr_colno) =
 				locate_call_symbol(&res._lock_construction_bt);
-			LOCKS_INIT.call_once(|| { unsafe { LOCKS = Some(StdMutex::new(new_hash_map())); } });
+			LOCKS_INIT.call_once(|| unsafe {
+				LOCKS = Some(StdMutex::new(new_hash_map()));
+			});
 			let mut locks = unsafe { LOCKS.as_ref() }.unwrap().lock().unwrap();
 			match locks.entry(lock_constr_location) {
 				hash_map::Entry::Occupied(e) => {
 					assert_eq!(lock_constr_colno,
 						locate_call_symbol(&e.get()._lock_construction_bt).1,
 						"Because Windows doesn't support column number results in backtraces, we cannot construct two mutexes on the same line or we risk lockorder detection false positives.");
-					return Arc::clone(e.get())
+					return Arc::clone(e.get());
+				},
+				hash_map::Entry::Vacant(e) => {
+					e.insert(Arc::clone(&res));
 				},
-				hash_map::Entry::Vacant(e) => { e.insert(Arc::clone(&res)); },
 			}
 		}
 		res
@@ -213,7 +234,8 @@ impl LockMetadata {
 			let mut locked_before = this.locked_before.lock().unwrap();
 			for (locked_idx, locked) in held.borrow().iter() {
 				if !locked_before.contains_key(locked_idx) {
-					let lockdep = LockDep { lock: Arc::clone(locked), _lockdep_trace: Backtrace::new() };
+					let lockdep =
+						LockDep { lock: Arc::clone(locked), _lockdep_trace: Backtrace::new() };
 					locked_before.insert(*locked_idx, lockdep);
 				}
 			}
@@ -282,7 +304,8 @@ impl<T> Mutex<T> {
 	}

 	pub fn try_lock<'a>(&'a self) -> LockResult<MutexGuard<'a, T>> {
-		let res = self.inner.try_lock().map(|lock| MutexGuard { mutex: self, lock }).map_err(|_| ());
+		let res =
+			self.inner.try_lock().map(|lock| MutexGuard { mutex: self, lock }).map_err(|_| ());
 		if res.is_ok() {
 			LockMetadata::try_locked(&self.deps);
 		}
@@ -376,7 +399,11 @@ impl<T> RwLock<T> {
 	}

 	pub fn try_write<'a>(&'a self) -> LockResult<RwLockWriteGuard<'a, T>> {
-		let res = self.inner.try_write().map(|guard| RwLockWriteGuard { lock: self, guard }).map_err(|_| ());
+		let res = self
+			.inner
+			.try_write()
+			.map(|guard| RwLockWriteGuard { lock: self, guard })
+			.map_err(|_| ());
 		if res.is_ok() {
 			LockMetadata::try_locked(&self.deps);
 		}
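
The hunks above are pure reformatting, but several touch the Condvar wrapper, whose wait_while, wait_timeout_while, and notify_all simply forward to std::sync::Condvar. For readers unfamiliar with that API, here is a minimal std-only sketch of the calling pattern (the flag, worker thread, and names are illustrative, not taken from LDK):

use std::sync::{Arc, Condvar, Mutex};
use std::thread;

fn main() {
	let pair = Arc::new((Mutex::new(true), Condvar::new()));
	let worker_pair = Arc::clone(&pair);

	// Worker clears the "pending" flag and wakes every waiter.
	thread::spawn(move || {
		let (lock, cvar) = &*worker_pair;
		*lock.lock().unwrap() = false;
		cvar.notify_all();
	});

	let (lock, cvar) = &*pair;
	// wait_while keeps blocking for as long as the closure returns true.
	let guard = cvar.wait_while(lock.lock().unwrap(), |pending| *pending).unwrap();
	assert!(!*guard);
}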

lightning/src/sync/fairrwlock.rs

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
-use std::sync::{LockResult, RwLock, RwLockReadGuard, RwLockWriteGuard, TryLockResult};
-use std::sync::atomic::{AtomicUsize, Ordering};
 use super::{LockHeldState, LockTestExt};
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::{LockResult, RwLock, RwLockReadGuard, RwLockWriteGuard, TryLockResult};

 /// Rust libstd's RwLock does not provide any fairness guarantees (and, in fact, when used on
 /// Linux with pthreads under the hood, readers trivially and completely starve writers).
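
That doc comment is the motivation for the FairRwLock wrapper defined in this file. As a rough sketch of the general technique (a writer-preference shim over std's RwLock) rather than LDK's actual implementation, one could count pending writers with an AtomicUsize and have new readers back off while a writer is queued:

use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard};

// Hypothetical type for illustration only; the real FairRwLock differs.
pub struct WriterBiasedRwLock<T> {
	lock: RwLock<T>,
	waiting_writers: AtomicUsize,
}

impl<T> WriterBiasedRwLock<T> {
	pub fn new(t: T) -> Self {
		Self { lock: RwLock::new(t), waiting_writers: AtomicUsize::new(0) }
	}

	pub fn write(&self) -> RwLockWriteGuard<'_, T> {
		// Announce the pending writer before blocking on the lock.
		self.waiting_writers.fetch_add(1, Ordering::AcqRel);
		let guard = self.lock.write().unwrap();
		self.waiting_writers.fetch_sub(1, Ordering::AcqRel);
		guard
	}

	pub fn read(&self) -> RwLockReadGuard<'_, T> {
		// New readers step aside while any writer is waiting, so a steady
		// stream of readers cannot starve writers indefinitely.
		while self.waiting_writers.load(Ordering::Acquire) != 0 {
			std::thread::yield_now();
		}
		self.lock.read().unwrap()
	}
}

The cost of this approach is a brief spin for readers whenever a writer is queued; the wrapper otherwise keeps the familiar read/write surface of std's RwLock.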

lightning/src/sync/mod.rs

Lines changed: 16 additions & 5 deletions
@@ -31,24 +31,35 @@ mod test_lockorder_checks
 #[cfg(all(feature = "std", any(ldk_bench, not(test))))]
 pub(crate) mod fairrwlock;
 #[cfg(all(feature = "std", any(ldk_bench, not(test))))]
-pub use {std::sync::{Arc, Mutex, Condvar, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard}, fairrwlock::FairRwLock};
+pub use {
+	fairrwlock::FairRwLock,
+	std::sync::{Arc, Condvar, Mutex, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard},
+};

 #[cfg(all(feature = "std", any(ldk_bench, not(test))))]
 mod ext_impl {
 	use super::*;
 	impl<'a, T: 'a> LockTestExt<'a> for Mutex<T> {
 		#[inline]
-		fn held_by_thread(&self) -> LockHeldState { LockHeldState::Unsupported }
+		fn held_by_thread(&self) -> LockHeldState {
+			LockHeldState::Unsupported
+		}
 		type ExclLock = MutexGuard<'a, T>;
 		#[inline]
-		fn unsafe_well_ordered_double_lock_self(&'a self) -> MutexGuard<T> { self.lock().unwrap() }
+		fn unsafe_well_ordered_double_lock_self(&'a self) -> MutexGuard<T> {
+			self.lock().unwrap()
+		}
 	}
 	impl<'a, T: 'a> LockTestExt<'a> for RwLock<T> {
 		#[inline]
-		fn held_by_thread(&self) -> LockHeldState { LockHeldState::Unsupported }
+		fn held_by_thread(&self) -> LockHeldState {
+			LockHeldState::Unsupported
+		}
 		type ExclLock = RwLockWriteGuard<'a, T>;
 		#[inline]
-		fn unsafe_well_ordered_double_lock_self(&'a self) -> RwLockWriteGuard<T> { self.write().unwrap() }
+		fn unsafe_well_ordered_double_lock_self(&'a self) -> RwLockWriteGuard<T> {
+			self.write().unwrap()
+		}
 	}
 }
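
For context, the impls reformatted above implement a test-support trait defined earlier in this module. The following self-contained sketch shows the shape those impls satisfy; the real LockTestExt and LockHeldState in lightning/src/sync/mod.rs may differ in detail:

use std::sync::{Mutex, MutexGuard};

#[allow(dead_code)]
#[derive(Debug, PartialEq)]
enum LockHeldState {
	HeldByThread,
	NotHeldByThread,
	Unsupported,
}

trait LockTestExt<'a> {
	fn held_by_thread(&self) -> LockHeldState;
	type ExclLock;
	fn unsafe_well_ordered_double_lock_self(&'a self) -> Self::ExclLock;
}

impl<'a, T: 'a> LockTestExt<'a> for Mutex<T> {
	// std's Mutex cannot report whether *this* thread is the holder.
	fn held_by_thread(&self) -> LockHeldState {
		LockHeldState::Unsupported
	}
	type ExclLock = MutexGuard<'a, T>;
	fn unsafe_well_ordered_double_lock_self(&'a self) -> MutexGuard<'a, T> {
		self.lock().unwrap()
	}
}

fn main() {
	let m = Mutex::new(0u8);
	assert_eq!(m.held_by_thread(), LockHeldState::Unsupported);
}
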
lightning/src/sync/nostd_sync.rs

Lines changed: 22 additions & 12 deletions
@@ -1,12 +1,12 @@
-pub use ::alloc::sync::Arc;
+use super::{LockHeldState, LockTestExt};
+pub use alloc::sync::Arc;
+use core::cell::{Ref, RefCell, RefMut};
 use core::ops::{Deref, DerefMut};
-use core::cell::{RefCell, Ref, RefMut};
-use super::{LockTestExt, LockHeldState};

 pub type LockResult<Guard> = Result<Guard, ()>;

 pub struct Mutex<T: ?Sized> {
-	inner: RefCell<T>
+	inner: RefCell<T>,
 }

 #[must_use = "if unused the Mutex will immediately unlock"]
@@ -45,16 +45,21 @@ impl<T> Mutex<T> {
 impl<'a, T: 'a> LockTestExt<'a> for Mutex<T> {
 	#[inline]
 	fn held_by_thread(&self) -> LockHeldState {
-		if self.inner.try_borrow_mut().is_err() { return LockHeldState::HeldByThread; }
-		else { return LockHeldState::NotHeldByThread; }
+		if self.inner.try_borrow_mut().is_err() {
+			return LockHeldState::HeldByThread;
+		} else {
+			return LockHeldState::NotHeldByThread;
+		}
 	}
 	type ExclLock = MutexGuard<'a, T>;
 	#[inline]
-	fn unsafe_well_ordered_double_lock_self(&'a self) -> MutexGuard<T> { self.lock().unwrap() }
+	fn unsafe_well_ordered_double_lock_self(&'a self) -> MutexGuard<T> {
+		self.lock().unwrap()
+	}
 }

 pub struct RwLock<T: ?Sized> {
-	inner: RefCell<T>
+	inner: RefCell<T>,
 }

 pub struct RwLockReadGuard<'a, T: ?Sized + 'a> {
@@ -103,20 +108,25 @@ impl<T> RwLock<T> {
 	pub fn try_write<'a>(&'a self) -> LockResult<RwLockWriteGuard<'a, T>> {
 		match self.inner.try_borrow_mut() {
 			Ok(lock) => Ok(RwLockWriteGuard { lock }),
-			Err(_) => Err(())
+			Err(_) => Err(()),
 		}
 	}
 }

 impl<'a, T: 'a> LockTestExt<'a> for RwLock<T> {
 	#[inline]
 	fn held_by_thread(&self) -> LockHeldState {
-		if self.inner.try_borrow_mut().is_err() { return LockHeldState::HeldByThread; }
-		else { return LockHeldState::NotHeldByThread; }
+		if self.inner.try_borrow_mut().is_err() {
+			return LockHeldState::HeldByThread;
+		} else {
+			return LockHeldState::NotHeldByThread;
+		}
 	}
 	type ExclLock = RwLockWriteGuard<'a, T>;
 	#[inline]
-	fn unsafe_well_ordered_double_lock_self(&'a self) -> RwLockWriteGuard<T> { self.write().unwrap() }
+	fn unsafe_well_ordered_double_lock_self(&'a self) -> RwLockWriteGuard<T> {
+		self.write().unwrap()
+	}
 }

 pub type FairRwLock<T> = RwLock<T>;
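
The held_by_thread bodies reformatted above lean on RefCell's borrow tracking: in this single-threaded no-std build, a live mutable borrow is what it means for the lock to be held. A tiny standalone check of that behaviour (illustrative only, exercising core's RefCell directly rather than the wrappers):

use core::cell::RefCell;

fn main() {
	let cell = RefCell::new(0u32);
	{
		let _guard = cell.borrow_mut(); // the "lock" is held
		// While the mutable borrow is live, try_borrow_mut() fails,
		// which is the case held_by_thread maps to HeldByThread.
		assert!(cell.try_borrow_mut().is_err());
	}
	// Once the borrow is dropped the check succeeds again (NotHeldByThread).
	assert!(cell.try_borrow_mut().is_ok());
}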

lightning/src/sync/test_lockorder_checks.rs

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-use crate::sync::debug_sync::{RwLock, Mutex};
+use crate::sync::debug_sync::{Mutex, RwLock};

 use super::{LockHeldState, LockTestExt};

rustfmt_excluded_files

Lines changed: 0 additions & 5 deletions
@@ -72,11 +72,6 @@
 ./lightning/src/routing/scoring.rs
 ./lightning/src/routing/test_utils.rs
 ./lightning/src/routing/utxo.rs
-./lightning/src/sync/debug_sync.rs
-./lightning/src/sync/fairrwlock.rs
-./lightning/src/sync/mod.rs
-./lightning/src/sync/nostd_sync.rs
-./lightning/src/sync/test_lockorder_checks.rs
 ./lightning/src/util/atomic_counter.rs
 ./lightning/src/util/base32.rs
 ./lightning/src/util/byte_utils.rs

0 commit comments
