Commit d101794

remove inlined lazy::Waiter in favor of sync::Once
1 parent d1263f5 commit d101794

File tree

2 files changed (+29, -129 lines)

2 files changed

+29
-129
lines changed
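
In short, this commit drops `SyncOnceCell`'s hand-rolled waiter queue and delegates all synchronization to `std::sync::Once`. The following is a minimal sketch of the resulting pattern written outside std; the type name `OnceCellSketch` and its methods are illustrative only, not the actual std code.

use std::cell::UnsafeCell;
use std::mem::MaybeUninit;
use std::sync::Once;

// Sketch of the pattern this commit moves to: `Once` provides all
// cross-thread synchronization; the cell itself only stores the value.
struct OnceCellSketch<T> {
    once: Once,
    value: UnsafeCell<MaybeUninit<T>>,
}

// `Once` guarantees the value is written exactly once before any read,
// so sharing across threads is sound for Send + Sync payloads.
unsafe impl<T: Send + Sync> Sync for OnceCellSketch<T> {}

impl<T> OnceCellSketch<T> {
    const fn new() -> Self {
        OnceCellSketch { once: Once::new(), value: UnsafeCell::new(MaybeUninit::uninit()) }
    }

    fn get_or_init(&self, f: impl FnOnce() -> T) -> &T {
        // Only one thread ever runs this closure; other callers block until it finishes.
        self.once.call_once(|| unsafe {
            (*self.value.get()).write(f());
        });
        // `call_once` has returned, so the value is initialized.
        unsafe { &*(*self.value.get()).as_ptr() }
    }
}

// NOTE: a real implementation (like std's `SyncOnceCell`) also needs a `Drop`
// impl to run the value's destructor, and uses `call_once_force` so that a
// failed initialization does not wedge the cell.

fn main() {
    let cell = OnceCellSketch::new();
    assert_eq!(*cell.get_or_init(|| 92), 92);
    assert_eq!(*cell.get_or_init(|| 100), 92); // second init is ignored
}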

src/libstd/lazy.rs

Lines changed: 15 additions & 125 deletions
@@ -3,12 +3,10 @@
 use crate::{
     cell::{Cell, UnsafeCell},
     fmt,
-    marker::PhantomData,
     mem::{self, MaybeUninit},
     ops::{Deref, Drop},
     panic::{RefUnwindSafe, UnwindSafe},
-    sync::atomic::{AtomicBool, AtomicUsize, Ordering},
-    thread::{self, Thread},
+    sync::Once,
 };

 #[doc(inline)]
@@ -42,10 +40,7 @@ pub use core::lazy::*;
 /// ```
 #[unstable(feature = "once_cell", issue = "68198")]
 pub struct SyncOnceCell<T> {
-    // This `state` word is actually an encoded version of just a pointer to a
-    // `Waiter`, so we add the `PhantomData` appropriately.
-    state_and_queue: AtomicUsize,
-    _marker: PhantomData<*mut Waiter>,
+    once: Once,
     // Whether or not the value is initialized is tracked by `state_and_queue`.
     value: UnsafeCell<MaybeUninit<T>>,
 }
@@ -122,8 +117,7 @@ impl<T> SyncOnceCell<T> {
     #[unstable(feature = "once_cell", issue = "68198")]
     pub const fn new() -> SyncOnceCell<T> {
         SyncOnceCell {
-            state_and_queue: AtomicUsize::new(INCOMPLETE),
-            _marker: PhantomData,
+            once: Once::new(),
             value: UnsafeCell::new(MaybeUninit::uninit()),
         }
     }
@@ -135,7 +129,7 @@ impl<T> SyncOnceCell<T> {
     #[unstable(feature = "once_cell", issue = "68198")]
     pub fn get(&self) -> Option<&T> {
         if self.is_initialized() {
-            // Safe b/c checked is_initialize
+            // Safe b/c checked is_initialized
             Some(unsafe { self.get_unchecked() })
         } else {
             None
@@ -148,7 +142,7 @@ impl<T> SyncOnceCell<T> {
     #[unstable(feature = "once_cell", issue = "68198")]
     pub fn get_mut(&mut self) -> Option<&mut T> {
         if self.is_initialized() {
-            // Safe b/c checked is_initialized and we have a unique access
+            // Safe b/c checked is_initialized and we have a unique access
             Some(unsafe { self.get_unchecked_mut() })
         } else {
             None
@@ -350,37 +344,32 @@ impl<T> SyncOnceCell<T> {
         }
     }

-    /// Safety: synchronizes with store to value via Release/(Acquire|SeqCst).
     #[inline]
     fn is_initialized(&self) -> bool {
-        // An `Acquire` load is enough because that makes all the initialization
-        // operations visible to us, and, this being a fast path, weaker
-        // ordering helps with performance. This `Acquire` synchronizes with
-        // `SeqCst` operations on the slow path.
-        self.state_and_queue.load(Ordering::Acquire) == COMPLETE
+        self.once.is_completed()
     }

-    /// Safety: synchronizes with store to value via SeqCst read from state,
-    /// writes value only once because we never get to INCOMPLETE state after a
-    /// successful write.
     #[cold]
     fn initialize<F, E>(&self, f: F) -> Result<(), E>
     where
         F: FnOnce() -> Result<T, E>,
     {
-        let mut f = Some(f);
         let mut res: Result<(), E> = Ok(());
         let slot = &self.value;
-        initialize_inner(&self.state_and_queue, &mut || {
-            let f = f.take().unwrap();
+
+        // Ignore poisoning from other threads
+        // If another thread panics, then we'll be able to run our closure
+        self.once.call_once_force(|p| {
             match f() {
                 Ok(value) => {
                     unsafe { (&mut *slot.get()).write(value) };
-                    true
                 }
                 Err(e) => {
                     res = Err(e);
-                    false
+
+                    // Treat the underlying `Once` as poisoned since we
+                    // failed to initialize our value. Calls
+                    p.poison();
                 }
             }
         });
@@ -407,106 +396,6 @@ impl<T> Drop for SyncOnceCell<T> {
     }
 }

-const INCOMPLETE: usize = 0x0;
-const RUNNING: usize = 0x1;
-const COMPLETE: usize = 0x2;
-
-const STATE_MASK: usize = 0x3;
-
-// The alignment here is so that we can stash the state in the lower
-// bits of the `next` pointer
-#[repr(align(4))]
-struct Waiter {
-    thread: Cell<Option<Thread>>,
-    signaled: AtomicBool,
-    next: *const Waiter,
-}
-
-struct WaiterQueue<'a> {
-    state_and_queue: &'a AtomicUsize,
-    set_state_on_drop_to: usize,
-}
-
-impl Drop for WaiterQueue<'_> {
-    fn drop(&mut self) {
-        let state_and_queue =
-            self.state_and_queue.swap(self.set_state_on_drop_to, Ordering::AcqRel);
-
-        assert_eq!(state_and_queue & STATE_MASK, RUNNING);
-
-        unsafe {
-            let mut queue = (state_and_queue & !STATE_MASK) as *const Waiter;
-            while !queue.is_null() {
-                let next = (*queue).next;
-                let thread = (*queue).thread.replace(None).unwrap();
-                (*queue).signaled.store(true, Ordering::Release);
-                queue = next;
-                thread.unpark();
-            }
-        }
-    }
-}
-
-fn initialize_inner(my_state_and_queue: &AtomicUsize, init: &mut dyn FnMut() -> bool) -> bool {
-    let mut state_and_queue = my_state_and_queue.load(Ordering::Acquire);
-
-    loop {
-        match state_and_queue {
-            COMPLETE => return true,
-            INCOMPLETE => {
-                let old = my_state_and_queue.compare_and_swap(
-                    state_and_queue,
-                    RUNNING,
-                    Ordering::Acquire,
-                );
-                if old != state_and_queue {
-                    state_and_queue = old;
-                    continue;
-                }
-                let mut waiter_queue = WaiterQueue {
-                    state_and_queue: my_state_and_queue,
-                    set_state_on_drop_to: INCOMPLETE,
-                };
-                let success = init();
-
-                waiter_queue.set_state_on_drop_to = if success { COMPLETE } else { INCOMPLETE };
-                return success;
-            }
-            _ => {
-                assert!(state_and_queue & STATE_MASK == RUNNING);
-                wait(&my_state_and_queue, state_and_queue);
-                state_and_queue = my_state_and_queue.load(Ordering::Acquire);
-            }
-        }
-    }
-}
-
-fn wait(state_and_queue: &AtomicUsize, mut current_state: usize) {
-    loop {
-        if current_state & STATE_MASK != RUNNING {
-            return;
-        }
-
-        let node = Waiter {
-            thread: Cell::new(Some(thread::current())),
-            signaled: AtomicBool::new(false),
-            next: (current_state & !STATE_MASK) as *const Waiter,
-        };
-        let me = &node as *const Waiter as usize;
-
-        let old = state_and_queue.compare_and_swap(current_state, me | RUNNING, Ordering::Release);
-        if old != current_state {
-            current_state = old;
-            continue;
-        }
-
-        while !node.signaled.load(Ordering::Acquire) {
-            thread::park();
-        }
-        break;
-    }
-}
-
 /// A value which is initialized on the first access.
 ///
 /// This type is a thread-safe `Lazy`, and can be used in statics.
@@ -763,6 +652,7 @@ mod tests {

         let res = panic::catch_unwind(|| cell.get_or_try_init(|| -> Result<_, ()> { panic!() }));
         assert!(res.is_err());
+        assert!(!cell.is_initialized());
         assert!(cell.get().is_none());

         assert_eq!(cell.get_or_try_init(|| Err(())), Err(()));
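
The new test assertion checks that a panicking initializer leaves the cell empty rather than wedged. Below is a hedged usage sketch of the same property against the unstable API of that era; the `#![feature(once_cell)]` gate and the `std::lazy::SyncOnceCell` path are as they were at this commit and have since moved.

#![feature(once_cell)] // nightly-only at the time of this commit

use std::lazy::SyncOnceCell;

fn main() {
    let cell: SyncOnceCell<u32> = SyncOnceCell::new();

    // A fallible initializer that fails leaves the cell untouched...
    assert_eq!(cell.get_or_try_init(|| Err(())), Err(()));
    assert!(cell.get().is_none());

    // ...so a later attempt can still succeed, because the failed attempt
    // poisons the inner `Once` and `call_once_force` ignores that poison.
    assert_eq!(cell.get_or_try_init(|| Ok::<_, ()>(92)), Ok(&92));
    assert_eq!(cell.get(), Some(&92));
}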

src/libstd/sync/once.rs

Lines changed: 14 additions & 4 deletions
@@ -132,6 +132,7 @@ unsafe impl Send for Once {}
 #[derive(Debug)]
 pub struct OnceState {
     poisoned: bool,
+    set_state_on_drop_to: Cell<usize>,
 }

 /// Initialization value for static [`Once`] values.
@@ -321,7 +322,7 @@ impl Once {
         }

         let mut f = Some(f);
-        self.call_inner(true, &mut |p| f.take().unwrap()(&OnceState { poisoned: p }));
+        self.call_inner(true, &mut |p| f.take().unwrap()(p));
     }

     /// Returns `true` if some `call_once` call has completed
@@ -385,7 +386,7 @@ impl Once {
     // currently no way to take an `FnOnce` and call it via virtual dispatch
     // without some allocation overhead.
     #[cold]
-    fn call_inner(&self, ignore_poisoning: bool, init: &mut dyn FnMut(bool)) {
+    fn call_inner(&self, ignore_poisoning: bool, init: &mut dyn FnMut(&OnceState)) {
         let mut state_and_queue = self.state_and_queue.load(Ordering::Acquire);
         loop {
             match state_and_queue {
@@ -413,8 +414,9 @@ impl Once {
                 };
                 // Run the initialization function, letting it know if we're
                 // poisoned or not.
-                init(state_and_queue == POISONED);
-                waiter_queue.set_state_on_drop_to = COMPLETE;
+                let init_state = OnceState { poisoned: state_and_queue == POISONED, set_state_on_drop_to: Cell::new(COMPLETE) };
+                init(&init_state);
+                waiter_queue.set_state_on_drop_to = init_state.set_state_on_drop_to.get();
                 break;
             }
             _ => {
@@ -554,6 +556,14 @@ impl OnceState {
     pub fn poisoned(&self) -> bool {
         self.poisoned
     }
+
+    /// Poison the associated [`Once`] without explicitly panicking.
+    ///
+    /// [`Once`]: struct.Once.html
+    // NOTE: This is currently only exposed for the `lazy` module
+    pub(crate) fn poison(&self) {
+        self.set_state_on_drop_to.set(POISONED);
+    }
 }

 #[cfg(all(test, not(target_os = "emscripten")))]
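
For context on the `OnceState` change above, here is a small sketch of how `call_once_force` and `OnceState::poisoned` behave; both were unstable when this commit landed and were stabilized in later releases, while the new `poison` method itself stays `pub(crate)`.

use std::panic;
use std::sync::Once;

fn main() {
    static INIT: Once = Once::new();

    // Panicking inside `call_once` poisons the `Once`.
    let _ = panic::catch_unwind(|| {
        INIT.call_once(|| panic!("initialization failed"));
    });
    assert!(!INIT.is_completed());

    // `call_once_force` runs anyway and reports the poison through `&OnceState`,
    // the same argument that `SyncOnceCell::initialize` now receives and can
    // re-poison via the crate-private `poison` method when its closure fails.
    INIT.call_once_force(|state| {
        assert!(state.poisoned());
    });
    assert!(INIT.is_completed());
}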

0 commit comments
