// std/sys/thread_local/os.rs

1use super::key::{Key, LazyKey, get, set};
2use super::{abort_on_dtor_unwind, guard};
3use crate::alloc::{self, Layout};
4use crate::cell::Cell;
5use crate::marker::PhantomData;
6use crate::mem::ManuallyDrop;
7use crate::ops::Deref;
8use crate::panic::{AssertUnwindSafe, catch_unwind, resume_unwind};
9use crate::ptr::{self, NonNull};
10
#[doc(hidden)]
#[allow_internal_unstable(thread_local_internals)]
#[allow_internal_unsafe]
#[unstable(feature = "thread_local_internals", issue = "none")]
#[rustc_macro_transparency = "semitransparent"]
pub macro thread_local_inner {
    // NOTE: we cannot import `Storage` or `LocalKey` with a `use` because that can shadow user
    // provided type or type alias with a matching name. Please update the shadowing test in
    // `tests/thread.rs` if these types are renamed.

    // used to generate the `LocalKey` value for `thread_local!`.
    (@key $t:ty, $($(#[$($align_attr:tt)*])+)?, $init:expr) => {{
        #[inline]
        fn __rust_std_internal_init_fn() -> $t { $init }

        // NOTE: this cannot import `LocalKey` or `Storage` with a `use` because that can shadow
        // user provided type or type alias with a matching name. Please update the shadowing test
        // in `tests/thread.rs` if these types are renamed.
        unsafe {
            $crate::thread::LocalKey::new(|__rust_std_internal_init| {
                // The second const-generic argument of `Storage` is the alignment of the
                // per-thread allocation; it is computed in this const block from the
                // user's `#[rustc_align_static]`/`cfg_attr` attributes.
                static __RUST_STD_INTERNAL_VAL: $crate::thread::local_impl::Storage<$t, {
                    $({
                        // Ensure that attributes have valid syntax
                        // and that the proper feature gate is enabled
                        // (by applying them to a throwaway static).
                        $(#[$($align_attr)*])+
                        #[allow(unused)]
                        static DUMMY: () = ();
                    })?

                    // Start from the natural alignment of the allocation and raise it
                    // for each attribute; `Storage`'s invariant requires the result to
                    // never be below `value_align::<$t>()`.
                    #[allow(unused_mut)]
                    let mut final_align = $crate::thread::local_impl::value_align::<$t>();
                    $($($crate::thread::local_impl::thread_local_inner!(@align final_align, $($align_attr)*);)+)?
                    final_align
                }>
                    = $crate::thread::local_impl::Storage::new();
                __RUST_STD_INTERNAL_VAL.get(__rust_std_internal_init, __rust_std_internal_init_fn)
            })
        }
    }},

    // process a single `rustc_align_static` attribute
    // by taking the maximum of the requested and the current alignment,
    // then recursing on any remaining attributes.
    (@align $final_align:ident, rustc_align_static($($align:tt)*) $(, $($attr_rest:tt)+)?) => {
        let new_align: $crate::primitive::usize = $($align)*;
        if new_align > $final_align {
            $final_align = new_align;
        }

        $($crate::thread::local_impl::thread_local_inner!(@align $final_align, $($attr_rest)+);)?
    },

    // process a single `cfg_attr` attribute
    // by translating it into a `cfg`ed block and recursing.
    // https://doc.rust-lang.org/reference/conditional-compilation.html#railroad-ConfigurationPredicate

    // NOTE(review): the literal `true`/`false` predicates get dedicated rules here,
    // presumably because the general `$cfg_pred:meta` rule below does not accept
    // them — confirm before merging these arms.
    (@align $final_align:ident, cfg_attr(true, $($cfg_rhs:tt)*) $(, $($attr_rest:tt)+)?) => {
        #[cfg(true)]
        {
            $crate::thread::local_impl::thread_local_inner!(@align $final_align, $($cfg_rhs)*);
        }

        $($crate::thread::local_impl::thread_local_inner!(@align $final_align, $($attr_rest)+);)?
    },

    (@align $final_align:ident, cfg_attr(false, $($cfg_rhs:tt)*) $(, $($attr_rest:tt)+)?) => {
        #[cfg(false)]
        {
            $crate::thread::local_impl::thread_local_inner!(@align $final_align, $($cfg_rhs)*);
        }

        $($crate::thread::local_impl::thread_local_inner!(@align $final_align, $($attr_rest)+);)?
    },

    // general `cfg_attr` with an arbitrary configuration predicate
    (@align $final_align:ident, cfg_attr($cfg_pred:meta, $($cfg_rhs:tt)*) $(, $($attr_rest:tt)+)?) => {
        #[cfg($cfg_pred)]
        {
            $crate::thread::local_impl::thread_local_inner!(@align $final_align, $($cfg_rhs)*);
        }

        $($crate::thread::local_impl::thread_local_inner!(@align $final_align, $($attr_rest)+);)?
    },
}
92
/// Use a regular global static to store this key; the state provided will then be
/// thread-local.
///
/// INVARIANT: ALIGN must be a valid alignment, and no less than `value_align::<T>`.
#[allow(missing_debug_implementations)]
pub struct Storage<T, const ALIGN: usize> {
    // Lazily-allocated OS TLS key; each thread's `Value<T>` allocation is
    // stored under this key (see `Storage::get`).
    key: LazyKey,
    // Ties `T` into the type for variance/auto-trait purposes without storing
    // one; the `Cell` makes the type non-`Sync` by default.
    marker: PhantomData<Cell<T>>,
}
101
// SAFETY: a `Storage` never hands out a pointer to another thread's value:
// all access goes through `get`/`set` on the calling thread's own slot for
// `key` (see `Storage::get`), so sharing the `Storage` itself is sound.
unsafe impl<T, const ALIGN: usize> Sync for Storage<T, ALIGN> {}
103
// The per-thread heap allocation stored under the TLS key. `#[repr(C)]`
// guarantees field order, so `value` is at offset 0 of the allocation.
#[repr(C)]
struct Value<T: 'static> {
    // This field must be first, for correctness of `#[rustc_align_static]`
    value: T,
    // INVARIANT: if this value is stored under a TLS key, `key` must be that `key`.
    // This lets the destructor (`destroy_value`) reset the slot it was stored in.
    key: Key,
}
111
/// The minimum alignment for a thread-local allocation of type `T`: the natural
/// alignment of the full `Value<T>` (the user's `T` plus the trailing `Key`).
pub const fn value_align<T: 'static>() -> usize {
    crate::mem::align_of::<Value<T>>()
}
115
/// Equivalent to `Box<Value<T>>`, but potentially over-aligned.
///
/// INVARIANT: `ptr` points to an initialized `Value<T>` allocated with the
/// layout of `Value<T>` raised to alignment `ALIGN` (see `AlignedBox::new`).
struct AlignedBox<T: 'static, const ALIGN: usize> {
    ptr: NonNull<Value<T>>,
}
120
impl<T: 'static, const ALIGN: usize> AlignedBox<T, ALIGN> {
    /// Allocates over-aligned memory and moves `v` into it.
    /// Diverges via `handle_alloc_error` if the allocation fails.
    #[inline]
    fn new(v: Value<T>) -> Self {
        // `ALIGN` is a valid alignment not below the natural one (invariant of
        // `Storage`), so `align_to` cannot fail here.
        let layout = Layout::new::<Value<T>>().align_to(ALIGN).unwrap();

        // NOTE(review): `alloc` requires a non-zero-sized layout; `Value<T>`
        // always contains a `Key`, which is assumed non-zero-sized — confirm.
        let ptr: *mut Value<T> = (unsafe { alloc::alloc(layout) }).cast();
        let Some(ptr) = NonNull::new(ptr) else {
            alloc::handle_alloc_error(layout);
        };
        // SAFETY: `ptr` is non-null and valid for writes of `Value<T>` per `layout`.
        unsafe { ptr.write(v) };
        Self { ptr }
    }

    /// Consumes the box without freeing it, returning the raw allocation.
    /// Reclaim ownership with `from_raw`.
    #[inline]
    fn into_raw(b: Self) -> *mut Value<T> {
        // `ManuallyDrop` suppresses `Drop`, leaking the allocation to the caller.
        let md = ManuallyDrop::new(b);
        md.ptr.as_ptr()
    }

    /// Rebuilds a box from a pointer previously returned by `into_raw`.
    ///
    /// # Safety
    /// `ptr` must come from `into_raw` of an `AlignedBox` with the same `T`
    /// and `ALIGN`, and must not be used again afterwards.
    #[inline]
    unsafe fn from_raw(ptr: *mut Value<T>) -> Self {
        // SAFETY: pointers produced by `into_raw` derive from a `NonNull`,
        // so they are non-null.
        Self { ptr: unsafe { NonNull::new_unchecked(ptr) } }
    }
}
145
146impl<T: 'static, const ALIGN: usize> Deref for AlignedBox<T, ALIGN> {
147    type Target = Value<T>;
148
149    #[inline]
150    fn deref(&self) -> &Self::Target {
151        unsafe { &*(self.ptr.as_ptr()) }
152    }
153}
154
impl<T: 'static, const ALIGN: usize> Drop for AlignedBox<T, ALIGN> {
    #[inline]
    fn drop(&mut self) {
        // Must match the layout used in `AlignedBox::new`; `ALIGN` is a valid
        // alignment, so `align_to` cannot fail.
        let layout = Layout::new::<Value<T>>().align_to(ALIGN).unwrap();

        unsafe {
            // Run the value's destructor, but deallocate the backing memory
            // even if that destructor panics; the panic is resumed afterwards
            // so the memory is never leaked.
            let unwind_result = catch_unwind(AssertUnwindSafe(|| self.ptr.drop_in_place()));
            alloc::dealloc(self.ptr.as_ptr().cast(), layout);
            if let Err(payload) = unwind_result {
                resume_unwind(payload);
            }
        }
    }
}
169
impl<T: 'static, const ALIGN: usize> Storage<T, ALIGN> {
    /// Creates an empty storage slot. The OS TLS key is allocated lazily, on
    /// first use, with `destroy_value` registered as its destructor.
    pub const fn new() -> Storage<T, ALIGN> {
        Storage { key: LazyKey::new(Some(destroy_value::<T, ALIGN>)), marker: PhantomData }
    }

    /// Gets a pointer to the TLS value, potentially initializing it with the
    /// provided parameters. If the TLS variable has been destroyed, a null
    /// pointer is returned.
    ///
    /// The resulting pointer may not be used after reentrant initialization
    /// or thread destruction has occurred.
    pub fn get(&'static self, i: Option<&mut Option<T>>, f: impl FnOnce() -> T) -> *const T {
        let key = self.key.force();
        let ptr = unsafe { get(key) as *mut Value<T> };
        // Address 0 means "not yet initialized" and address 1 means "destructor
        // currently running" (see `destroy_value`); anything else is a real
        // allocation produced by `try_initialize`.
        if ptr.addr() > 1 {
            // SAFETY: the check ensured the pointer is safe (its destructor
            // is not running) + it is coming from a trusted source (self).
            unsafe { &(*ptr).value }
        } else {
            // SAFETY: trivially correct.
            unsafe { Self::try_initialize(key, ptr, i, f) }
        }
    }

    /// Slow path of `get`: initializes the value for the current thread,
    /// dropping any value installed by a reentrant initializer.
    ///
    /// # Safety
    /// * `key` must be the result of calling `self.key.force()`
    /// * `ptr` must be the current value associated with `key`.
    unsafe fn try_initialize(
        key: Key,
        ptr: *mut Value<T>,
        i: Option<&mut Option<T>>,
        f: impl FnOnce() -> T,
    ) -> *const T {
        if ptr.addr() == 1 {
            // destructor is running
            return ptr::null();
        }

        // Prefer the caller-provided value in `i`; otherwise run the
        // (possibly reentrant) initializer `f`.
        let value = AlignedBox::<T, ALIGN>::new(Value {
            value: i.and_then(Option::take).unwrap_or_else(f),
            key,
        });
        let ptr = AlignedBox::into_raw(value);

        // SAFETY:
        // * key came from a `LazyKey` and is thus correct.
        // * `ptr` is a correct pointer that can be destroyed by the key destructor.
        // * the value is stored under the key that it contains.
        let old = unsafe {
            // Re-read the slot: `f` may have reentrantly initialized it above.
            let old = get(key) as *mut Value<T>;
            set(key, ptr as *mut u8);
            old
        };

        if !old.is_null() {
            // If the variable was recursively initialized, drop the old value.
            // SAFETY: We cannot be inside a `LocalKey::with` scope, as the
            // initializer has already returned and the next scope only starts
            // after we return the pointer. Therefore, there can be no references
            // to the old value.
            drop(unsafe { AlignedBox::<T, ALIGN>::from_raw(old) });
        }

        // SAFETY: We just created this value above.
        unsafe { &(*ptr).value }
    }
}
237
/// TLS destructor registered for every `Storage` key. `ptr` is the `Value<T>`
/// allocation that was stored under the key on the exiting thread.
unsafe extern "C" fn destroy_value<T: 'static, const ALIGN: usize>(ptr: *mut u8) {
    // SAFETY:
    //
    // The OS TLS ensures that this key contains a null value when this
    // destructor starts to run. We set it back to a sentinel value of 1 to
    // ensure that any future calls to `get` for this thread will return
    // `None`.
    //
    // Note that to prevent an infinite loop we reset it back to null right
    // before we return from the destructor ourselves.
    abort_on_dtor_unwind(|| {
        let ptr = unsafe { AlignedBox::<T, ALIGN>::from_raw(ptr as *mut Value<T>) };
        // INVARIANT of `Value`: `key` is the key this allocation was stored under.
        let key = ptr.key;
        // SAFETY: `key` is the TLS key `ptr` was stored under.
        unsafe { set(key, ptr::without_provenance_mut(1)) };
        // Runs `T`'s destructor and frees the allocation; a panic here aborts
        // via `abort_on_dtor_unwind`.
        drop(ptr);
        // SAFETY: `key` is the TLS key `ptr` was stored under.
        unsafe { set(key, ptr::null_mut()) };
        // Make sure that the runtime cleanup will be performed
        // after the next round of TLS destruction.
        guard::enable();
    });
}
261
/// Declares one or more `static` `LocalPointer`s, e.g.
/// `local_pointer! { static PTR1; pub(super) static PTR2; }`.
#[rustc_macro_transparency = "semitransparent"]
pub(crate) macro local_pointer {
    // base case: no declarations left
    () => {},
    // consume one `$vis static $name;` declaration, then recurse on the rest
    ($vis:vis static $name:ident; $($rest:tt)*) => {
        $vis static $name: $crate::sys::thread_local::LocalPointer = $crate::sys::thread_local::LocalPointer::__new();
        $crate::sys::thread_local::local_pointer! { $($rest)* }
    },
}
270
/// A single thread-local pointer slot backed by one lazily-allocated OS TLS
/// key with no destructor. Declared via the `local_pointer!` macro.
pub(crate) struct LocalPointer {
    key: LazyKey,
}
274
275impl LocalPointer {
276    pub const fn __new() -> LocalPointer {
277        LocalPointer { key: LazyKey::new(None) }
278    }
279
280    pub fn get(&'static self) -> *mut () {
281        unsafe { get(self.key.force()) as *mut () }
282    }
283
284    pub fn set(&'static self, p: *mut ()) {
285        unsafe { set(self.key.force(), p as *mut u8) }
286    }
287}