portable_atomic/imp/fallback/outline_atomics.rs

// SPDX-License-Identifier: Apache-2.0 OR MIT

/*
Helper for outline-atomics.

On architectures where older CPUs may lack the DW (double-word) atomic
instructions, we use the fallback implementation at runtime when those
instructions are unavailable and outline-atomics is enabled.

This module provides helpers to implement that fallback.
*/
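// How these helpers are reached (sketch only; the real dispatch lives in the
// arch-specific modules and its exact shape is an assumption here): the arch
// module detects CPU features at runtime and calls the #[cold] functions below
// only when the native DW instructions are unavailable, e.g.:
//
//     // hypothetical x86_64-style caller
//     unsafe fn load(src: *mut u128) -> u128 {
//         if detect().has_cmpxchg16b() {
//             cmpxchg16b_load(src) // native inline-asm path (hypothetical name)
//         } else {
//             atomic_load_seqcst(src) // outlined fallback defined in this module
//         }
//     }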

use core::sync::atomic::Ordering;

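// Udw is the unsigned double-word integer type for the target (u128 on 64-bit
// targets, u64 on 32-bit targets); AtomicUdw/AtomicIdw are the corresponding
// unsigned/signed fallback atomic types.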
#[cfg(any(target_arch = "x86_64", target_arch = "powerpc64", target_arch = "riscv64"))]
pub(crate) type Udw = u128;
#[cfg(any(target_arch = "x86_64", target_arch = "powerpc64", target_arch = "riscv64"))]
pub(crate) type AtomicUdw = super::super::super::fallback::AtomicU128;
#[cfg(any(target_arch = "x86_64", target_arch = "powerpc64", target_arch = "riscv64"))]
pub(crate) type AtomicIdw = super::super::super::fallback::AtomicI128;

#[cfg(any(target_arch = "arm", target_arch = "riscv32"))]
pub(crate) type Udw = u64;
#[cfg(any(target_arch = "arm", target_arch = "riscv32"))]
pub(crate) type AtomicUdw = super::super::super::fallback::AtomicU64;
#[cfg(any(target_arch = "arm", target_arch = "riscv32"))]
pub(crate) type AtomicIdw = super::super::super::fallback::AtomicI64;

// Asserts that the function is called in the correct context: these outlined
// fallbacks must only be reached when runtime detection reports that the native
// DW atomic instructions (or the kuser_cmpxchg64 helper on Arm) are unavailable.
macro_rules! debug_assert_outline_atomics {
    () => {
        #[cfg(target_arch = "x86_64")]
        {
            debug_assert!(!super::detect::detect().has_cmpxchg16b());
        }
        #[cfg(target_arch = "powerpc64")]
        {
            debug_assert!(!super::detect::detect().has_quadword_atomics());
        }
        #[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))]
        {
            debug_assert!(!super::detect::detect().has_zacas());
        }
        #[cfg(target_arch = "arm")]
        {
            debug_assert!(!super::has_kuser_cmpxchg64());
        }
    };
}

#[cold]
pub(crate) unsafe fn atomic_load(src: *mut Udw, order: Ordering) -> Udw {
    debug_assert_outline_atomics!();
    #[allow(clippy::cast_ptr_alignment)]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        (*(src as *const AtomicUdw)).load(order)
    }
}
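// fn_alias! (defined elsewhere in this crate) generates thin #[cold] wrapper
// functions with the ordering argument fixed; as used here, atomic_load_seqcst(src)
// is equivalent to atomic_load(src, Ordering::SeqCst).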
fn_alias! {
    #[cold]
    pub(crate) unsafe fn(src: *mut Udw) -> Udw;
    // fallback's atomic load has at least acquire semantics.
    #[cfg(not(any(target_arch = "arm", target_arch = "x86_64")))]
    atomic_load_non_seqcst = atomic_load(Ordering::Acquire);
    atomic_load_seqcst = atomic_load(Ordering::SeqCst);
}

#[cfg(not(any(target_arch = "riscv32", target_arch = "riscv64")))]
#[cold]
pub(crate) unsafe fn atomic_store(dst: *mut Udw, val: Udw, order: Ordering) {
    debug_assert_outline_atomics!();
    #[allow(clippy::cast_ptr_alignment)]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        (*(dst as *const AtomicUdw)).store(val, order);
    }
}
#[cfg(not(any(target_arch = "riscv32", target_arch = "riscv64")))]
fn_alias! {
    #[cold]
    pub(crate) unsafe fn(dst: *mut Udw, val: Udw);
    // fallback's atomic store has at least release semantics.
    #[cfg(not(target_arch = "arm"))]
    atomic_store_non_seqcst = atomic_store(Ordering::Release);
    atomic_store_seqcst = atomic_store(Ordering::SeqCst);
}

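// Returns the previous value together with a bool indicating whether the
// exchange succeeded, rather than the Result used by the standard-library-style
// compare_exchange API.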
#[cold]
pub(crate) unsafe fn atomic_compare_exchange(
    dst: *mut Udw,
    old: Udw,
    new: Udw,
    success: Ordering,
    failure: Ordering,
) -> (Udw, bool) {
    debug_assert_outline_atomics!();
    #[allow(clippy::cast_ptr_alignment)]
    // SAFETY: the caller must uphold the safety contract.
    unsafe {
        match (*(dst as *const AtomicUdw)).compare_exchange(old, new, success, failure) {
            Ok(v) => (v, true),
            Err(v) => (v, false),
        }
    }
}
fn_alias! {
    #[cold]
    pub(crate) unsafe fn(dst: *mut Udw, old: Udw, new: Udw) -> (Udw, bool);
    // fallback's atomic CAS has at least AcqRel semantics.
    #[cfg(not(any(target_arch = "arm", target_arch = "x86_64")))]
    atomic_compare_exchange_non_seqcst
        = atomic_compare_exchange(Ordering::AcqRel, Ordering::Acquire);
    atomic_compare_exchange_seqcst
        = atomic_compare_exchange(Ordering::SeqCst, Ordering::SeqCst);
}

macro_rules! atomic_rmw_3 {
    (
        $name:ident($atomic_type:ident::$method_name:ident),
        $non_seqcst_alias:ident, $seqcst_alias:ident
    ) => {
        #[cold]
        pub(crate) unsafe fn $name(dst: *mut Udw, val: Udw, order: Ordering) -> Udw {
            debug_assert_outline_atomics!();
            #[allow(
                clippy::as_underscore,
                clippy::cast_possible_wrap,
                clippy::cast_ptr_alignment,
                clippy::cast_sign_loss
            )]
            // SAFETY: the caller must uphold the safety contract.
            unsafe {
                (*(dst as *const $atomic_type)).$method_name(val as _, order) as Udw
            }
        }
        fn_alias! {
            #[cold]
            pub(crate) unsafe fn(dst: *mut Udw, val: Udw) -> Udw;
            // fallback's atomic RMW has at least AcqRel semantics.
            #[cfg(not(any(target_arch = "arm", target_arch = "x86_64")))]
            $non_seqcst_alias = $name(Ordering::AcqRel);
            $seqcst_alias = $name(Ordering::SeqCst);
        }
    };
}
macro_rules! atomic_rmw_2 {
    (
        $name:ident($atomic_type:ident::$method_name:ident),
        $non_seqcst_alias:ident, $seqcst_alias:ident
    ) => {
        #[cold]
        pub(crate) unsafe fn $name(dst: *mut Udw, order: Ordering) -> Udw {
            debug_assert_outline_atomics!();
            #[allow(clippy::cast_ptr_alignment)]
            // SAFETY: the caller must uphold the safety contract.
            unsafe {
                (*(dst as *const $atomic_type)).$method_name(order) as Udw
            }
        }
        fn_alias! {
            #[cold]
            pub(crate) unsafe fn(dst: *mut Udw) -> Udw;
            // fallback's atomic RMW has at least AcqRel semantics.
            #[cfg(not(any(target_arch = "arm", target_arch = "x86_64")))]
            $non_seqcst_alias = $name(Ordering::AcqRel);
            $seqcst_alias = $name(Ordering::SeqCst);
        }
    };
}

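// atomic_rmw_3! generates the three-argument RMW operations (dst, val, order);
// for example, the atomic_swap invocation below expands to
// `unsafe fn atomic_swap(dst: *mut Udw, val: Udw, order: Ordering) -> Udw`
// plus its atomic_swap_non_seqcst/atomic_swap_seqcst aliases.
// atomic_rmw_2! does the same for the two-argument operations (dst, order).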
atomic_rmw_3!(atomic_swap(AtomicUdw::swap), atomic_swap_non_seqcst, atomic_swap_seqcst);
atomic_rmw_3!(atomic_add(AtomicUdw::fetch_add), atomic_add_non_seqcst, atomic_add_seqcst);
atomic_rmw_3!(atomic_sub(AtomicUdw::fetch_sub), atomic_sub_non_seqcst, atomic_sub_seqcst);
atomic_rmw_3!(atomic_and(AtomicUdw::fetch_and), atomic_and_non_seqcst, atomic_and_seqcst);
atomic_rmw_3!(atomic_nand(AtomicUdw::fetch_nand), atomic_nand_non_seqcst, atomic_nand_seqcst);
atomic_rmw_3!(atomic_or(AtomicUdw::fetch_or), atomic_or_non_seqcst, atomic_or_seqcst);
atomic_rmw_3!(atomic_xor(AtomicUdw::fetch_xor), atomic_xor_non_seqcst, atomic_xor_seqcst);
atomic_rmw_3!(atomic_max(AtomicIdw::fetch_max), atomic_max_non_seqcst, atomic_max_seqcst);
atomic_rmw_3!(atomic_umax(AtomicUdw::fetch_max), atomic_umax_non_seqcst, atomic_umax_seqcst);
atomic_rmw_3!(atomic_min(AtomicIdw::fetch_min), atomic_min_non_seqcst, atomic_min_seqcst);
atomic_rmw_3!(atomic_umin(AtomicUdw::fetch_min), atomic_umin_non_seqcst, atomic_umin_seqcst);
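// Note: atomic_max/atomic_min above go through AtomicIdw so the comparison is
// signed, while atomic_umax/atomic_umin use AtomicUdw for an unsigned comparison
// (hence the sign-related casts allowed in atomic_rmw_3!).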

atomic_rmw_2!(atomic_not(AtomicUdw::fetch_not), atomic_not_non_seqcst, atomic_not_seqcst);
atomic_rmw_2!(atomic_neg(AtomicUdw::fetch_neg), atomic_neg_non_seqcst, atomic_neg_seqcst);