1 // Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 // SPDX-License-Identifier: Apache-2.0 OR BSD-3-Clause
3 
4 use std::sync::atomic::Ordering;
5 
/// A unified interface over the integer types of [`std::sync::atomic`],
/// exposing `new`/`load`/`store` with an explicit memory [`Ordering`].
///
/// # Safety
///
/// Objects that implement this trait must consist exclusively of atomic types
/// from [`std::sync::atomic`](https://doc.rust-lang.org/std/sync/atomic/), except for
/// [`AtomicPtr<T>`](https://doc.rust-lang.org/std/sync/atomic/struct.AtomicPtr.html) and
/// [`AtomicBool`](https://doc.rust-lang.org/std/sync/atomic/struct.AtomicBool.html).
pub unsafe trait AtomicInteger: Sync + Send {
    /// The raw value type associated with the atomic integer (i.e. `u16` for `AtomicU16`).
    type V;

    /// Create a new instance of `Self` holding the value `v`.
    fn new(v: Self::V) -> Self;

    /// Loads a value from the atomic integer.
    fn load(&self, order: Ordering) -> Self::V;

    /// Stores a value into the atomic integer.
    fn store(&self, val: Self::V, order: Ordering);
}
25 
/// Generates an [`AtomicInteger`] implementation for one std atomic type.
///
/// `$atomic` is the path to the atomic type and `$value` is its raw integer
/// representation (e.g. `u16` for `AtomicU16`). Each trait method simply
/// forwards to the inherent method of the same name on the atomic type.
macro_rules! impl_atomic_integer_ops {
    ($atomic:path, $value:ty) => {
        // SAFETY: Sound as long as `$atomic` is one of the integer atomic
        // types from `std::sync::atomic`, which is how every invocation of
        // this macro below uses it.
        unsafe impl AtomicInteger for $atomic {
            type V = $value;

            fn new(v: Self::V) -> Self {
                Self::new(v)
            }

            fn load(&self, order: Ordering) -> Self::V {
                self.load(order)
            }

            fn store(&self, val: Self::V, order: Ordering) {
                self.store(val, order)
            }
        }
    };
}
48 
49 // TODO: Detect availability using #[cfg(target_has_atomic) when it is stabilized.
50 // Right now we essentially assume we're running on either x86 or Arm (32 or 64 bit). AFAIK,
51 // Rust starts using additional synchronization primitives to implement atomics when they're
52 // not natively available, and that doesn't interact safely with how we cast pointers to
53 // atomic value references. We should be wary of this when looking at a broader range of
54 // platforms.
55 
56 impl_atomic_integer_ops!(std::sync::atomic::AtomicI8, i8);
57 impl_atomic_integer_ops!(std::sync::atomic::AtomicI16, i16);
58 impl_atomic_integer_ops!(std::sync::atomic::AtomicI32, i32);
59 #[cfg(any(
60     target_arch = "x86_64",
61     target_arch = "aarch64",
62     target_arch = "powerpc64",
63     target_arch = "s390x"
64 ))]
65 impl_atomic_integer_ops!(std::sync::atomic::AtomicI64, i64);
66 
67 impl_atomic_integer_ops!(std::sync::atomic::AtomicU8, u8);
68 impl_atomic_integer_ops!(std::sync::atomic::AtomicU16, u16);
69 impl_atomic_integer_ops!(std::sync::atomic::AtomicU32, u32);
70 #[cfg(any(
71     target_arch = "x86_64",
72     target_arch = "aarch64",
73     target_arch = "powerpc64",
74     target_arch = "s390x"
75 ))]
76 impl_atomic_integer_ops!(std::sync::atomic::AtomicU64, u64);
77 
78 impl_atomic_integer_ops!(std::sync::atomic::AtomicIsize, isize);
79 impl_atomic_integer_ops!(std::sync::atomic::AtomicUsize, usize);
80 
#[cfg(test)]
mod tests {
    use super::*;

    use std::fmt::Debug;
    use std::sync::atomic::AtomicU32;

    /// Exercises the `new`/`load`/`store` round-trip for an atomic type `A`.
    fn check_atomic_integer_ops<A>()
    where
        A: AtomicInteger,
        A::V: Copy + Debug + From<u8> + PartialEq,
    {
        // A freshly created atomic must report the value it was built with.
        let initial = A::V::from(0);
        let atomic = A::new(initial);
        assert_eq!(atomic.load(Ordering::Relaxed), initial);

        // A stored value must be observable by a subsequent load.
        let updated = A::V::from(100);
        atomic.store(updated, Ordering::Relaxed);
        assert_eq!(atomic.load(Ordering::Relaxed), updated);
    }

    #[test]
    fn test_atomic_integer_ops() {
        check_atomic_integer_ops::<AtomicU32>()
    }
}
106