use std::{
    alloc::{self, Layout},
    cmp::Ordering,
    hash::{Hash, Hasher},
    marker::PhantomData,
    mem::{self, offset_of, ManuallyDrop},
    ops::Deref,
    ptr,
    sync::atomic::{
        self,
        Ordering::{Acquire, Relaxed, Release},
    },
};

/// Going above this limit aborts the process (see `Clone`), so the reference
/// count can never overflow. This mirrors the guard used by `std::sync::Arc`.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;

/// The heap allocation behind an `Arc<T>`: the reference count followed by
/// the payload. `#[repr(C)]` pins the field order so `count` sits at the
/// start of the allocation.
#[repr(C)]
pub(crate) struct ArcInner<T: ?Sized> {
    pub(crate) count: atomic::AtomicUsize,
    pub(crate) data: T,
}

unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}

/// An atomically reference-counted shared pointer, analogous to
/// `std::sync::Arc` but without weak references. `#[repr(transparent)]`
/// keeps the handle a single pointer wide.
#[repr(transparent)]
pub(crate) struct Arc<T: ?Sized> {
    pub(crate) p: ptr::NonNull<ArcInner<T>>,
    pub(crate) phantom: PhantomData<T>,
}

unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
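
// Illustrative compile-time sanity check (a sketch, not load-bearing code):
// `#[repr(transparent)]` over a `NonNull` means an `Arc` handle is exactly
// one pointer wide.
const _: () = assert!(mem::size_of::<Arc<u8>>() == mem::size_of::<*const u8>());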

impl<T> Arc<T> {
    /// Reconstructs an `Arc<T>` from a pointer to the `data` field of its
    /// `ArcInner<T>`.
    ///
    /// # Safety
    ///
    /// `ptr` must point at the `data` field of a live `ArcInner<T>`
    /// allocation, and the reference it represents must not already have been
    /// dropped.
    #[inline]
    pub(crate) unsafe fn from_raw(ptr: *const T) -> Self {
        // Walk back from the payload to the start of the `ArcInner`.
        let ptr = (ptr as *const u8).sub(offset_of!(ArcInner<T>, data));
        Arc { p: ptr::NonNull::new_unchecked(ptr as *mut ArcInner<T>), phantom: PhantomData }
    }
}

impl<T: ?Sized> Arc<T> {
    #[inline]
    fn inner(&self) -> &ArcInner<T> {
        unsafe { &*self.ptr() }
    }

    // Kept out of line so the common path of `drop` stays small.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        let _ = Box::from_raw(self.ptr());
    }

    /// Returns `true` if the two `Arc`s point to the same allocation.
    #[inline]
    pub(crate) fn ptr_eq(this: &Self, other: &Self) -> bool {
        std::ptr::addr_eq(this.ptr(), other.ptr())
    }

    pub(crate) fn ptr(&self) -> *mut ArcInner<T> {
        self.p.as_ptr()
    }
}

impl<T: ?Sized> Clone for Arc<T> {
    #[inline]
    fn clone(&self) -> Self {
        // Relaxed is enough for the increment: the new reference is derived
        // from an existing one, so the allocation is already known to be live.
        let old_size = self.inner().count.fetch_add(1, Relaxed);

        // Abort rather than let the count overflow (e.g. via repeated
        // `mem::forget`), mirroring `std::sync::Arc`.
        if old_size > MAX_REFCOUNT {
            std::process::abort();
        }

        unsafe { Arc { p: ptr::NonNull::new_unchecked(self.ptr()), phantom: PhantomData } }
    }
}

impl<T: ?Sized> Deref for Arc<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        &self.inner().data
    }
}

impl<T: ?Sized> Arc<T> {
    /// Returns a mutable reference to the payload if this is the only `Arc`
    /// pointing at it.
    #[inline]
    pub(crate) fn get_mut(this: &mut Self) -> Option<&mut T> {
        if this.is_unique() {
            unsafe {
                // Uniqueness makes handing out `&mut` sound.
                Some(&mut (*this.ptr()).data)
            }
        } else {
            None
        }
    }

    pub(crate) fn is_unique(&self) -> bool {
        // Acquire pairs with the Release decrement in `drop`, so writes made
        // through a clone that has since been dropped are visible here.
        self.inner().count.load(Acquire) == 1
    }
}

impl<T: ?Sized> Drop for Arc<T> {
    #[inline]
    fn drop(&mut self) {
        // Release ensures this handle's use of the data happens-before the
        // deallocation performed by whichever handle drops the count to zero.
        if self.inner().count.fetch_sub(1, Release) != 1 {
            return;
        }

        // This Acquire load pairs with the Release decrements of all other
        // handles, so their accesses are complete before the memory is freed.
        self.inner().count.load(Acquire);

        unsafe {
            self.drop_slow();
        }
    }
}

impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
    fn eq(&self, other: &Arc<T>) -> bool {
        Self::ptr_eq(self, other) || *(*self) == *(*other)
    }

    fn ne(&self, other: &Arc<T>) -> bool {
        !Self::ptr_eq(self, other) && *(*self) != *(*other)
    }
}

impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
    fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
        (**self).partial_cmp(&**other)
    }

    fn lt(&self, other: &Arc<T>) -> bool {
        *(*self) < *(*other)
    }

    fn le(&self, other: &Arc<T>) -> bool {
        *(*self) <= *(*other)
    }

    fn gt(&self, other: &Arc<T>) -> bool {
        *(*self) > *(*other)
    }

    fn ge(&self, other: &Arc<T>) -> bool {
        *(*self) >= *(*other)
    }
}

impl<T: ?Sized + Ord> Ord for Arc<T> {
    fn cmp(&self, other: &Arc<T>) -> Ordering {
        (**self).cmp(&**other)
    }
}

impl<T: ?Sized + Eq> Eq for Arc<T> {}

impl<T: ?Sized + Hash> Hash for Arc<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}

/// A header `H` followed by a slice of `T`, stored contiguously. The slice
/// length is kept inline so a thin pointer to the allocation can be widened
/// back into a fat one.
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd)]
#[repr(C)]
pub(crate) struct HeaderSlice<H, T: ?Sized> {
    pub(crate) header: H,
    length: usize,
    slice: T,
}

impl<H, T> HeaderSlice<H, [T]> {
    pub(crate) fn slice(&self) -> &[T] {
        &self.slice
    }
}

impl<H, T> Deref for HeaderSlice<H, [T; 0]> {
    type Target = HeaderSlice<H, [T]>;

    fn deref(&self) -> &Self::Target {
        // Build a fat pointer to `self` whose metadata is the stored length;
        // only that metadata matters for the final cast, so the element type
        // of the fake slice is irrelevant.
        let len = self.length;
        let fake_slice: *const [T] = ptr::slice_from_raw_parts(self as *const _ as *const T, len);
        unsafe { &*(fake_slice as *const HeaderSlice<H, [T]>) }
    }
}

/// A "thin" `Arc<HeaderSlice<H, [T]>>`: because the slice length lives inside
/// the allocation, the handle itself is a single pointer.
#[repr(transparent)]
pub(crate) struct ThinArc<H, T> {
    ptr: ptr::NonNull<ArcInner<HeaderSlice<H, [T; 0]>>>,
    phantom: PhantomData<(H, T)>,
}

unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}

// Widen a thin pointer into the corresponding fat pointer by reading the
// slice length stored in the allocation.
fn thin_to_thick<H, T>(
    thin: *mut ArcInner<HeaderSlice<H, [T; 0]>>,
) -> *mut ArcInner<HeaderSlice<H, [T]>> {
    let len = unsafe { (*thin).data.length };
    let fake_slice: *mut [T] = ptr::slice_from_raw_parts_mut(thin as *mut T, len);
    fake_slice as *mut ArcInner<HeaderSlice<H, [T]>>
}

impl<H, T> ThinArc<H, T> {
    /// Temporarily views this `ThinArc` as a fat `Arc` and passes it to `f`.
    #[inline]
    pub(crate) fn with_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<HeaderSlice<H, [T]>>) -> U,
    {
        // `ManuallyDrop` stops the transient `Arc` from decrementing the
        // refcount that `self` still owns.
        let transient = unsafe {
            ManuallyDrop::new(Arc {
                p: ptr::NonNull::new_unchecked(thin_to_thick(self.ptr.as_ptr())),
                phantom: PhantomData,
            })
        };

        let result = f(&transient);

        result
    }

    /// Allocates a `ThinArc` holding `header` followed by the items yielded
    /// by `items`.
    pub(crate) fn from_header_and_iter<I>(header: H, mut items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        assert_ne!(mem::size_of::<T>(), 0, "Need to think about ZST");

        let num_items = items.len();

        // Offset of the slice within the allocation.
        let inner_to_data_offset = offset_of!(ArcInner<HeaderSlice<H, [T; 0]>>, data);
        let data_to_slice_offset = offset_of!(HeaderSlice<H, [T; 0]>, slice);
        let slice_offset = inner_to_data_offset + data_to_slice_offset;

        // Total size: the header part plus `num_items` elements, rounded up
        // to the alignment of the whole `ArcInner`.
        let slice_size = mem::size_of::<T>().checked_mul(num_items).expect("size overflows");
        let usable_size = slice_offset.checked_add(slice_size).expect("size overflows");

        let align = mem::align_of::<ArcInner<HeaderSlice<H, [T; 0]>>>();
        let size = usable_size.wrapping_add(align - 1) & !(align - 1);
        assert!(size >= usable_size, "size overflows");
        let layout = Layout::from_size_align(size, align).expect("invalid layout");

        let ptr: *mut ArcInner<HeaderSlice<H, [T; 0]>>;
        unsafe {
            let buffer = alloc::alloc(layout);

            if buffer.is_null() {
                alloc::handle_alloc_error(layout);
            }

            ptr = buffer as *mut _;

            // Initialize the allocation field by field with `ptr::write`,
            // since it starts out uninitialized.
            let count = atomic::AtomicUsize::new(1);

            ptr::write(ptr::addr_of_mut!((*ptr).count), count);
            ptr::write(ptr::addr_of_mut!((*ptr).data.header), header);
            ptr::write(ptr::addr_of_mut!((*ptr).data.length), num_items);
            if num_items != 0 {
                let mut current = ptr::addr_of_mut!((*ptr).data.slice) as *mut T;
                debug_assert_eq!(current as usize - buffer as usize, slice_offset);
                for _ in 0..num_items {
                    ptr::write(
                        current,
                        items.next().expect("ExactSizeIterator over-reported length"),
                    );
                    current = current.offset(1);
                }
                // The usable part of the buffer should be consumed exactly.
                debug_assert_eq!(current as *mut u8, buffer.add(usable_size));
            }
            assert!(items.next().is_none(), "ExactSizeIterator under-reported length");
        }

        ThinArc { ptr: unsafe { ptr::NonNull::new_unchecked(ptr) }, phantom: PhantomData }
    }
}

impl<H, T> Deref for ThinArc<H, T> {
    type Target = HeaderSlice<H, [T]>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { &(*thin_to_thick(self.ptr.as_ptr())).data }
    }
}

impl<H, T> Clone for ThinArc<H, T> {
    #[inline]
    fn clone(&self) -> Self {
        // Clone by round-tripping through the fat representation.
        ThinArc::with_arc(self, |a| Arc::into_thin(a.clone()))
    }
}

impl<H, T> Drop for ThinArc<H, T> {
    #[inline]
    fn drop(&mut self) {
        // Rebuild a fat `Arc` and let its `Drop` do the refcounting.
        let _ = Arc::from_thin(ThinArc { ptr: self.ptr, phantom: PhantomData });
    }
}

impl<H, T> Arc<HeaderSlice<H, [T]>> {
    /// Converts a fat `Arc` into a `ThinArc` without touching the refcount.
    #[inline]
    pub(crate) fn into_thin(a: Self) -> ThinArc<H, T> {
        assert_eq!(a.length, a.slice.len(), "Length needs to be correct for ThinArc to work");
        let fat_ptr: *mut ArcInner<HeaderSlice<H, [T]>> = a.ptr();
        mem::forget(a);
        // Drop the pointer metadata, keeping only the (thin) address.
        let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
        ThinArc {
            ptr: unsafe {
                ptr::NonNull::new_unchecked(thin_ptr as *mut ArcInner<HeaderSlice<H, [T; 0]>>)
            },
            phantom: PhantomData,
        }
    }

    /// Converts a `ThinArc` back into a fat `Arc`, again without touching the
    /// refcount.
    #[inline]
    pub(crate) fn from_thin(a: ThinArc<H, T>) -> Self {
        let ptr = thin_to_thick(a.ptr.as_ptr());
        mem::forget(a);
        unsafe { Arc { p: ptr::NonNull::new_unchecked(ptr), phantom: PhantomData } }
    }
}

impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
    #[inline]
    fn eq(&self, other: &ThinArc<H, T>) -> bool {
        **self == **other
    }
}

impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}

impl<H: Hash, T: Hash> Hash for ThinArc<H, T> {
    fn hash<HSR: Hasher>(&self, state: &mut HSR) {
        (**self).hash(state)
    }
}
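
// Illustrative usage sketch of the `ThinArc`/`Arc` surface defined above,
// assuming the standard `cargo test` harness.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn thin_arc_round_trip() {
        // Build a ThinArc from a header plus an exact-size iterator of items.
        let thin = ThinArc::from_header_and_iter("header", [1u32, 2, 3].into_iter());
        assert_eq!(thin.header, "header");
        assert_eq!(thin.slice(), &[1, 2, 3]);

        // Cloning shares the same allocation.
        let also = thin.clone();
        thin.with_arc(|a| also.with_arc(|b| assert!(Arc::ptr_eq(a, b))));
        assert!(thin == also);
    }

    #[test]
    fn get_mut_requires_uniqueness() {
        // There is no direct `Arc` constructor here, so go through `ThinArc`.
        let empty = std::iter::empty::<u8>();
        let mut arc = Arc::from_thin(ThinArc::from_header_and_iter(0u8, empty));
        assert!(Arc::get_mut(&mut arc).is_some());

        let other = arc.clone();
        assert!(Arc::get_mut(&mut arc).is_none());

        drop(other);
        assert!(Arc::get_mut(&mut arc).is_some());
    }
}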