
Commit 6837b81
Fix some Arc allocator leaks
This doesn't matter for the stable `Global` allocator as it is a ZST singleton, but other allocators may rely on all instances being dropped.
1 parent 6351247 commit 6837b81
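For context on why the leaked instances matter: an allocator with a meaningful `Drop` impl (one that releases an arena, a file descriptor, or, as in the test added below, a live-instance counter) never gets that `Drop` run for a forgotten copy. A minimal standalone sketch of the old clone-then-forget pattern; `CountedAlloc` is hypothetical, modeled on the `AllocCanary` test helper in this commit:

```rust
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

// Hypothetical instance counter, in the spirit of the `AllocCanary`
// helper added to the tests in this commit.
static LIVE: AtomicUsize = AtomicUsize::new(0);

struct CountedAlloc;

impl CountedAlloc {
    fn new() -> Self {
        LIVE.fetch_add(1, SeqCst);
        CountedAlloc
    }
}

impl Clone for CountedAlloc {
    fn clone(&self) -> Self {
        Self::new()
    }
}

impl Drop for CountedAlloc {
    fn drop(&mut self) {
        LIVE.fetch_sub(1, SeqCst);
    }
}

fn main() {
    let original = CountedAlloc::new(); // LIVE == 1
    let copy = original.clone();        // LIVE == 2
    std::mem::forget(original);         // the old pattern: Drop never runs
    drop(copy);                         // LIVE == 1, not 0
    assert_eq!(LIVE.load(SeqCst), 1);   // one instance has leaked
}
```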

2 files changed (+80 −29)

library/alloc/src/sync.rs (+20 −24)
@@ -279,6 +279,12 @@ impl<T: ?Sized> Arc<T> {
 }
 
 impl<T: ?Sized, A: Allocator> Arc<T, A> {
+    #[inline]
+    fn internal_into_inner_with_allocator(self) -> (NonNull<ArcInner<T>>, A) {
+        let this = mem::ManuallyDrop::new(self);
+        (this.ptr, unsafe { ptr::read(&this.alloc) })
+    }
+
     #[inline]
     unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self {
         Self { ptr, phantom: PhantomData, alloc }
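A note on the new helper: `Arc` implements `Drop`, and Rust forbids plain moves out of fields of a type with a `Drop` impl (error E0509), so the helper suppresses the destructor with `ManuallyDrop` and then moves the allocator out bitwise with `ptr::read`. A standalone illustration of the same pattern, using a hypothetical `Holder` type rather than the library internals:

```rust
use std::mem::ManuallyDrop;
use std::ptr;

// Stand-in for `Arc`: it owns an allocator and has a `Drop` impl.
struct Holder<A> {
    alloc: A,
}

impl<A> Drop for Holder<A> {
    fn drop(&mut self) { /* e.g. decrement a refcount */ }
}

fn into_alloc<A>(holder: Holder<A>) -> A {
    // `holder.alloc` alone would be rejected because `Holder: Drop`.
    // `ManuallyDrop` suppresses that destructor; `ptr::read` then takes
    // ownership of the field. Exactly one live `A` instance remains.
    let this = ManuallyDrop::new(holder);
    unsafe { ptr::read(&this.alloc) }
}
```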
@@ -1271,12 +1277,9 @@ impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
-    pub unsafe fn assume_init(self) -> Arc<T, A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Arc<T, A> {
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_inner_in(ptr.cast(), alloc) }
     }
 }
 
@@ -1316,12 +1319,9 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
-    pub unsafe fn assume_init(self) -> Arc<[T], A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Arc<[T], A> {
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_ptr_in(ptr.as_ptr() as _, alloc) }
     }
 }
 
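A user-visible consequence of the two `assume_init` hunks: the `A: Clone` bound is gone, so move-only allocators now work. A hedged sketch (nightly only, since `allocator_api` and `new_uninit` are unstable; `MoveOnlyAlloc` is hypothetical and simply forwards to `Global`):

```rust
#![feature(allocator_api, new_uninit)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::mem::MaybeUninit;
use std::ptr::NonNull;
use std::sync::Arc;

// A deliberately non-Clone allocator: with the old `A: Clone` bound,
// `assume_init` could not be called on an `Arc<_, MoveOnlyAlloc>` at all.
struct MoveOnlyAlloc;

unsafe impl Allocator for MoveOnlyAlloc {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe { Global.deallocate(ptr, layout) }
    }
}

fn main() {
    let arc = Arc::new_in(MaybeUninit::new(42u32), MoveOnlyAlloc);
    // SAFETY: the value was initialized by `new_in` above.
    let arc: Arc<u32, MoveOnlyAlloc> = unsafe { arc.assume_init() };
    assert_eq!(*arc, 42);
}
```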
@@ -2409,7 +2409,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Arc<T, A> {
     }
 }
 
-impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
+impl<A: Allocator> Arc<dyn Any + Send + Sync, A> {
     /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
     ///
     /// # Examples
@@ -2436,10 +2436,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
     {
        if (*self).is::<T>() {
            unsafe {
-                let ptr = self.ptr.cast::<ArcInner<T>>();
-                let alloc = self.alloc.clone();
-                mem::forget(self);
-                Ok(Arc::from_inner_in(ptr, alloc))
+                let (ptr, alloc) = self.internal_into_inner_with_allocator();
+                Ok(Arc::from_inner_in(ptr.cast(), alloc))
            }
        } else {
            Err(self)
@@ -2479,10 +2477,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
        T: Any + Send + Sync,
     {
        unsafe {
-            let ptr = self.ptr.cast::<ArcInner<T>>();
-            let alloc = self.alloc.clone();
-            mem::forget(self);
-            Arc::from_inner_in(ptr, alloc)
+            let (ptr, alloc) = self.internal_into_inner_with_allocator();
+            Arc::from_inner_in(ptr.cast(), alloc)
        }
     }
 }
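Both downcast bodies previously created a second allocator instance with `self.alloc.clone()` and then suppressed the drop of the original with `mem::forget(self)`, so every downcast left one extra allocator instance alive: invisible with the ZST `Global`, but an unbalanced construction for any allocator that tracks instances or owns real resources. With the helper, the single existing instance simply moves into the returned `Arc`.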
@@ -3438,13 +3434,13 @@ impl From<Arc<str>> for Arc<[u8]> {
 }
 
 #[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, A: Allocator + Clone, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
+impl<T, A: Allocator, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
     type Error = Arc<[T], A>;
 
     fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
-            let alloc = boxed_slice.alloc.clone();
-            Ok(unsafe { Arc::from_raw_in(Arc::into_raw(boxed_slice) as *mut [T; N], alloc) })
+            let (ptr, alloc) = boxed_slice.internal_into_inner_with_allocator();
+            Ok(unsafe { Arc::from_inner_in(ptr.cast(), alloc) })
        } else {
            Err(boxed_slice)
        }
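The observable behavior of this impl is unchanged; only the internal allocator handling differs. A quick usage sketch with the default `Global` allocator (stable Rust):

```rust
use std::sync::Arc;

fn main() {
    // Slice-to-array conversion goes through the `TryFrom` impl above.
    let slice: Arc<[u32]> = Arc::from(vec![1, 2, 3]);
    let array: Arc<[u32; 3]> = slice.try_into().unwrap();
    assert_eq!(*array, [1, 2, 3]);

    // On a length mismatch, the original Arc comes back as the error.
    let slice: Arc<[u32]> = Arc::from(vec![1, 2, 3]);
    let result: Result<Arc<[u32; 4]>, Arc<[u32]>> = slice.try_into();
    assert!(result.is_err());
}
```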

library/alloc/src/sync/tests.rs (+60 −5)
@@ -1,13 +1,15 @@
 use super::*;
 
 use std::clone::Clone;
+use std::mem::MaybeUninit;
 use std::option::Option::None;
+use std::sync::atomic::AtomicUsize;
 use std::sync::atomic::Ordering::SeqCst;
 use std::sync::mpsc::channel;
 use std::sync::Mutex;
 use std::thread;
 
-struct Canary(*mut atomic::AtomicUsize);
+struct Canary(*mut AtomicUsize);
 
 impl Drop for Canary {
     fn drop(&mut self) {
@@ -21,6 +23,37 @@ impl Drop for Canary {
     }
 }
 
+struct AllocCanary<'a>(&'a AtomicUsize);
+
+impl<'a> AllocCanary<'a> {
+    fn new(counter: &'a AtomicUsize) -> Self {
+        counter.fetch_add(1, SeqCst);
+        Self(counter)
+    }
+}
+
+unsafe impl Allocator for AllocCanary<'_> {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        std::alloc::Global.allocate(layout)
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        unsafe { std::alloc::Global.deallocate(ptr, layout) }
+    }
+}
+
+impl Clone for AllocCanary<'_> {
+    fn clone(&self) -> Self {
+        Self::new(self.0)
+    }
+}
+
+impl Drop for AllocCanary<'_> {
+    fn drop(&mut self) {
+        self.0.fetch_sub(1, SeqCst);
+    }
+}
+
 #[test]
 #[cfg_attr(target_os = "emscripten", ignore)]
 fn manually_share_arc() {
@@ -295,16 +328,16 @@ fn weak_self_cyclic() {
 
 #[test]
 fn drop_arc() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let x = Arc::new(Canary(&mut canary as *mut AtomicUsize));
     drop(x);
     assert!(canary.load(Acquire) == 1);
 }
 
 #[test]
 fn drop_arc_weak() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let arc = Arc::new(Canary(&mut canary as *mut AtomicUsize));
     let arc_weak = Arc::downgrade(&arc);
     assert!(canary.load(Acquire) == 0);
     drop(arc);
@@ -660,3 +693,25 @@ fn arc_drop_dereferenceable_race() {
        thread.join().unwrap();
    }
}
+
+#[test]
+fn arc_doesnt_leak_allocator() {
+    let counter = AtomicUsize::new(0);
+
+    {
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(arc.downcast::<usize>().unwrap());
+
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(unsafe { arc.downcast_unchecked::<usize>() });
+
+        let arc = Arc::new_in(MaybeUninit::<usize>::new(5usize), AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+
+        let arc: Arc<[MaybeUninit<usize>], _> =
+            Arc::new_zeroed_slice_in(5, AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+    }
+
+    assert_eq!(counter.load(SeqCst), 0);
+}
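The new test covers each rewritten conversion once (`downcast`, `downcast_unchecked`, and both `assume_init` variants), constructing every `Arc` in a fresh `AllocCanary`. Since the canary increments the shared counter on `new` and `clone` and decrements it on drop, the assertion that the counter returns to zero after the inner scope would fail under the old clone-and-forget code paths.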
