Skip to content

Commit e29f420

Browse files
committed
Auto merge of #21972 - pnkfelix:new-dtor-semantics-6, r=nikomatsakis
This is a resurrection and heavy revision/expansion of a PR that pcwalton did to resolve #8861. The most relevant, user-visible semantic change is this: #[unsafe_destructor] is gone. Instead, if a type expression for some value has a destructor, then any lifetimes referenced within that type expression must strictly outlive the scope of the value. See discussion on rust-lang/rfcs#769
2 parents 446bc89 + 2c9d81b commit e29f420

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

56 files changed

+1863
-136
lines changed

src/libarena/lib.rs

+50-21
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@ use std::cell::{Cell, RefCell};
4242
use std::cmp;
4343
use std::intrinsics::{TyDesc, get_tydesc};
4444
use std::intrinsics;
45+
use std::marker;
4546
use std::mem;
4647
use std::num::{Int, UnsignedInt};
4748
use std::ptr;
@@ -88,27 +89,29 @@ impl Chunk {
8889
/// than objects without destructors. This reduces overhead when initializing
8990
/// plain-old-data (`Copy` types) and means we don't need to waste time running
9091
/// their destructors.
91-
pub struct Arena {
92+
pub struct Arena<'longer_than_self> {
9293
// The head is separated out from the list as an unbenchmarked
9394
// microoptimization, to avoid needing to case on the list to access the
9495
// head.
9596
head: RefCell<Chunk>,
9697
copy_head: RefCell<Chunk>,
9798
chunks: RefCell<Vec<Chunk>>,
99+
_invariant: marker::InvariantLifetime<'longer_than_self>,
98100
}
99101

100-
impl Arena {
102+
impl<'a> Arena<'a> {
101103
/// Allocates a new Arena with 32 bytes preallocated.
102-
pub fn new() -> Arena {
104+
pub fn new() -> Arena<'a> {
103105
Arena::new_with_size(32)
104106
}
105107

106108
/// Allocates a new Arena with `initial_size` bytes preallocated.
107-
pub fn new_with_size(initial_size: usize) -> Arena {
109+
pub fn new_with_size(initial_size: usize) -> Arena<'a> {
108110
Arena {
109111
head: RefCell::new(chunk(initial_size, false)),
110112
copy_head: RefCell::new(chunk(initial_size, true)),
111113
chunks: RefCell::new(Vec::new()),
114+
_invariant: marker::InvariantLifetime,
112115
}
113116
}
114117
}
@@ -122,7 +125,7 @@ fn chunk(size: usize, is_copy: bool) -> Chunk {
122125
}
123126

124127
#[unsafe_destructor]
125-
impl Drop for Arena {
128+
impl<'longer_than_self> Drop for Arena<'longer_than_self> {
126129
fn drop(&mut self) {
127130
unsafe {
128131
destroy_chunk(&*self.head.borrow());
@@ -180,7 +183,7 @@ fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
180183
((p & !1) as *const TyDesc, p & 1 == 1)
181184
}
182185

183-
impl Arena {
186+
impl<'longer_than_self> Arena<'longer_than_self> {
184187
fn chunk_size(&self) -> usize {
185188
self.copy_head.borrow().capacity()
186189
}
@@ -293,7 +296,7 @@ impl Arena {
293296
/// Allocates a new item in the arena, using `op` to initialize the value,
294297
/// and returns a reference to it.
295298
#[inline]
296-
pub fn alloc<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
299+
pub fn alloc<T:'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
297300
unsafe {
298301
if intrinsics::needs_drop::<T>() {
299302
self.alloc_noncopy(op)
@@ -317,20 +320,6 @@ fn test_arena_destructors() {
317320
}
318321
}
319322

320-
#[test]
321-
fn test_arena_alloc_nested() {
322-
struct Inner { value: usize }
323-
struct Outer<'a> { inner: &'a Inner }
324-
325-
let arena = Arena::new();
326-
327-
let result = arena.alloc(|| Outer {
328-
inner: arena.alloc(|| Inner { value: 10 })
329-
});
330-
331-
assert_eq!(result.inner.value, 10);
332-
}
333-
334323
#[test]
335324
#[should_fail]
336325
fn test_arena_destructors_fail() {
@@ -365,6 +354,10 @@ pub struct TypedArena<T> {
365354

366355
/// A pointer to the first arena segment.
367356
first: RefCell<*mut TypedArenaChunk<T>>,
357+
358+
/// Marker indicating that dropping the arena causes its owned
359+
/// instances of `T` to be dropped.
360+
_own: marker::PhantomData<T>,
368361
}
369362

370363
struct TypedArenaChunk<T> {
@@ -460,6 +453,7 @@ impl<T> TypedArena<T> {
460453
ptr: Cell::new((*chunk).start() as *const T),
461454
end: Cell::new((*chunk).end() as *const T),
462455
first: RefCell::new(chunk),
456+
_own: marker::PhantomData,
463457
}
464458
}
465459
}
@@ -523,6 +517,41 @@ mod tests {
523517
z: i32,
524518
}
525519

520+
#[test]
521+
fn test_arena_alloc_nested() {
522+
struct Inner { value: u8 }
523+
struct Outer<'a> { inner: &'a Inner }
524+
enum EI<'e> { I(Inner), O(Outer<'e>) }
525+
526+
struct Wrap<'a>(TypedArena<EI<'a>>);
527+
528+
impl<'a> Wrap<'a> {
529+
fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
530+
let r: &EI = self.0.alloc(EI::I(f()));
531+
if let &EI::I(ref i) = r {
532+
i
533+
} else {
534+
panic!("mismatch");
535+
}
536+
}
537+
fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
538+
let r: &EI = self.0.alloc(EI::O(f()));
539+
if let &EI::O(ref o) = r {
540+
o
541+
} else {
542+
panic!("mismatch");
543+
}
544+
}
545+
}
546+
547+
let arena = Wrap(TypedArena::new());
548+
549+
let result = arena.alloc_outer(|| Outer {
550+
inner: arena.alloc_inner(|| Inner { value: 10 }) });
551+
552+
assert_eq!(result.inner.value, 10);
553+
}
554+
526555
#[test]
527556
pub fn test_copy() {
528557
let arena = TypedArena::new();

src/libcollections/btree/node.rs

+8-8
Original file line numberDiff line numberDiff line change
@@ -278,7 +278,7 @@ impl<T> Drop for RawItems<T> {
278278
#[unsafe_destructor]
279279
impl<K, V> Drop for Node<K, V> {
280280
fn drop(&mut self) {
281-
if self.keys.0.is_null() {
281+
if self.keys.ptr.is_null() {
282282
// We have already cleaned up this node.
283283
return;
284284
}
@@ -292,7 +292,7 @@ impl<K, V> Drop for Node<K, V> {
292292
self.destroy();
293293
}
294294

295-
self.keys.0 = ptr::null_mut();
295+
self.keys.ptr = ptr::null_mut();
296296
}
297297
}
298298

@@ -337,18 +337,18 @@ impl<K, V> Node<K, V> {
337337
unsafe fn destroy(&mut self) {
338338
let (alignment, size) =
339339
calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
340-
heap::deallocate(self.keys.0 as *mut u8, size, alignment);
340+
heap::deallocate(self.keys.ptr as *mut u8, size, alignment);
341341
}
342342

343343
#[inline]
344344
pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
345345
unsafe {(
346346
mem::transmute(raw::Slice {
347-
data: self.keys.0,
347+
data: self.keys.ptr,
348348
len: self.len()
349349
}),
350350
mem::transmute(raw::Slice {
351-
data: self.vals.0,
351+
data: self.vals.ptr,
352352
len: self.len()
353353
})
354354
)}
@@ -368,7 +368,7 @@ impl<K, V> Node<K, V> {
368368
} else {
369369
unsafe {
370370
mem::transmute(raw::Slice {
371-
data: self.edges.0,
371+
data: self.edges.ptr,
372372
len: self.len() + 1
373373
})
374374
}
@@ -586,7 +586,7 @@ impl <K, V> Node<K, V> {
586586

587587
/// If the node has any children
588588
pub fn is_leaf(&self) -> bool {
589-
self.edges.0.is_null()
589+
self.edges.ptr.is_null()
590590
}
591591

592592
/// if the node has too few elements
@@ -1064,7 +1064,7 @@ impl<K, V> Node<K, V> {
10641064
vals: RawItems::from_slice(self.vals()),
10651065
edges: RawItems::from_slice(self.edges()),
10661066

1067-
ptr: self.keys.0 as *mut u8,
1067+
ptr: self.keys.ptr as *mut u8,
10681068
capacity: self.capacity(),
10691069
is_leaf: self.is_leaf()
10701070
},

src/libcollections/vec.rs

+12-6
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ use core::default::Default;
5757
use core::fmt;
5858
use core::hash::{self, Hash};
5959
use core::iter::{repeat, FromIterator, IntoIterator};
60-
use core::marker::{ContravariantLifetime, InvariantType};
60+
use core::marker::{self, ContravariantLifetime, InvariantType};
6161
use core::mem;
6262
use core::nonzero::NonZero;
6363
use core::num::{Int, UnsignedInt};
@@ -140,6 +140,7 @@ pub struct Vec<T> {
140140
ptr: NonZero<*mut T>,
141141
len: usize,
142142
cap: usize,
143+
_own: marker::PhantomData<T>,
143144
}
144145

145146
unsafe impl<T: Send> Send for Vec<T> { }
@@ -166,7 +167,7 @@ impl<T> Vec<T> {
166167
// non-null value which is fine since we never call deallocate on the ptr
167168
// if cap is 0. The reason for this is because the pointer of a slice
168169
// being NULL would break the null pointer optimization for enums.
169-
Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
170+
unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, 0) }
170171
}
171172

172173
/// Constructs a new, empty `Vec<T>` with the specified capacity.
@@ -198,15 +199,15 @@ impl<T> Vec<T> {
198199
#[stable(feature = "rust1", since = "1.0.0")]
199200
pub fn with_capacity(capacity: usize) -> Vec<T> {
200201
if mem::size_of::<T>() == 0 {
201-
Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: usize::MAX }
202+
unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, usize::MAX) }
202203
} else if capacity == 0 {
203204
Vec::new()
204205
} else {
205206
let size = capacity.checked_mul(mem::size_of::<T>())
206207
.expect("capacity overflow");
207208
let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
208209
if ptr.is_null() { ::alloc::oom() }
209-
Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
210+
unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
210211
}
211212
}
212213

@@ -247,7 +248,12 @@ impl<T> Vec<T> {
247248
#[stable(feature = "rust1", since = "1.0.0")]
248249
pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
249250
capacity: usize) -> Vec<T> {
250-
Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
251+
Vec {
252+
ptr: NonZero::new(ptr),
253+
len: length,
254+
cap: capacity,
255+
_own: marker::PhantomData,
256+
}
251257
}
252258

253259
/// Creates a vector by copying the elements from a raw pointer.
@@ -1626,7 +1632,7 @@ impl<T> IntoIter<T> {
16261632
for _x in self.by_ref() { }
16271633
let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
16281634
mem::forget(self);
1629-
Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
1635+
Vec::from_raw_parts(allocation, 0, cap)
16301636
}
16311637
}
16321638
}

src/libcore/marker.rs

+18
Original file line numberDiff line numberDiff line change
@@ -202,6 +202,24 @@ pub unsafe trait Sync {
202202
// Empty
203203
}
204204

205+
/// A marker type that indicates to the compiler that the instances
206+
/// of the type itself own instances of the type parameter `T`.
207+
///
208+
/// This is used to indicate that one or more instances of the type
209+
/// `T` could be dropped when instances of the type itself are dropped,
210+
/// though that may not be apparent from the other structure of the
211+
/// type itself. For example, the type may hold a `*mut T`, which the
212+
/// compiler does not automatically treat as owned.
213+
#[unstable(feature = "core",
214+
reason = "Newly added to deal with scoping and destructor changes")]
215+
#[lang="phantom_data"]
216+
#[derive(PartialEq, Eq, PartialOrd, Ord)]
217+
pub struct PhantomData<T: ?Sized>;
218+
219+
impl<T: ?Sized> Copy for PhantomData<T> {}
220+
impl<T: ?Sized> Clone for PhantomData<T> {
221+
fn clone(&self) -> PhantomData<T> { *self }
222+
}
205223

206224
/// A marker type whose type parameter `T` is considered to be
207225
/// covariant with respect to the type itself. This is (typically)

src/libcore/ptr.rs

+14-3
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ use mem;
9292
use clone::Clone;
9393
use intrinsics;
9494
use option::Option::{self, Some, None};
95-
use marker::{Send, Sized, Sync};
95+
use marker::{self, Send, Sized, Sync};
9696

9797
use cmp::{PartialEq, Eq, Ord, PartialOrd};
9898
use cmp::Ordering::{self, Less, Equal, Greater};
@@ -522,7 +522,11 @@ impl<T> PartialOrd for *mut T {
522522
/// Useful for building abstractions like `Vec<T>` or `Box<T>`, which
523523
/// internally use raw pointers to manage the memory that they own.
524524
#[unstable(feature = "core", reason = "recently added to this module")]
525-
pub struct Unique<T: ?Sized>(pub *mut T);
525+
pub struct Unique<T: ?Sized> {
526+
/// The wrapped `*mut T`.
527+
pub ptr: *mut T,
528+
_own: marker::PhantomData<T>,
529+
}
526530

527531
/// `Unique` pointers are `Send` if `T` is `Send` because the data they
528532
/// reference is unaliased. Note that this aliasing invariant is
@@ -550,6 +554,13 @@ impl<T> Unique<T> {
550554
#[unstable(feature = "core",
551555
reason = "recently added to this module")]
552556
pub unsafe fn offset(self, offset: int) -> *mut T {
553-
self.0.offset(offset)
557+
self.ptr.offset(offset)
554558
}
555559
}
560+
561+
/// Creates a `Unique` wrapped around `ptr`, taking ownership of the
562+
/// data referenced by `ptr`.
563+
#[allow(non_snake_case)]
564+
pub fn Unique<T: ?Sized>(ptr: *mut T) -> Unique<T> {
565+
Unique { ptr: ptr, _own: marker::PhantomData }
566+
}

src/libcoretest/ptr.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -172,7 +172,7 @@ fn test_set_memory() {
172172
fn test_unsized_unique() {
173173
let xs: &mut [_] = &mut [1, 2, 3];
174174
let ptr = Unique(xs as *mut [_]);
175-
let ys = unsafe { &mut *ptr.0 };
175+
let ys = unsafe { &mut *ptr.ptr };
176176
let zs: &mut [_] = &mut [1, 2, 3];
177177
assert!(ys == zs);
178178
}

src/libflate/lib.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -45,13 +45,13 @@ pub struct Bytes {
4545
impl Deref for Bytes {
4646
type Target = [u8];
4747
fn deref(&self) -> &[u8] {
48-
unsafe { slice::from_raw_parts_mut(self.ptr.0, self.len) }
48+
unsafe { slice::from_raw_parts_mut(self.ptr.ptr, self.len) }
4949
}
5050
}
5151

5252
impl Drop for Bytes {
5353
fn drop(&mut self) {
54-
unsafe { libc::free(self.ptr.0 as *mut _); }
54+
unsafe { libc::free(self.ptr.ptr as *mut _); }
5555
}
5656
}
5757

0 commit comments

Comments
 (0)