
Commit 4ee84e1

Merge pull request #161 from emilHof/miri-stack-borrow
Change `LruCache.map` to hold a pointer rather than an owned `LruEntry`.
2 parents cf063f6 + e0db0f2 commit 4ee84e1

File tree

1 file changed (+68, -50 lines)

src/lib.rs

Lines changed: 68 additions & 50 deletions
@@ -74,7 +74,7 @@ use core::iter::FusedIterator;
 use core::marker::PhantomData;
 use core::mem;
 use core::num::NonZeroUsize;
-use core::ptr;
+use core::ptr::{self, NonNull};
 use core::usize;

 #[cfg(any(test, not(feature = "hashbrown")))]
@@ -189,7 +189,7 @@ pub type DefaultHasher = std::collections::hash_map::RandomState;

 /// An LRU Cache
 pub struct LruCache<K, V, S = DefaultHasher> {
-    map: HashMap<KeyRef<K>, Box<LruEntry<K, V>>, S>,
+    map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
     cap: NonZeroUsize,

     // head and tail are sigil nodes to facilitate inserting entries
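The core of the change is this field type. As a rough standalone sketch (simplified `Node` type and `main`, not the crate's code), storing `NonNull` means the map only tracks a raw, non-null pointer to each heap node; nothing in the map asserts unique access to the node, and the allocation has to be released manually with `Box::from_raw`:

```rust
use std::collections::HashMap;
use std::ptr::NonNull;

// Simplified stand-in for the crate's `LruEntry<K, V>`.
struct Node {
    val: i32,
}

fn main() {
    let mut map: HashMap<&'static str, NonNull<Node>> = HashMap::new();

    // Allocate on the heap, then store only the raw (non-null) pointer.
    let node = Box::new(Node { val: 1 });
    let ptr = NonNull::new(Box::into_raw(node)).expect("Box is never null");
    map.insert("a", ptr);

    // Access goes through the raw pointer; no `&mut` to the whole node is created.
    unsafe { (*map["a"].as_ptr()).val += 1 };
    assert_eq!(unsafe { map["a"].as_ref().val }, 2);

    // The map no longer owns the allocation, so free it explicitly.
    for (_, node) in map.drain() {
        unsafe { drop(Box::from_raw(node.as_ptr())) };
    }
}
```

The trade-off is that every allocation created with `Box::into_raw` must be paired with exactly one `Box::from_raw`, which is why the later hunks also touch `pop`, `pop_entry`, `remove_last`, and `Drop`.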
@@ -266,7 +266,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     /// Creates a new LRU Cache with the given capacity.
     fn construct(
         cap: NonZeroUsize,
-        map: HashMap<KeyRef<K>, Box<LruEntry<K, V>>, S>,
+        map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
     ) -> LruCache<K, V, S> {
         // NB: The compiler warns that cache does not need to be marked as mutable if we
         // declare it as such since we only mutate it inside the unsafe block.
@@ -342,19 +342,23 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {

         match node_ref {
             Some(node_ref) => {
-                let node_ptr: *mut LruEntry<K, V> = &mut **node_ref;
-
                 // if the key is already in the cache just update its value and move it to the
                 // front of the list
-                unsafe { mem::swap(&mut v, &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V) }
+                let node_ptr: *mut LruEntry<K, V> = node_ref.as_ptr();
+
+                // gets a reference to the node to perform a swap and drops it right after
+                let node_ref = unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) };
+                mem::swap(&mut v, node_ref);
+                let _ = node_ref;
+
                 self.detach(node_ptr);
                 self.attach(node_ptr);
                 Some((k, v))
             }
             None => {
-                let (replaced, mut node) = self.replace_or_create_node(k, v);
+                let (replaced, node) = self.replace_or_create_node(k, v);
+                let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

-                let node_ptr: *mut LruEntry<K, V> = &mut *node;
                 self.attach(node_ptr);

                 let keyref = unsafe { (*node_ptr).key.as_ptr() };
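The `put` hunk above limits how long a `&mut` to the value exists: the reference is created from the raw pointer only for the `mem::swap` and then discarded before the node is detached and re-attached. A minimal sketch of the same pattern (a single heap `String` standing in for the node's value, not the crate's code):

```rust
use std::mem;

fn main() {
    // Heap allocation accessed through a raw pointer, as the cache does with its nodes.
    let slot: *mut String = Box::into_raw(Box::new(String::from("old")));

    let mut v = String::from("new");

    // Create a reference from the raw pointer only for the swap, then stop using it.
    let slot_ref = unsafe { &mut *slot };
    mem::swap(&mut v, slot_ref);
    let _ = slot_ref;

    // Further access goes through the raw pointer again.
    assert_eq!(unsafe { (*slot).as_str() }, "new");
    assert_eq!(v, "old");

    // Reclaim and drop the allocation.
    unsafe { drop(Box::from_raw(slot)) };
}
```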
@@ -368,27 +372,32 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     // Used internally to swap out a node if the cache is full or to create a new node if space
     // is available. Shared between `put`, `push`, `get_or_insert`, and `get_or_insert_mut`.
     #[allow(clippy::type_complexity)]
-    fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, Box<LruEntry<K, V>>) {
+    fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, NonNull<LruEntry<K, V>>) {
         if self.len() == self.cap().get() {
             // if the cache is full, remove the last entry so we can use it for the new key
             let old_key = KeyRef {
                 k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
             };
-            let mut old_node = self.map.remove(&old_key).unwrap();
+            let old_node = self.map.remove(&old_key).unwrap();
+            let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr();

             // read out the node's old key and value and then replace it
-            let replaced = unsafe { (old_node.key.assume_init(), old_node.val.assume_init()) };
-
-            old_node.key = mem::MaybeUninit::new(k);
-            old_node.val = mem::MaybeUninit::new(v);
+            let replaced = unsafe {
+                (
+                    mem::replace(&mut (*node_ptr).key, mem::MaybeUninit::new(k)).assume_init(),
+                    mem::replace(&mut (*node_ptr).val, mem::MaybeUninit::new(v)).assume_init(),
+                )
+            };

-            let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
             self.detach(node_ptr);

             (Some(replaced), old_node)
         } else {
             // if the cache is not full allocate a new LruEntry
-            (None, Box::new(LruEntry::new(k, v)))
+            // Safety: We allocate, turn into raw, and get NonNull all in one step.
+            (None, unsafe {
+                NonNull::new_unchecked(Box::into_raw(Box::new(LruEntry::new(k, v))))
+            })
         }
     }
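Two techniques carry this hunk: `mem::replace` swaps a fresh `MaybeUninit` into the recycled node through the raw pointer and hands back the old one, whose contents can then be taken with `assume_init`, and `Box::into_raw` + `NonNull::new_unchecked` turns a brand-new allocation straight into the pointer the map stores. A standalone sketch with a simplified `Slot` type (not the crate's `LruEntry`):

```rust
use std::mem::{self, MaybeUninit};
use std::ptr::NonNull;

struct Slot {
    val: MaybeUninit<String>,
}

fn main() {
    // Allocate a node and immediately convert it into a NonNull pointer,
    // mirroring how a new cache entry would be created.
    let node: NonNull<Slot> = unsafe {
        NonNull::new_unchecked(Box::into_raw(Box::new(Slot {
            val: MaybeUninit::new(String::from("old")),
        })))
    };
    let node_ptr = node.as_ptr();

    // Replace the initialized MaybeUninit with a new value and read out the old one.
    let replaced = unsafe {
        mem::replace(&mut (*node_ptr).val, MaybeUninit::new(String::from("new"))).assume_init()
    };
    assert_eq!(replaced, "old");

    // Clean up: take back ownership and drop the still-initialized value.
    unsafe {
        let slot = *Box::from_raw(node_ptr);
        drop(slot.val.assume_init());
    }
}
```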

@@ -417,12 +426,12 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

             self.detach(node_ptr);
             self.attach(node_ptr);

-            Some(unsafe { &(*(*node_ptr).val.as_ptr()) as &V })
+            Some(unsafe { &*(*node_ptr).val.as_ptr() })
         } else {
             None
         }
@@ -453,12 +462,12 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

             self.detach(node_ptr);
             self.attach(node_ptr);

-            Some(unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V })
+            Some(unsafe { &mut *(*node_ptr).val.as_mut_ptr() })
         } else {
             None
         }
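The one-line change repeated in `get`, `get_mut`, and the accessors below is the crux of the aliasing fix: `&mut **node` had to create a unique reference to the whole node just to obtain a pointer, while `NonNull::as_ptr` copies the pointer without claiming exclusivity. A rough sketch of the two shapes (hypothetical helper names `ptr_via_unique_ref` and `ptr_via_nonnull`, simplified `Node`); the first is presumably the kind of pattern Miri's Stacked Borrows checker objects to when other raw pointers to the same node are still live:

```rust
use std::ptr::NonNull;

struct Node {
    val: u64,
    // In the real cache the node also carries prev/next links that other
    // raw pointers (head, tail, neighbours) point at.
}

// Old style: go through a unique reference to the whole node to get a pointer.
// Creating this `&mut Node` claims exclusive access for its lifetime, which can
// invalidate other outstanding raw pointers to the same node.
fn ptr_via_unique_ref(node: &mut Box<Node>) -> *mut Node {
    &mut **node
}

// New style: copy the pointer out of NonNull; no reference to the node is created.
fn ptr_via_nonnull(node: &mut NonNull<Node>) -> *mut Node {
    node.as_ptr()
}

fn main() {
    let mut boxed = Box::new(Node { val: 1 });
    let p1 = ptr_via_unique_ref(&mut boxed);
    unsafe { (*p1).val += 1 };
    assert_eq!(boxed.val, 2);

    let mut raw = NonNull::new(Box::into_raw(Box::new(Node { val: 10 }))).unwrap();
    let p2 = ptr_via_nonnull(&mut raw);
    unsafe { (*p2).val += 1 };
    assert_eq!(unsafe { raw.as_ref().val }, 11);

    // The NonNull form requires manual cleanup.
    unsafe { drop(Box::from_raw(raw.as_ptr())) };
}
```

The diff applies the same substitution to `get_or_insert`, `get_or_insert_mut`, `promote`, and `demote` below.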
@@ -491,22 +500,22 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         F: FnOnce() -> V,
     {
         if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

             self.detach(node_ptr);
             self.attach(node_ptr);

-            unsafe { &(*(*node_ptr).val.as_ptr()) as &V }
+            unsafe { &*(*node_ptr).val.as_ptr() }
         } else {
             let v = f();
-            let (_, mut node) = self.replace_or_create_node(k, v);
+            let (_, node) = self.replace_or_create_node(k, v);
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

-            let node_ptr: *mut LruEntry<K, V> = &mut *node;
             self.attach(node_ptr);

             let keyref = unsafe { (*node_ptr).key.as_ptr() };
             self.map.insert(KeyRef { k: keyref }, node);
-            unsafe { &(*(*node_ptr).val.as_ptr()) as &V }
+            unsafe { &*(*node_ptr).val.as_ptr() }
         }
     }
@@ -537,22 +546,22 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         F: FnOnce() -> V,
     {
         if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

             self.detach(node_ptr);
             self.attach(node_ptr);

-            unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V }
+            unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
         } else {
             let v = f();
-            let (_, mut node) = self.replace_or_create_node(k, v);
+            let (_, node) = self.replace_or_create_node(k, v);
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

-            let node_ptr: *mut LruEntry<K, V> = &mut *node;
             self.attach(node_ptr);

             let keyref = unsafe { (*node_ptr).key.as_ptr() };
             self.map.insert(KeyRef { k: keyref }, node);
-            unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V }
+            unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
         }
     }
@@ -580,7 +589,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         self.map
             .get(k)
-            .map(|node| unsafe { &(*node.val.as_ptr()) as &V })
+            .map(|node| unsafe { &*node.as_ref().val.as_ptr() })
     }

     /// Returns a mutable reference to the value corresponding to the key in the cache or `None`
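`peek` only needs shared access, so it can go through `NonNull::as_ref` for the duration of the read and never form a `&mut` at all. A small sketch of that read-only shape (simplified `Slot`, not the crate's code):

```rust
use std::mem::MaybeUninit;
use std::ptr::NonNull;

struct Slot {
    val: MaybeUninit<String>,
}

fn main() {
    let node = NonNull::new(Box::into_raw(Box::new(Slot {
        val: MaybeUninit::new(String::from("value")),
    })))
    .unwrap();

    // Peek-style read: a shared reference derived from the NonNull, used only
    // for the duration of the read; no `&mut` to the node is formed.
    let peeked: &str = unsafe { (*node.as_ref().val.as_ptr()).as_str() };
    assert_eq!(peeked, "value");

    // Manual cleanup, since the NonNull does not own the allocation.
    unsafe {
        let slot = *Box::from_raw(node.as_ptr());
        drop(slot.val.assume_init());
    }
}
```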
@@ -607,7 +616,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         match self.map.get_mut(k) {
             None => None,
-            Some(node) => Some(unsafe { &mut (*node.val.as_mut_ptr()) as &mut V }),
+            Some(node) => Some(unsafe { &mut *(*node.as_ptr()).val.as_mut_ptr() }),
         }
     }
@@ -692,13 +701,18 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         match self.map.remove(k) {
             None => None,
-            Some(mut old_node) => {
-                unsafe {
+            Some(old_node) => {
+                let mut old_node = unsafe {
+                    let mut old_node = *Box::from_raw(old_node.as_ptr());
                     ptr::drop_in_place(old_node.key.as_mut_ptr());
-                }
-                let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
-                self.detach(node_ptr);
-                unsafe { Some(old_node.val.assume_init()) }
+
+                    old_node
+                };
+
+                self.detach(&mut old_node);
+
+                let LruEntry { key: _, val, .. } = old_node;
+                unsafe { Some(val.assume_init()) }
             }
         }
     }
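`pop` is where ownership comes back: the stored pointer is turned into an owning `Box` again with `Box::from_raw`, the key is dropped in place, and the still-initialized value is moved out with `assume_init`. A standalone sketch of that sequence (simplified `Slot` with separate key/value fields, not the crate's `LruEntry`):

```rust
use std::mem::MaybeUninit;
use std::ptr;
use std::ptr::NonNull;

struct Slot {
    key: MaybeUninit<String>,
    val: MaybeUninit<Vec<u8>>,
}

fn main() {
    // A node as the map would store it: heap-allocated, tracked only by pointer.
    let node = NonNull::new(Box::into_raw(Box::new(Slot {
        key: MaybeUninit::new(String::from("k")),
        val: MaybeUninit::new(vec![1, 2, 3]),
    })))
    .unwrap();

    // "pop": take back ownership of the allocation, drop the key, return the value.
    let val = unsafe {
        let mut slot = *Box::from_raw(node.as_ptr());
        ptr::drop_in_place(slot.key.as_mut_ptr());
        slot.val.assume_init()
    };
    assert_eq!(val, vec![1, 2, 3]);
}
```

Because the fields are `MaybeUninit`, dropping the reclaimed `Slot` itself does not double-drop the key or value; only the explicit `drop_in_place` and `assume_init` touch them.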
@@ -729,10 +743,13 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         match self.map.remove(k) {
             None => None,
-            Some(mut old_node) => {
-                let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
-                self.detach(node_ptr);
-                unsafe { Some((old_node.key.assume_init(), old_node.val.assume_init())) }
+            Some(old_node) => {
+                let mut old_node = unsafe { *Box::from_raw(old_node.as_ptr()) };
+
+                self.detach(&mut old_node);
+
+                let LruEntry { key, val, .. } = old_node;
+                unsafe { Some((key.assume_init(), val.assume_init())) }
             }
         }
     }
@@ -793,7 +810,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
             self.detach(node_ptr);
             self.attach(node_ptr);
         }
@@ -829,7 +846,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
             self.detach(node_ptr);
             self.attach_last(node_ptr);
         }
@@ -1018,10 +1035,10 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
             let old_key = KeyRef {
                 k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
             };
-            let mut old_node = self.map.remove(&old_key).unwrap();
-            let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
+            let old_node = self.map.remove(&old_key).unwrap();
+            let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr();
             self.detach(node_ptr);
-            Some(old_node)
+            unsafe { Some(Box::from_raw(node_ptr)) }
         } else {
             None
         }
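The `remove_last` hunk above returns the evicted node by rebuilding the `Box` from the detached pointer, so the caller receives an owned value and no manual cleanup is left behind. A minimal sketch of handing ownership back (hypothetical `take_back` helper, simplified `Node`):

```rust
use std::ptr::NonNull;

struct Node {
    val: u32,
}

// Hand ownership of a pointer-tracked node back to the caller as a Box.
// Safety contract (assumed): `node` came from `Box::into_raw` and nothing else
// will use the pointer once the node has been removed from the map.
fn take_back(node: NonNull<Node>) -> Box<Node> {
    unsafe { Box::from_raw(node.as_ptr()) }
}

fn main() {
    let node = NonNull::new(Box::into_raw(Box::new(Node { val: 7 }))).unwrap();
    let owned: Box<Node> = take_back(node);
    assert_eq!(owned.val, 7);
    // `owned` is dropped normally here; no manual cleanup is needed anymore.
}
```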
@@ -1057,9 +1074,10 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {

 impl<K, V, S> Drop for LruCache<K, V, S> {
     fn drop(&mut self) {
-        self.map.values_mut().for_each(|e| unsafe {
-            ptr::drop_in_place(e.key.as_mut_ptr());
-            ptr::drop_in_place(e.val.as_mut_ptr());
+        self.map.drain().for_each(|(_, node)| unsafe {
+            let mut node = *Box::from_raw(node.as_ptr());
+            ptr::drop_in_place((node).key.as_mut_ptr());
+            ptr::drop_in_place((node).val.as_mut_ptr());
         });
         // We rebox the head/tail, and because these are maybe-uninit
         // they do not have the absent k/v dropped.
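Because the map no longer owns its entries, `Drop` has to reclaim each allocation itself; `drain()` takes the pointers by value so each one can be turned back into a `Box` and its manually-initialized fields dropped in place. A standalone sketch of that shape (hypothetical `Cache` wrapper, simplified `Slot`, not the crate's code); a change like this is the sort of thing one would re-check with `cargo +nightly miri test`, which is presumably what the `miri-stack-borrow` branch name refers to:

```rust
use std::collections::HashMap;
use std::mem::MaybeUninit;
use std::ptr;
use std::ptr::NonNull;

struct Slot {
    key: MaybeUninit<String>,
    val: MaybeUninit<String>,
}

// Hypothetical wrapper that owns pointer-tracked slots, like the cache owns its entries.
struct Cache {
    map: HashMap<u32, NonNull<Slot>>,
}

impl Drop for Cache {
    fn drop(&mut self) {
        // Take every pointer out of the map, rebuild the owning Box, and drop
        // the manually-initialized fields in place.
        self.map.drain().for_each(|(_, node)| unsafe {
            let mut slot = *Box::from_raw(node.as_ptr());
            ptr::drop_in_place(slot.key.as_mut_ptr());
            ptr::drop_in_place(slot.val.as_mut_ptr());
        });
    }
}

fn main() {
    let mut cache = Cache { map: HashMap::new() };
    let node = NonNull::new(Box::into_raw(Box::new(Slot {
        key: MaybeUninit::new(String::from("k")),
        val: MaybeUninit::new(String::from("v")),
    })))
    .unwrap();
    cache.map.insert(1, node);
    // Dropping the cache frees every slot exactly once.
    drop(cache);
}
```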
