@@ -4,7 +4,7 @@ use std::collections::BTreeMap;
 use std::hash::Hash;
 use std::num::NonZeroU32;
 use std::ops::{Index, IndexMut};
-use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::atomic::{AtomicU32, Ordering};

 use super::fxhash::FxHashMap;

@@ -13,12 +13,12 @@ pub(super) type Handle = NonZeroU32;
 /// A store that associates values of type `T` with numeric handles. A value can
 /// be looked up using its handle.
 pub(super) struct OwnedStore<T: 'static> {
-    counter: &'static AtomicUsize,
+    counter: &'static AtomicU32,
     data: BTreeMap<Handle, T>,
 }

 impl<T> OwnedStore<T> {
-    pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+    pub(super) fn new(counter: &'static AtomicU32) -> Self {
         // Ensure the handle counter isn't 0, which would panic later,
         // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
         assert_ne!(counter.load(Ordering::SeqCst), 0);
@@ -30,7 +30,7 @@ impl<T> OwnedStore<T> {
 impl<T> OwnedStore<T> {
     pub(super) fn alloc(&mut self, x: T) -> Handle {
         let counter = self.counter.fetch_add(1, Ordering::SeqCst);
-        let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
+        let handle = Handle::new(counter).expect("`proc_macro` handle counter overflowed");
         assert!(self.data.insert(handle, x).is_none());
         handle
     }
@@ -60,7 +60,7 @@ pub(super) struct InternedStore<T: 'static> {
 }

 impl<T: Copy + Eq + Hash> InternedStore<T> {
-    pub(super) fn new(counter: &'static AtomicUsize) -> Self {
+    pub(super) fn new(counter: &'static AtomicU32) -> Self {
         InternedStore { owned: OwnedStore::new(counter), interner: FxHashMap::default() }
     }
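A minimal standalone sketch of the allocation pattern the diff changes, assuming a hypothetical static counter named COUNTER and a free function next_handle that are illustrative only, not part of the patch. With the counter declared directly as AtomicU32, the value returned by fetch_add can be passed straight to Handle::new (NonZeroU32::new) without the old `as u32` cast, and the counter must start at a non-zero value, as the assert_ne! in OwnedStore::new checks.

use std::num::NonZeroU32;
use std::sync::atomic::{AtomicU32, Ordering};

type Handle = NonZeroU32;

// Hypothetical counter; starts at 1 so the first handle is non-zero.
static COUNTER: AtomicU32 = AtomicU32::new(1);

fn next_handle() -> Handle {
    // Mirrors OwnedStore::alloc: the counter is already a u32,
    // so no `as u32` cast is needed before Handle::new.
    let n = COUNTER.fetch_add(1, Ordering::SeqCst);
    Handle::new(n).expect("handle counter overflowed")
}

fn main() {
    let a = next_handle();
    let b = next_handle();
    // Each allocation yields a distinct, non-zero handle.
    assert_ne!(a, b);
    println!("a = {a}, b = {b}");
}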