@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::{iter, mem};
 
 use rustc_ast::token::{Delimiter, Token, TokenKind};
@@ -6,6 +7,7 @@ use rustc_ast::tokenstream::{
     Spacing, ToAttrTokenStream,
 };
 use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, HasTokens};
+use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::PResult;
 use rustc_session::parse::ParseSess;
 use rustc_span::{sym, Span, DUMMY_SP};
@@ -256,35 +258,48 @@ impl<'a> Parser<'a> {
             res?
         };
 
+        // - `None`: Our target doesn't support tokens at all (e.g. `NtIdent`).
+        // - `Some(None)`: Our target supports tokens and has none.
+        // - `Some(Some(_))`: Our target already has tokens set (e.g. we've
+        //   parsed something like `#[my_attr] $item`).
+        let ret_can_hold_tokens = matches!(ret.tokens_mut(), Some(None));
+
         // Ignore any attributes we've previously processed. This happens when
         // an inner call to `collect_tokens` returns an AST node and then an
         // outer call ends up with the same AST node without any additional
         // wrapping layer.
-        let ret_attrs: AttrVec = ret
-            .attrs()
-            .iter()
-            .cloned()
-            .filter(|attr| {
-                let is_unseen = self.capture_state.seen_attrs.insert(attr.id);
-                is_unseen
-            })
-            .collect();
+        let mut seen_indices = FxHashSet::default();
+        for (i, attr) in ret.attrs().iter().enumerate() {
+            let is_unseen = self.capture_state.seen_attrs.insert(attr.id);
+            if !is_unseen {
+                seen_indices.insert(i);
+            }
+        }
+        let ret_attrs: Cow<'_, [Attribute]> =
+            if seen_indices.is_empty() {
+                Cow::Borrowed(ret.attrs())
+            } else {
+                let ret_attrs =
+                    ret.attrs()
+                        .iter()
+                        .enumerate()
+                        .filter_map(|(i, attr)| {
+                            if seen_indices.contains(&i) { None } else { Some(attr.clone()) }
+                        })
+                        .collect();
+                Cow::Owned(ret_attrs)
+            };
 
         // When we're not in "definite capture mode", then skip collecting and
-        // return early if either of the following conditions hold.
-        // - `None`: Our target doesn't support tokens at all (e.g. `NtIdent`).
-        // - `Some(Some(_))`: Our target already has tokens set (e.g. we've
-        //   parsed something like `#[my_attr] $item`). The actual parsing code
-        //   takes care of prepending any attributes to the nonterminal, so we
-        //   don't need to modify the already captured tokens.
+        // return early if `ret` doesn't support tokens or already has some.
        //
         // Note that this check is independent of `force_collect`. There's no
         // need to collect tokens when we don't support tokens or already have
         // tokens.
         let definite_capture_mode = self.capture_cfg
             && matches!(self.capture_state.capturing, Capturing::Yes)
             && has_cfg_or_cfg_attr(&ret_attrs);
-        if !definite_capture_mode && matches!(ret.tokens_mut(), None | Some(Some(_))) {
+        if !definite_capture_mode && !ret_can_hold_tokens {
             return Ok(ret);
         }
 
@@ -406,12 +421,6 @@ impl<'a> Parser<'a> {
         });
         let mut tokens_used = false;
 
-        // If we support tokens and don't already have them, store the newly captured tokens.
-        if let Some(target_tokens @ None) = ret.tokens_mut() {
-            tokens_used = true;
-            *target_tokens = Some(tokens.clone());
-        }
-
         // If in "definite capture mode" we need to register a replace range
         // for the `#[cfg]` and/or `#[cfg_attr]` attrs. This allows us to run
         // eager cfg-expansion on the captured token stream.
@@ -432,7 +441,8 @@ impl<'a> Parser<'a> {
             // cfg-expand this AST node.
             let start_pos =
                 if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos };
-            let target = AttrsTarget { attrs: ret_attrs, tokens };
+            let target =
+                AttrsTarget { attrs: ret_attrs.iter().cloned().collect(), tokens: tokens.clone() };
             tokens_used = true;
             self.capture_state
                 .parser_replacements
@@ -444,6 +454,13 @@ impl<'a> Parser<'a> {
             self.capture_state.inner_attr_parser_ranges.clear();
             self.capture_state.seen_attrs.clear();
         }
+
+        // If we support tokens and don't already have them, store the newly captured tokens.
+        if let Some(target_tokens @ None) = ret.tokens_mut() {
+            tokens_used = true;
+            *target_tokens = Some(tokens);
+        }
+
         assert!(tokens_used); // check we didn't create `tokens` unnecessarily
         Ok(ret)
     }
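
The central change above is how `ret_attrs` is built: instead of unconditionally cloning every attribute into an `AttrVec`, the already-seen attributes are located first, and a `Cow<'_, [Attribute]>` borrows the original slice whenever nothing needs to be dropped. Below is a minimal standalone sketch of that borrow-or-filter pattern, not the compiler's code: it uses `std::collections::HashSet` in place of rustc's `FxHashSet`, and the `Attr` struct and `filter_unseen` function are made-up stand-ins for `Attribute` and the inline logic in `collect_tokens`.

use std::borrow::Cow;
use std::collections::HashSet;

#[derive(Clone, Debug, PartialEq)]
struct Attr {
    id: u32,
}

/// Returns the attributes whose ids were not yet in `seen`, recording the new
/// ids as a side effect. Borrows the input slice when every attribute is
/// unseen, so the common case allocates nothing.
fn filter_unseen<'a>(attrs: &'a [Attr], seen: &mut HashSet<u32>) -> Cow<'a, [Attr]> {
    // First pass: remember the positions of already-seen attributes.
    let mut seen_indices = HashSet::new();
    for (i, attr) in attrs.iter().enumerate() {
        let is_unseen = seen.insert(attr.id);
        if !is_unseen {
            seen_indices.insert(i);
        }
    }
    if seen_indices.is_empty() {
        // Nothing to drop: hand back the original slice without cloning.
        Cow::Borrowed(attrs)
    } else {
        // Second pass: clone only the attributes we keep.
        let kept = attrs
            .iter()
            .enumerate()
            .filter_map(|(i, attr)| {
                if seen_indices.contains(&i) { None } else { Some(attr.clone()) }
            })
            .collect();
        Cow::Owned(kept)
    }
}

fn main() {
    let mut seen = HashSet::new();
    seen.insert(2);
    let attrs = vec![Attr { id: 1 }, Attr { id: 2 }, Attr { id: 3 }];

    // Id 2 was already seen, so this takes the cloning path.
    let filtered = filter_unseen(&attrs, &mut seen);
    assert!(matches!(filtered, Cow::Owned(_)));
    assert_eq!(filtered.as_ref(), &[Attr { id: 1 }, Attr { id: 3 }]);

    // A call with entirely fresh ids borrows instead of allocating.
    let fresh = vec![Attr { id: 4 }, Attr { id: 5 }];
    let filtered = filter_unseen(&fresh, &mut seen);
    assert!(matches!(filtered, Cow::Borrowed(_)));
}

The two-pass shape mirrors the diff: detecting duplicates is cheap and always runs, while cloning happens only on the uncommon path where an inner `collect_tokens` call has already processed an attribute.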