@@ -203,16 +203,31 @@ const ROOT_NODE: DropIdx = DropIdx::from_u32(0);
 /// in `build_mir`.
 #[derive(Debug)]
 struct DropTree {
-    /// Drops in the tree.
-    drops: IndexVec<DropIdx, (DropData, DropIdx)>,
-    /// Map for finding the inverse of the `next_drop` relation:
-    ///
-    /// `previous_drops[(drops[i].1, drops[i].0.local, drops[i].0.kind)] == i`
-    previous_drops: FxHashMap<(DropIdx, Local, DropKind), DropIdx>,
+    /// Nodes in the drop tree, containing drop data and a link to the next node.
+    drops: IndexVec<DropIdx, DropNode>,
+    /// Map for finding the index of an existing node, given its contents.
+    existing_drops_map: FxHashMap<DropNodeKey, DropIdx>,
     /// Edges into the `DropTree` that need to be added once it's lowered.
     entry_points: Vec<(DropIdx, BasicBlock)>,
 }
 
+/// A single node in the drop tree.
+#[derive(Debug)]
+struct DropNode {
+    /// Info about the drop to be performed at this node in the drop tree.
+    data: DropData,
+    /// Index of the "next" drop to perform (in drop order, not declaration order).
+    next: DropIdx,
+}
+
+/// Subset of [`DropNode`] used for reverse lookup in a hash table.
+#[derive(Debug, PartialEq, Eq, Hash)]
+struct DropNodeKey {
+    next: DropIdx,
+    local: Local,
+    kind: DropKind,
+}
+
 impl Scope {
     /// Whether there's anything to do for the cleanup path, that is,
     /// when unwinding through this scope. This includes destructors,
@@ -247,7 +262,7 @@ trait DropTreeBuilder<'tcx> {
 
     /// Links a block outside the drop tree, `from`, to the block `to` inside
     /// the drop tree.
-    fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock);
+    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock);
 }
 
 impl DropTree {
@@ -258,20 +273,29 @@ impl DropTree {
         let fake_source_info = SourceInfo::outermost(DUMMY_SP);
         let fake_data =
             DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage };
-        let drop_idx = DropIdx::MAX;
-        let drops = IndexVec::from_elem_n((fake_data, drop_idx), 1);
-        Self { drops, entry_points: Vec::new(), previous_drops: FxHashMap::default() }
+        let drops = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]);
+        Self { drops, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() }
     }
 
-    fn add_drop(&mut self, drop: DropData, next: DropIdx) -> DropIdx {
+    /// Adds a node to the drop tree, consisting of drop data and the index of
+    /// the "next" drop (in drop order), which could be the sentinel [`ROOT_NODE`].
+    ///
+    /// If there is already an equivalent node in the tree, nothing is added, and
+    /// that node's index is returned. Otherwise, the new node's index is returned.
+    fn add_drop(&mut self, data: DropData, next: DropIdx) -> DropIdx {
         let drops = &mut self.drops;
         *self
-            .previous_drops
-            .entry((next, drop.local, drop.kind))
-            .or_insert_with(|| drops.push((drop, next)))
+            .existing_drops_map
+            .entry(DropNodeKey { next, local: data.local, kind: data.kind })
+            // Create a new node, and also add its index to the map.
+            .or_insert_with(|| drops.push(DropNode { data, next }))
     }
 
-    fn add_entry(&mut self, from: BasicBlock, to: DropIdx) {
+    /// Registers `from` as an entry point to this drop tree, at `to`.
+    ///
+    /// During [`Self::build_mir`], `from` will be linked to the corresponding
+    /// block within the drop tree.
+    fn add_entry_point(&mut self, from: BasicBlock, to: DropIdx) {
         debug_assert!(to < self.drops.next_index());
         self.entry_points.push((to, from));
     }
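
The deduplicating insert in `add_drop` is the core of how the tree stays small: a node is identified by its contents (`next`, `local`, `kind`), so re-adding the same drop on the same path returns the existing index instead of growing the tree. Below is a minimal, self-contained Rust sketch of that interning pattern; the names `Node`, `NodeKey`, and `Tree::add` are simplified stand-ins invented for illustration, not the compiler's actual types.

use std::collections::HashMap;

// Simplified stand-in for `DropNode`: some payload plus the index of the
// "next" node in drop order.
#[derive(Clone, Copy, Debug)]
struct Node {
    local: u32,
    next: usize,
}

// Simplified stand-in for `DropNodeKey`: the parts of a node that identify it.
#[derive(PartialEq, Eq, Hash)]
struct NodeKey {
    local: u32,
    next: usize,
}

struct Tree {
    nodes: Vec<Node>,                  // plays the role of `drops: IndexVec<DropIdx, DropNode>`
    existing: HashMap<NodeKey, usize>, // plays the role of `existing_drops_map`
}

impl Tree {
    // Mirrors the shape of `DropTree::add_drop`: look the node up by its
    // contents, and only push a new one if no equivalent node exists yet.
    fn add(&mut self, local: u32, next: usize) -> usize {
        let nodes = &mut self.nodes;
        *self.existing.entry(NodeKey { local, next }).or_insert_with(|| {
            nodes.push(Node { local, next });
            nodes.len() - 1
        })
    }
}

fn main() {
    // Index 0 acts as the sentinel root node, like `ROOT_NODE` in the real tree.
    let mut tree = Tree { nodes: vec![Node { local: 0, next: 0 }], existing: HashMap::new() };
    let a = tree.add(1, 0);
    let b = tree.add(1, 0); // same contents, so the same index comes back
    assert_eq!(a, b);
    assert_eq!(tree.nodes.len(), 2); // root + one deduplicated node
}
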
@@ -326,13 +350,13 @@ impl DropTree {
         let entry_points = &mut self.entry_points;
         entry_points.sort();
 
-        for (drop_idx, drop_data) in self.drops.iter_enumerated().rev() {
+        for (drop_idx, drop_node) in self.drops.iter_enumerated().rev() {
             if entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                 let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                 needs_block[drop_idx] = Block::Own;
                 while entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                     let entry_block = entry_points.pop().unwrap().1;
-                    T::add_entry(cfg, entry_block, block);
+                    T::link_entry_point(cfg, entry_block, block);
                 }
             }
             match needs_block[drop_idx] {
@@ -344,10 +368,10 @@ impl DropTree {
                     blocks[drop_idx] = blocks[pred];
                 }
             }
-            if let DropKind::Value = drop_data.0.kind {
-                needs_block[drop_data.1] = Block::Own;
+            if let DropKind::Value = drop_node.data.kind {
+                needs_block[drop_node.next] = Block::Own;
             } else if drop_idx != ROOT_NODE {
-                match &mut needs_block[drop_data.1] {
+                match &mut needs_block[drop_node.next] {
                     pred @ Block::None => *pred = Block::Shares(drop_idx),
                     pred @ Block::Shares(_) => *pred = Block::Own,
                     Block::Own => (),
@@ -364,34 +388,35 @@ impl DropTree {
         cfg: &mut CFG<'tcx>,
         blocks: &IndexSlice<DropIdx, Option<BasicBlock>>,
     ) {
-        for (drop_idx, drop_data) in self.drops.iter_enumerated().rev() {
+        for (drop_idx, drop_node) in self.drops.iter_enumerated().rev() {
             let Some(block) = blocks[drop_idx] else { continue };
-            match drop_data.0.kind {
+            match drop_node.data.kind {
                 DropKind::Value => {
                     let terminator = TerminatorKind::Drop {
-                        target: blocks[drop_data.1].unwrap(),
+                        target: blocks[drop_node.next].unwrap(),
                         // The caller will handle this if needed.
                         unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
-                        place: drop_data.0.local.into(),
+                        place: drop_node.data.local.into(),
                         replace: false,
                     };
-                    cfg.terminate(block, drop_data.0.source_info, terminator);
+                    cfg.terminate(block, drop_node.data.source_info, terminator);
                 }
                 // Root nodes don't correspond to a drop.
                 DropKind::Storage if drop_idx == ROOT_NODE => {}
                 DropKind::Storage => {
                     let stmt = Statement {
-                        source_info: drop_data.0.source_info,
-                        kind: StatementKind::StorageDead(drop_data.0.local),
+                        source_info: drop_node.data.source_info,
+                        kind: StatementKind::StorageDead(drop_node.data.local),
                     };
                     cfg.push(block, stmt);
-                    let target = blocks[drop_data.1].unwrap();
+                    let target = blocks[drop_node.next].unwrap();
                     if target != block {
                         // Diagnostics don't use this `Span` but debuginfo
                         // might. Since we don't want breakpoints to be placed
                         // here, especially when this is on an unwind path, we
                         // use `DUMMY_SP`.
-                        let source_info = SourceInfo { span: DUMMY_SP, ..drop_data.0.source_info };
+                        let source_info =
+                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                         let terminator = TerminatorKind::Goto { target };
                         cfg.terminate(block, source_info, terminator);
                     }
@@ -673,11 +698,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             .flat_map(|scope| &scope.drops)
             .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
 
-        drops.add_entry(block, drop_idx);
+        drops.add_entry_point(block, drop_idx);
 
         // `build_drop_trees` doesn't have access to our source_info, so we
         // create a dummy terminator now. `TerminatorKind::UnwindResume` is used
         // because MIR type checking will panic if it hasn't been overwritten.
+        // (See `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
         self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);
 
         self.cfg.start_new_block().unit()
@@ -708,11 +734,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 drop_idx = drops.add_drop(*drop, drop_idx);
             }
         }
-        drops.add_entry(block, drop_idx);
+        drops.add_entry_point(block, drop_idx);
 
         // `build_drop_trees` doesn't have access to our source_info, so we
         // create a dummy terminator now. `TerminatorKind::UnwindResume` is used
         // because MIR type checking will panic if it hasn't been overwritten.
+        // (See `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
         self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);
     }
 
@@ -1119,7 +1146,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         );
 
         let next_drop = self.diverge_cleanup();
-        self.scopes.unwind_drops.add_entry(start, next_drop);
+        self.scopes.unwind_drops.add_entry_point(start, next_drop);
     }
 
     /// Sets up a path that performs all required cleanup for dropping a
@@ -1153,7 +1180,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             scope.cached_coroutine_drop_block = Some(cached_drop);
         }
 
-        self.scopes.coroutine_drops.add_entry(yield_block, cached_drop);
+        self.scopes.coroutine_drops.add_entry_point(yield_block, cached_drop);
     }
 
     /// Utility function for *non*-scope code to build their own drops
@@ -1274,9 +1301,9 @@ fn build_scope_drops<'tcx>(
                 // `unwind_to` should drop the value that we're about to
                 // schedule. If dropping this value panics, then we continue
                 // with the *next* value on the unwind path.
-                debug_assert_eq!(unwind_drops.drops[unwind_to].0.local, drop_data.local);
-                debug_assert_eq!(unwind_drops.drops[unwind_to].0.kind, drop_data.kind);
-                unwind_to = unwind_drops.drops[unwind_to].1;
+                debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
+                debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
+                unwind_to = unwind_drops.drops[unwind_to].next;
 
                 // If the operand has been moved, and we are not on an unwind
                 // path, then don't generate the drop. (We only take this into
@@ -1286,7 +1313,7 @@ fn build_scope_drops<'tcx>(
                     continue;
                 }
 
-                unwind_drops.add_entry(block, unwind_to);
+                unwind_drops.add_entry_point(block, unwind_to);
 
                 let next = cfg.start_new_block();
                 cfg.terminate(
@@ -1303,9 +1330,9 @@ fn build_scope_drops<'tcx>(
             }
             DropKind::Storage => {
                 if storage_dead_on_unwind {
-                    debug_assert_eq!(unwind_drops.drops[unwind_to].0.local, drop_data.local);
-                    debug_assert_eq!(unwind_drops.drops[unwind_to].0.kind, drop_data.kind);
-                    unwind_to = unwind_drops.drops[unwind_to].1;
+                    debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
+                    debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
+                    unwind_to = unwind_drops.drops[unwind_to].next;
                 }
                 // Only temps and vars need their storage dead.
                 assert!(local.index() > arg_count);
@@ -1335,30 +1362,31 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
         let is_coroutine = self.coroutine.is_some();
 
         // Link the exit drop tree to unwind drop tree.
-        if drops.drops.iter().any(|(drop, _)| drop.kind == DropKind::Value) {
+        if drops.drops.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
             let unwind_target = self.diverge_cleanup_target(else_scope, span);
             let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
-            for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) {
-                match drop_data.0.kind {
+            for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) {
+                match drop_node.data.kind {
                     DropKind::Storage => {
                         if is_coroutine {
                             let unwind_drop = self
                                 .scopes
                                 .unwind_drops
-                                .add_drop(drop_data.0, unwind_indices[drop_data.1]);
+                                .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                             unwind_indices.push(unwind_drop);
                         } else {
-                            unwind_indices.push(unwind_indices[drop_data.1]);
+                            unwind_indices.push(unwind_indices[drop_node.next]);
                         }
                     }
                     DropKind::Value => {
                         let unwind_drop = self
                             .scopes
                             .unwind_drops
-                            .add_drop(drop_data.0, unwind_indices[drop_data.1]);
-                        self.scopes
-                            .unwind_drops
-                            .add_entry(blocks[drop_idx].unwrap(), unwind_indices[drop_data.1]);
+                            .add_drop(drop_node.data, unwind_indices[drop_node.next]);
+                        self.scopes.unwind_drops.add_entry_point(
+                            blocks[drop_idx].unwrap(),
+                            unwind_indices[drop_node.next],
+                        );
                         unwind_indices.push(unwind_drop);
                     }
                 }
@@ -1408,10 +1436,10 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
             // prevent drop elaboration from creating drop flags that would have
             // to be captured by the coroutine. I'm not sure how important this
             // optimization is, but it is here.
-            for (drop_idx, drop_data) in drops.drops.iter_enumerated() {
-                if let DropKind::Value = drop_data.0.kind {
-                    debug_assert!(drop_data.1 < drops.drops.next_index());
-                    drops.entry_points.push((drop_data.1, blocks[drop_idx].unwrap()));
+            for (drop_idx, drop_node) in drops.drops.iter_enumerated() {
+                if let DropKind::Value = drop_node.data.kind {
+                    debug_assert!(drop_node.next < drops.drops.next_index());
+                    drops.entry_points.push((drop_node.next, blocks[drop_idx].unwrap()));
                 }
             }
             Self::build_unwind_tree(cfg, drops, fn_span, resume_block);
@@ -1442,8 +1470,16 @@ impl<'tcx> DropTreeBuilder<'tcx> for ExitScopes {
     fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
         cfg.start_new_block()
     }
-    fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
-        cfg.block_data_mut(from).terminator_mut().kind = TerminatorKind::Goto { target: to };
+    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
+        // There should be an existing terminator with real source info and a
+        // dummy TerminatorKind. Replace it with a proper goto.
+        // (The dummy is added by `break_scope` and `break_for_else`.)
+        let term = cfg.block_data_mut(from).terminator_mut();
+        if let TerminatorKind::UnwindResume = term.kind {
+            term.kind = TerminatorKind::Goto { target: to };
+        } else {
+            span_bug!(term.source_info.span, "unexpected dummy terminator kind: {:?}", term.kind);
+        }
     }
 }
 
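One behavioral tightening hides in this hunk: the old `add_entry` unconditionally overwrote the terminator of `from`, while `link_entry_point` now insists that the terminator still be the dummy `UnwindResume` installed by `break_scope`/`break_for_else`, and raises `span_bug!` otherwise. A tiny standalone sketch of that handshake, with a made-up `Terminator` enum standing in for MIR's `TerminatorKind` (illustration only, not the compiler's types):

// Hypothetical, simplified model of the dummy-terminator handshake.
#[derive(Debug, PartialEq)]
enum Terminator {
    Dummy,                  // placeholder written when the entry point is registered
    Goto { target: usize }, // real edge filled in once the drop tree is lowered
}

fn link_entry_point(terminators: &mut [Terminator], from: usize, to: usize) {
    // Refuse to overwrite anything other than the expected placeholder,
    // mirroring the `span_bug!` check in the real `ExitScopes` impl.
    assert_eq!(terminators[from], Terminator::Dummy, "unexpected terminator");
    terminators[from] = Terminator::Goto { target: to };
}

fn main() {
    let mut terminators = vec![Terminator::Dummy, Terminator::Dummy];
    link_entry_point(&mut terminators, 0, 1);
    assert_eq!(terminators[0], Terminator::Goto { target: 1 });
}
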
@@ -1453,7 +1489,7 @@ impl<'tcx> DropTreeBuilder<'tcx> for CoroutineDrop {
     fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
         cfg.start_new_block()
     }
-    fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
+    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
         let term = cfg.block_data_mut(from).terminator_mut();
         if let TerminatorKind::Yield { ref mut drop, .. } = term.kind {
             *drop = Some(to);
@@ -1473,7 +1509,7 @@ impl<'tcx> DropTreeBuilder<'tcx> for Unwind {
     fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock {
         cfg.start_new_cleanup_block()
     }
-    fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
+    fn link_entry_point(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) {
         let term = &mut cfg.block_data_mut(from).terminator_mut();
         match &mut term.kind {
             TerminatorKind::Drop { unwind, .. } => {