From 26bbe2cd61fb06bbf93c7781288ef384514ec822 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sat, 16 Nov 2019 11:35:37 +0000 Subject: [PATCH 01/19] Temp: Initial test update --- src/test/mir-opt/basic_assignment.rs | 8 +- src/test/mir-opt/box_expr.rs | 41 +++--- src/test/mir-opt/const_prop/boxes.rs | 20 +-- .../mir-opt/generator-storage-dead-unwind.rs | 71 ++++----- src/test/mir-opt/graphviz.rs | 13 +- src/test/mir-opt/issue-38669.rs | 20 +-- src/test/mir-opt/issue-49232.rs | 55 +++---- src/test/mir-opt/issue-62289.rs | 66 ++++----- src/test/mir-opt/loop_test.rs | 19 ++- src/test/mir-opt/match-arm-scopes.rs | 120 ++++++++------- src/test/mir-opt/match_false_edges.rs | 138 +++++++++--------- .../mir-opt/nll/region-subtyping-basic.rs | 6 +- .../mir-opt/no-spurious-drop-after-call.rs | 6 +- .../mir-opt/packed-struct-drop-aligned.rs | 12 +- src/test/mir-opt/retag.rs | 10 +- src/test/mir-opt/simple-match.rs | 20 +-- src/test/mir-opt/simplify_cfg.rs | 10 +- src/test/mir-opt/unusual-item-types.rs | 6 - 18 files changed, 305 insertions(+), 336 deletions(-) diff --git a/src/test/mir-opt/basic_assignment.rs b/src/test/mir-opt/basic_assignment.rs index ca0e9fa811a26..8fcf4d7613f88 100644 --- a/src/test/mir-opt/basic_assignment.rs +++ b/src/test/mir-opt/basic_assignment.rs @@ -39,14 +39,14 @@ fn main() { // StorageLive(_5); // StorageLive(_6); // _6 = move _4; -// replace(_5 <- move _6) -> [return: bb2, unwind: bb5]; +// replace(_5 <- move _6) -> [return: bb1, unwind: bb5]; // } // ... -// bb2: { -// drop(_6) -> [return: bb6, unwind: bb4]; +// bb1: { +// drop(_6) -> [return: bb2, unwind: bb6]; // } // ... // bb5 (cleanup): { -// drop(_6) -> bb4; +// drop(_6) -> bb6; // } // END rustc.main.SimplifyCfg-initial.after.mir diff --git a/src/test/mir-opt/box_expr.rs b/src/test/mir-opt/box_expr.rs index 8dc6b73edf6d4..c13f249d83f3c 100644 --- a/src/test/mir-opt/box_expr.rs +++ b/src/test/mir-opt/box_expr.rs @@ -32,45 +32,40 @@ impl Drop for S { // StorageLive(_1); // StorageLive(_2); // _2 = Box(S); -// (*_2) = const S::new() -> [return: bb2, unwind: bb3]; +// (*_2) = const S::new() -> [return: bb1, unwind: bb7]; // } -// -// bb1 (cleanup): { -// resume; -// } -// -// bb2: { +// bb1: { // _1 = move _2; -// drop(_2) -> bb4; +// drop(_2) -> bb2; // } -// -// bb3 (cleanup): { -// drop(_2) -> bb1; -// } -// -// bb4: { +// bb2: { // StorageDead(_2); // StorageLive(_3); // StorageLive(_4); // _4 = move _1; -// _3 = const std::mem::drop::>(move _4) -> [return: bb5, unwind: bb7]; +// _3 = const std::mem::drop::>(move _4) -> [return: bb3, unwind: bb5]; // } -// -// bb5: { +// bb3: { // StorageDead(_4); // StorageDead(_3); // _0 = (); -// drop(_1) -> bb8; +// drop(_1) -> bb4; +// } +// bb4: { +// StorageDead(_1); +// return; +// } +// bb5 (cleanup): { +// drop(_4) -> bb6; // } // bb6 (cleanup): { -// drop(_1) -> bb1; +// drop(_1) -> bb8; // } // bb7 (cleanup): { -// drop(_4) -> bb6; +// drop(_2) -> bb8; // } -// bb8: { -// StorageDead(_1); -// return; +// bb8 (cleanup): { +// resume; // } // } // END rustc.main.ElaborateDrops.before.mir diff --git a/src/test/mir-opt/const_prop/boxes.rs b/src/test/mir-opt/const_prop/boxes.rs index cf134dadf2789..fe25d6a0a2d13 100644 --- a/src/test/mir-opt/const_prop/boxes.rs +++ b/src/test/mir-opt/const_prop/boxes.rs @@ -22,16 +22,16 @@ fn main() { // _2 = (*_3); // _1 = Add(move _2, const 0i32); // ... -// drop(_3) -> [return: bb2, unwind: bb1]; +// drop(_3) -> [return: bb1, unwind: bb2]; // } -// bb1 (cleanup): { -// resume; -// } -// bb2: { +// bb1: { // ... // _0 = (); // ... 
// } +// bb2 (cleanup): { +// resume; +// } // END rustc.main.ConstProp.before.mir // START rustc.main.ConstProp.after.mir // bb0: { @@ -43,14 +43,14 @@ fn main() { // _2 = (*_3); // _1 = Add(move _2, const 0i32); // ... -// drop(_3) -> [return: bb2, unwind: bb1]; +// drop(_3) -> [return: bb1, unwind: bb2]; // } -// bb1 (cleanup): { -// resume; -// } -// bb2: { +// bb1: { // ... // _0 = (); // ... // } +// bb2 (cleanup): { +// resume; +// } // END rustc.main.ConstProp.after.mir diff --git a/src/test/mir-opt/generator-storage-dead-unwind.rs b/src/test/mir-opt/generator-storage-dead-unwind.rs index 109304d6d22cc..66484fc1f72b2 100644 --- a/src/test/mir-opt/generator-storage-dead-unwind.rs +++ b/src/test/mir-opt/generator-storage-dead-unwind.rs @@ -47,66 +47,67 @@ fn main() { // StorageLive(_3); // _3 = Bar(const 6i32,); // ... -// _1 = suspend(move _5) -> [resume: bb2, drop: bb4]; +// _1 = suspend(move _5) -> [resume: bb1, drop: bb5]; // } -// bb1 (cleanup): { -// resume; -// } -// bb2: { +// bb1: { // ... // StorageLive(_6); // StorageLive(_7); // _7 = move _2; -// _6 = const take::(move _7) -> [return: bb7, unwind: bb9]; +// _6 = const take::(move _7) -> [return: bb2, unwind: bb11]; // } -// bb3 (cleanup): { -// StorageDead(_2); -// drop(_1) -> bb1; +// bb2: { +// StorageDead(_7); +// StorageDead(_6); +// StorageLive(_8); +// StorageLive(_9); +// _9 = move _3; +// _8 = const take::(move _9) -> [return: bb3, unwind: bb10]; // } -// bb4: { +// bb3: { +// StorageDead(_9); +// StorageDead(_8); // ... // StorageDead(_3); -// drop(_2) -> [return: bb5, unwind: bb3]; +// StorageDead(_2); +// drop(_1) -> [return: bb4, unwind: bb9]; +// } +// bb4: { +// return; // } // bb5: { -// StorageDead(_2); -// drop(_1) -> [return: bb6, unwind: bb1]; +// ... +// StorageDead(_3); +// drop(_2) -> [return: bb6, unwind: bb8]; // } // bb6: { -// generator_drop; +// StorageDead(_2); +// drop(_1) -> [return: bb7, unwind: bb9]; // } // bb7: { -// StorageDead(_7); -// StorageDead(_6); -// StorageLive(_8); -// StorageLive(_9); -// _9 = move _3; -// _8 = const take::(move _9) -> [return: bb10, unwind: bb11]; +// generator_drop; // } // bb8 (cleanup): { -// StorageDead(_3); // StorageDead(_2); -// drop(_1) -> bb1; +// drop(_1) -> bb9; // } // bb9 (cleanup): { -// StorageDead(_7); -// StorageDead(_6); -// goto -> bb8; +// resume; // } -// bb10: { +// bb10 (cleanup): { // StorageDead(_9); // StorageDead(_8); -// ... -// StorageDead(_3); -// StorageDead(_2); -// drop(_1) -> [return: bb12, unwind: bb1]; +// goto -> bb12; // } // bb11 (cleanup): { -// StorageDead(_9); -// StorageDead(_8); -// goto -> bb8; +// StorageDead(_7); +// StorageDead(_6); +// goto -> bb12; // } -// bb12: { -// return; +// bb12 (cleanup): { +// StorageDead(_3); +// StorageDead(_2); +// drop(_1) -> bb9; // } + // END rustc.main-{{closure}}.StateTransform.before.mir diff --git a/src/test/mir-opt/graphviz.rs b/src/test/mir-opt/graphviz.rs index fcbb189c1117b..f253388261a78 100644 --- a/src/test/mir-opt/graphviz.rs +++ b/src/test/mir-opt/graphviz.rs @@ -8,13 +8,10 @@ fn main() {} // END RUST SOURCE // START rustc.main.mir_map.0.dot // digraph Mir_0_3 { // The name here MUST be an ASCII identifier. -// graph [fontname="monospace"]; -// node [fontname="monospace"]; -// edge [fontname="monospace"]; -// label=>; -// bb0__0_3 [shape="none", label=<
<table border="0" cellborder="1" cellspacing="0"><tr><td bgcolor="gray" align="center" colspan="1">0</td></tr><tr><td align="left" balign="left">_0 = ()<br/></td></tr><tr><td align="left">goto</td></tr></table>>];
-// bb1__0_3 [shape="none", label=<<table border="0" cellborder="1" cellspacing="0"><tr><td bgcolor="gray" align="center" colspan="1">1</td></tr><tr><td align="left">resume</td></tr></table>>];
-// bb2__0_3 [shape="none", label=<<table border="0" cellborder="1" cellspacing="0"><tr><td bgcolor="gray" align="center" colspan="1">2</td></tr><tr><td align="left">return</td></tr></table>>];
-// bb0__0_3 -> bb2__0_3 [label=""];
+// graph [fontname="monospace"];
+// node [fontname="monospace"];
+// edge [fontname="monospace"];
+// label=<fn main() -&gt; ()<br align="left"/>>;
+// bb0__0_3 [shape="none", label=<<table border="0" cellborder="1" cellspacing="0"><tr><td bgcolor="gray" align="center" colspan="1">0</td></tr><tr><td align="left" balign="left">_0 = ()<br/></td></tr><tr><td align="left">return</td></tr></table>
>]; // } // END rustc.main.mir_map.0.dot diff --git a/src/test/mir-opt/issue-38669.rs b/src/test/mir-opt/issue-38669.rs index d980cc891dc40..6621707c74b78 100644 --- a/src/test/mir-opt/issue-38669.rs +++ b/src/test/mir-opt/issue-38669.rs @@ -16,35 +16,35 @@ fn main() { // StorageLive(_1); // _1 = const false; // FakeRead(ForLet, _1); -// goto -> bb2; +// goto -> bb1; // } -// bb1 (cleanup): { -// resume; +// bb1: { +// falseUnwind -> [real: bb2, cleanup: bb6]; // } // bb2: { -// falseUnwind -> [real: bb3, cleanup: bb1]; -// } -// bb3: { // StorageLive(_3); // StorageLive(_4); // _4 = _1; // FakeRead(ForMatchedPlace, _4); -// switchInt(_4) -> [false: bb5, otherwise: bb4]; +// switchInt(_4) -> [false: bb4, otherwise: bb3]; // } // ... -// bb5: { +// bb4: { // _3 = (); // StorageDead(_4); // StorageDead(_3); // _1 = const true; // _2 = (); -// goto -> bb2; +// goto -> bb1; // } -// bb6: { +// bb5: { // _0 = (); // StorageDead(_4); // StorageDead(_3); // StorageDead(_1); // return; // } +// bb6 (cleanup): { +// resume; +// } // END rustc.main.SimplifyCfg-initial.after.mir diff --git a/src/test/mir-opt/issue-49232.rs b/src/test/mir-opt/issue-49232.rs index d0dbcbd7515f8..48fbaf255a9b8 100644 --- a/src/test/mir-opt/issue-49232.rs +++ b/src/test/mir-opt/issue-49232.rs @@ -29,64 +29,55 @@ fn main() { // goto -> bb1; // } // bb1: { -// falseUnwind -> [real: bb3, cleanup: bb4]; +// falseUnwind -> [real: bb2, cleanup: bb11]; // } // bb2: { -// goto -> bb14; -// } -// bb3: { // StorageLive(_2); // StorageLive(_3); // _3 = const true; // FakeRead(ForMatchedPlace, _3); -// switchInt(_3) -> [false: bb5, otherwise: bb6]; -// } -// bb4 (cleanup): { -// resume; +// switchInt(_3) -> [false: bb3, otherwise: bb4]; // } -// bb5: { -// falseEdges -> [real: bb7, imaginary: bb6]; +// bb3: { +// falseEdges -> [real: bb5, imaginary: bb4]; // } -// bb6: { +// bb4: { // _0 = (); -// goto -> bb8; +// goto -> bb10; // } -// bb7: { +// bb5: { // _2 = const 4i32; -// goto -> bb12; -// } -// bb8: { -// StorageDead(_3); -// goto -> bb9; +// goto -> bb8; // } -// bb9: { -// StorageDead(_2); -// goto -> bb2; -// } -// bb10: { +// bb6: { // _4 = (); // unreachable; -// } -// bb11: { -// goto -> bb12; -// } -// bb12: { +// } +// bb7: { +// goto -> bb8; +// } +// bb8: { // FakeRead(ForLet, _2); // StorageDead(_3); // StorageLive(_5); -// StorageLive(_6); +// StorageLive(_6); // _6 = &_2; -// _5 = const std::mem::drop::<&i32>(move _6) -> [return: bb13, unwind: bb4]; +// _5 = const std::mem::drop::<&i32>(move _6) -> [return: bb9, unwind: bb11]; // } -// bb13: { +// bb9: { // StorageDead(_6); // StorageDead(_5); // _1 = (); // StorageDead(_2); // goto -> bb1; // } -// bb14: { +// bb10: { +// StorageDead(_3); +// StorageDead(_2); // return; // } +// bb11 (cleanup): { +// resume; +// } // } // END rustc.main.mir_map.0.mir diff --git a/src/test/mir-opt/issue-62289.rs b/src/test/mir-opt/issue-62289.rs index a3b517e9bca87..b2b1a71e10291 100644 --- a/src/test/mir-opt/issue-62289.rs +++ b/src/test/mir-opt/issue-62289.rs @@ -24,68 +24,68 @@ fn main() { // StorageLive(_3); // StorageLive(_4); // _4 = std::option::Option::::None; -// _3 = const as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb3]; +// _3 = const as std::ops::Try>::into_result(move _4) -> [return: bb1, unwind: bb12]; // } -// bb1 (cleanup): { -// resume; -// } -// bb2: { +// bb1: { // StorageDead(_4); // _5 = discriminant(_3); -// switchInt(move _5) -> [0isize: bb10, 1isize: bb5, otherwise: bb4]; +// switchInt(move _5) -> [0isize: bb6, 1isize: bb3, otherwise: 
bb2]; // } -// bb3 (cleanup): { -// drop(_2) -> bb1; -// } -// bb4: { +// bb2: { // unreachable; // } -// bb5: { +// bb3: { // StorageLive(_6); // _6 = ((_3 as Err).0: std::option::NoneError); // StorageLive(_8); // StorageLive(_9); // _9 = _6; -// _8 = const >::from(move _9) -> [return: bb7, unwind: bb3]; -// } -// bb6: { -// return; +// _8 = const >::from(move _9) -> [return: bb4, unwind: bb12]; // } -// bb7: { +// bb4: { // StorageDead(_9); -// _0 = const > as std::ops::Try>::from_error(move _8) -> [return: bb8, unwind: bb3]; +// _0 = const > as std::ops::Try>::from_error(move _8) -> [return: bb5, unwind: bb12]; // } -// bb8: { +// bb5: { // StorageDead(_8); // StorageDead(_6); // drop(_2) -> bb9; // } -// bb9: { -// StorageDead(_2); -// StorageDead(_1); -// StorageDead(_3); -// goto -> bb6; -// } -// bb10: { +// bb6: { // StorageLive(_10); // _10 = ((_3 as Ok).0: u32); // (*_2) = _10; // StorageDead(_10); // _1 = move _2; -// drop(_2) -> [return: bb12, unwind: bb11]; -// } -// bb11 (cleanup): { -// drop(_1) -> bb1; +// drop(_2) -> [return: bb7, unwind: bb11]; // } -// bb12: { +// bb7: { // StorageDead(_2); // _0 = std::option::Option::>::Some(move _1,); -// drop(_1) -> bb13; +// drop(_1) -> bb8; // } -// bb13: { +// bb8: { +// StorageDead(_1); +// StorageDead(_3); +// goto -> bb10; +// } +// bb9: { +// StorageDead(_2); // StorageDead(_1); // StorageDead(_3); -// goto -> bb6; +// goto -> bb10; +// } +// bb10: { +// return; +// } +// bb11 (cleanup): { +// drop(_1) -> bb13; +// } +// bb12 (cleanup): { +// drop(_2) -> bb13; +// } +// bb13 (cleanup): { +// resume; // } // } // END rustc.test.ElaborateDrops.before.mir diff --git a/src/test/mir-opt/loop_test.rs b/src/test/mir-opt/loop_test.rs index 418febbdc01eb..f0bd001cf8374 100644 --- a/src/test/mir-opt/loop_test.rs +++ b/src/test/mir-opt/loop_test.rs @@ -18,27 +18,26 @@ fn main() { // END RUST SOURCE // START rustc.main.SimplifyCfg-qualify-consts.after.mir // ... -// bb1 (cleanup): { -// resume; -// } -// ... -// bb3: { // Entry into the loop +// bb2: { // Entry into the loop // _1 = (); // StorageDead(_2); // StorageDead(_1); // StorageLive(_4); -// goto -> bb5; +// goto -> bb4; // } // ... -// bb5: { // The loop_block -// falseUnwind -> [real: bb6, cleanup: bb1]; +// bb4: { // The loop_block +// falseUnwind -> [real: bb5, cleanup: bb6]; // } -// bb6: { // The loop body (body_block) +// bb5: { // The loop body (body_block) // StorageLive(_6); // _6 = const 1i32; // FakeRead(ForLet, _6); // StorageDead(_6); -// goto -> bb5; +// goto -> bb4; +// } +// bb6 (cleanup): { +// resume; // } // ... 
// END rustc.main.SimplifyCfg-qualify-consts.after.mir diff --git a/src/test/mir-opt/match-arm-scopes.rs b/src/test/mir-opt/match-arm-scopes.rs index c898d3a6f168c..f6fe02ed37e4f 100644 --- a/src/test/mir-opt/match-arm-scopes.rs +++ b/src/test/mir-opt/match-arm-scopes.rs @@ -58,31 +58,28 @@ fn main() { // } // bb0: { // FakeRead(ForMatchedPlace, _2); -// switchInt((_2.0: bool)) -> [false: bb2, otherwise: bb5]; +// switchInt((_2.0: bool)) -> [false: bb1, otherwise: bb4]; // } -// bb1 (cleanup): { -// resume; +// bb1: { +// falseEdges -> [real: bb7, imaginary: bb2]; // } // bb2: { -// falseEdges -> [real: bb8, imaginary: bb3]; +// falseEdges -> [real: bb13, imaginary: bb3]; // } // bb3: { -// falseEdges -> [real: bb17, imaginary: bb4]; +// falseEdges -> [real: bb21, imaginary: bb22]; // } // bb4: { -// falseEdges -> [real: bb25, imaginary: bb26]; +// switchInt((_2.1: bool)) -> [false: bb2, otherwise: bb5]; // } // bb5: { -// switchInt((_2.1: bool)) -> [false: bb3, otherwise: bb6]; -// } -// bb6: { -// switchInt((_2.0: bool)) -> [false: bb26, otherwise: bb4]; +// switchInt((_2.0: bool)) -> [false: bb22, otherwise: bb3]; // } -// bb7: { // arm 1 +// bb6: { // arm 1 // _0 = const 1i32; -// drop(_7) -> [return: bb23, unwind: bb13]; +// drop(_7) -> [return: bb19, unwind: bb27]; // } -// bb8: { // guard - first time +// bb7: { // guard - first time // StorageLive(_6); // _6 = &(_2.1: bool); // StorageLive(_8); @@ -93,34 +90,23 @@ fn main() { // StorageLive(_10); // _10 = _1; // FakeRead(ForMatchedPlace, _10); -// switchInt(_10) -> [false: bb10, otherwise: bb9]; +// switchInt(_10) -> [false: bb9, otherwise: bb8]; // } -// bb9: { -// falseEdges -> [real: bb11, imaginary: bb10]; +// bb8: { +// falseEdges -> [real: bb10, imaginary: bb9]; // } -// bb10: { // `else` block - first time +// bb9: { // `else` block - first time // _9 = (*_6); // StorageDead(_10); -// switchInt(move _9) -> [false: bb16, otherwise: bb15]; +// switchInt(move _9) -> [false: bb12, otherwise: bb11]; // } -// bb11: { // `return 3` - first time +// bb10: { // `return 3` - first time // _0 = const 3i32; // StorageDead(_10); // StorageDead(_9); -// StorageDead(_8); -// StorageDead(_6); -// goto -> bb14; -// } -// bb12: { -// return; -// } -// bb13 (cleanup): { -// drop(_2) -> bb1; +// goto -> bb25; // } -// bb14: { -// drop(_2) -> [return: bb12, unwind: bb1]; -// } -// bb15: { +// bb11: { // StorageDead(_9); // FakeRead(ForMatchGuard, _3); // FakeRead(ForMatchGuard, _4); @@ -130,15 +116,15 @@ fn main() { // _5 = (_2.1: bool); // StorageLive(_7); // _7 = move (_2.2: std::string::String); -// goto -> bb7; +// goto -> bb6; // } -// bb16: { // guard otherwise case - first time +// bb12: { // guard otherwise case - first time // StorageDead(_9); // StorageDead(_8); // StorageDead(_6); -// falseEdges -> [real: bb5, imaginary: bb3]; +// falseEdges -> [real: bb4, imaginary: bb2]; // } -// bb17: { // guard - second time +// bb13: { // guard - second time // StorageLive(_6); // _6 = &(_2.0: bool); // StorageLive(_8); @@ -149,25 +135,23 @@ fn main() { // StorageLive(_13); // _13 = _1; // FakeRead(ForMatchedPlace, _13); -// switchInt(_13) -> [false: bb19, otherwise: bb18]; +// switchInt(_13) -> [false: bb15, otherwise: bb14]; // } -// bb18: { -// falseEdges -> [real: bb20, imaginary: bb19]; +// bb14: { +// falseEdges -> [real: bb16, imaginary: bb15]; // } -// bb19: { // `else` block - second time +// bb15: { // `else` block - second time // _12 = (*_6); // StorageDead(_13); -// switchInt(move _12) -> [false: bb22, otherwise: bb21]; +// 
switchInt(move _12) -> [false: bb18, otherwise: bb17]; // } -// bb20: { +// bb16: { // `return 3` - second time // _0 = const 3i32; // StorageDead(_13); // StorageDead(_12); -// StorageDead(_8); -// StorageDead(_6); -// goto -> bb14; +// goto -> bb25; // } -// bb21: { // bindings for arm 1 +// bb17: { // bindings for arm 1 // StorageDead(_12); // FakeRead(ForMatchGuard, _3); // FakeRead(ForMatchGuard, _4); @@ -177,46 +161,60 @@ fn main() { // _5 = (_2.0: bool); // StorageLive(_7); // _7 = move (_2.2: std::string::String); -// goto -> bb7; +// goto -> bb6; // } -// bb22: { // Guard otherwise case - second time +// bb18: { // Guard otherwise case - second time // StorageDead(_12); // StorageDead(_8); // StorageDead(_6); -// falseEdges -> [real: bb6, imaginary: bb4]; +// falseEdges -> [real: bb5, imaginary: bb3]; // } -// bb23: { // rest of arm 1 +// bb19: { // rest of arm 1 // StorageDead(_7); // StorageDead(_5); // StorageDead(_8); // StorageDead(_6); -// goto -> bb28; +// goto -> bb24; // } -// bb24: { // arm 2 +// bb20: { // arm 2 // _0 = const 2i32; -// drop(_16) -> [return: bb27, unwind: bb13]; +// drop(_16) -> [return: bb23, unwind: bb27]; // } -// bb25: { // bindings for arm 2 - first pattern +// bb21: { // bindings for arm 2 - first pattern // StorageLive(_15); // _15 = (_2.1: bool); // StorageLive(_16); // _16 = move (_2.2: std::string::String); -// goto -> bb24; +// goto -> bb20; // } -// bb26: { // bindings for arm 2 - second pattern +// bb22: { // bindings for arm 2 - second pattern // StorageLive(_15); // _15 = (_2.1: bool); // StorageLive(_16); // _16 = move (_2.2: std::string::String); -// goto -> bb24; +// goto -> bb20; // } -// bb27: { // rest of arm 2 +// bb23: { // rest of arm 2 // StorageDead(_16); // StorageDead(_15); -// goto -> bb28; +// goto -> bb24; +// } +// bb24: { +// drop(_2) -> [return: bb26, unwind: bb28]; // } -// bb28: { -// drop(_2) -> [return: bb12, unwind: bb1]; +// bb25: { +// StorageDead(_8); +// StorageDead(_6); +// drop(_2) -> [return: bb26, unwind: bb28]; +// } +// bb26: { +// return; +// } +// bb27 (cleanup): { +// drop(_2) -> bb28; +// } +// bb28 (cleanup): { +// resume; // } // END rustc.complicated_match.SimplifyCfg-initial.after.mir // START rustc.complicated_match.ElaborateDrops.after.mir diff --git a/src/test/mir-opt/match_false_edges.rs b/src/test/mir-opt/match_false_edges.rs index 648856b5523d3..afc49c48a7f05 100644 --- a/src/test/mir-opt/match_false_edges.rs +++ b/src/test/mir-opt/match_false_edges.rs @@ -45,35 +45,32 @@ fn main() { // _2 = std::option::Option::::Some(const 42i32,); // FakeRead(ForMatchedPlace, _2); // _3 = discriminant(_2); -// switchInt(move _3) -> [0isize: bb4, 1isize: bb2, otherwise: bb5]; +// switchInt(move _3) -> [0isize: bb3, 1isize: bb1, otherwise: bb4]; // } -// bb1 (cleanup): { -// resume; +// bb1: { +// falseEdges -> [real: bb5, imaginary: bb2]; //pre_binding1 // } // bb2: { -// falseEdges -> [real: bb6, imaginary: bb3]; //pre_binding1 -// } -// bb3: { -// falseEdges -> [real: bb10, imaginary: bb4]; //pre_binding2 +// falseEdges -> [real: bb9, imaginary: bb3]; //pre_binding2 // } -// bb4: { //pre_binding3 and arm3 +// bb3: { // pre_binding3 and arm3 // _1 = (const 3i32, const 3i32); -// goto -> bb11; +// goto -> bb10; // } -// bb5: { +// bb4: { // unreachable; // } -// bb6: { // binding1 and guard +// bb5: { // binding1 and guard // StorageLive(_6); // _6 = &(((promoted[0]: std::option::Option) as Some).0: i32); // _4 = &shallow _2; // StorageLive(_7); -// _7 = const guard() -> [return: bb7, unwind: bb1]; +// _7 = 
const guard() -> [return: bb6, unwind: bb11]; // } -// bb7: { // end of guard -// switchInt(move _7) -> [false: bb9, otherwise: bb8]; +// bb6: { // end of guard +// switchInt(move _7) -> [false: bb8, otherwise: bb7]; // } -// bb8: { // arm1 +// bb7: { // arm1 // StorageDead(_7); // FakeRead(ForMatchGuard, _4); // FakeRead(ForGuardBinding, _6); @@ -85,14 +82,14 @@ fn main() { // StorageDead(_8); // StorageDead(_5); // StorageDead(_6); -// goto -> bb11; +// goto -> bb10; // } -// bb9: { // to pre_binding2 +// bb8: { // to pre_binding2 // StorageDead(_7); // StorageDead(_6); -// goto -> bb3; +// goto -> bb2; // } -// bb10: { // arm2 +// bb9: { // arm2 // StorageLive(_9); // _9 = ((_2 as Some).0: i32); // StorageLive(_10); @@ -100,14 +97,17 @@ fn main() { // _1 = (const 2i32, move _10); // StorageDead(_10); // StorageDead(_9); -// goto -> bb11; +// goto -> bb10; // } -// bb11: { // arm3 +// bb10: { // match exit // StorageDead(_2); // StorageDead(_1); // _0 = (); // return; // } +// bb11 (cleanup): { +// resume; +// } // END rustc.full_tested_match.PromoteTemps.after.mir // // START rustc.full_tested_match2.PromoteTemps.before.mir @@ -116,31 +116,28 @@ fn main() { // _2 = std::option::Option::::Some(const 42i32,); // FakeRead(ForMatchedPlace, _2); // _3 = discriminant(_2); -// switchInt(move _3) -> [0isize: bb3, 1isize: bb2, otherwise: bb4]; +// switchInt(move _3) -> [0isize: bb2, 1isize: bb1, otherwise: bb3]; // } -// bb1 (cleanup): { -// resume; +// bb1: { +// falseEdges -> [real: bb4, imaginary: bb2]; // } // bb2: { -// falseEdges -> [real: bb5, imaginary: bb3]; +// falseEdges -> [real: bb8, imaginary: bb9]; // } // bb3: { -// falseEdges -> [real: bb9, imaginary: bb10]; -// } -// bb4: { // to arm3 (can skip 2 since this is `Some`) // unreachable; // } -// bb5: { // binding1 and guard +// bb4: { // binding1 and guard // StorageLive(_6); // _6 = &((_2 as Some).0: i32); // _4 = &shallow _2; // StorageLive(_7); -// _7 = const guard() -> [return: bb6, unwind: bb1]; +// _7 = const guard() -> [return: bb5, unwind: bb11]; // } -// bb6: { // end of guard -// switchInt(move _7) -> [false: bb8, otherwise: bb7]; +// bb5: { // end of guard +// switchInt(move _7) -> [false: bb7, otherwise: bb6]; // } -// bb7: { +// bb6: { // StorageDead(_7); // FakeRead(ForMatchGuard, _4); // FakeRead(ForGuardBinding, _6); @@ -152,18 +149,18 @@ fn main() { // StorageDead(_8); // StorageDead(_5); // StorageDead(_6); -// goto -> bb11; +// goto -> bb10; // } -// bb8: { // to pre_binding3 (can skip 2 since this is `Some`) +// bb7: { // to pre_binding3 (can skip 2 since this is `Some`) // StorageDead(_7); // StorageDead(_6); -// falseEdges -> [real: bb10, imaginary: bb3]; +// falseEdges -> [real: bb9, imaginary: bb2]; // } -// bb9: { // arm2 +// bb8: { // arm2 // _1 = (const 3i32, const 3i32); -// goto -> bb11; +// goto -> bb10; // } -// bb10: { // binding3 and arm3 +// bb9: { // binding3 and arm3 // StorageLive(_9); // _9 = ((_2 as Some).0: i32); // StorageLive(_10); @@ -171,14 +168,17 @@ fn main() { // _1 = (const 2i32, move _10); // StorageDead(_10); // StorageDead(_9); -// goto -> bb11; +// goto -> bb10; // } -// bb11: { +// bb10: { // StorageDead(_2); // StorageDead(_1); // _0 = (); // return; // } +// bb11 (cleanup): { +// resume; +// } // END rustc.full_tested_match2.PromoteTemps.before.mir // // START rustc.main.PromoteTemps.before.mir @@ -187,31 +187,28 @@ fn main() { // _2 = std::option::Option::::Some(const 1i32,); // FakeRead(ForMatchedPlace, _2); // _4 = discriminant(_2); -// switchInt(move _4) -> [1isize: bb2, 
otherwise: bb3]; +// switchInt(move _4) -> [1isize: bb1, otherwise: bb2]; // } -// bb1 (cleanup): { -// resume; +// bb1: { +// falseEdges -> [real: bb4, imaginary: bb2]; // } // bb2: { -// falseEdges -> [real: bb5, imaginary: bb3]; +// falseEdges -> [real: bb8, imaginary: bb3]; // } // bb3: { -// falseEdges -> [real: bb9, imaginary: bb4]; +// falseEdges -> [real: bb9, imaginary: bb13]; // } // bb4: { -// falseEdges -> [real: bb10, imaginary: bb14]; -// } -// bb5: { // StorageLive(_7); // _7 = &((_2 as Some).0: i32); // _5 = &shallow _2; // StorageLive(_8); -// _8 = const guard() -> [return: bb6, unwind: bb1]; +// _8 = const guard() -> [return: bb5, unwind: bb15]; // } -// bb6: { //end of guard1 -// switchInt(move _8) -> [false: bb8, otherwise: bb7]; +// bb5: { //end of guard1 +// switchInt(move _8) -> [false: bb7, otherwise: bb6]; // } -// bb7: { +// bb6: { // StorageDead(_8); // FakeRead(ForMatchGuard, _5); // FakeRead(ForGuardBinding, _7); @@ -220,34 +217,34 @@ fn main() { // _1 = const 1i32; // StorageDead(_6); // StorageDead(_7); -// goto -> bb15; +// goto -> bb14; // } -// bb8: { +// bb7: { // StorageDead(_8); // StorageDead(_7); -// falseEdges -> [real: bb3, imaginary: bb3]; +// falseEdges -> [real: bb2, imaginary: bb2]; // } -// bb9: { // binding2 & arm2 +// bb8: { // binding2 & arm2 // StorageLive(_9); // _9 = _2; // _1 = const 2i32; // StorageDead(_9); -// goto -> bb15; +// goto -> bb14; // } -// bb10: { // binding3: Some(y) if guard2(y) +// bb9: { // binding3: Some(y) if guard2(y) // StorageLive(_11); // _11 = &((_2 as Some).0: i32); // _5 = &shallow _2; // StorageLive(_12); // StorageLive(_13); // _13 = (*_11); -// _12 = const guard2(move _13) -> [return: bb11, unwind: bb1]; +// _12 = const guard2(move _13) -> [return: bb10, unwind: bb15]; // } -// bb11: { // end of guard2 +// bb10: { // end of guard2 // StorageDead(_13); -// switchInt(move _12) -> [false: bb13, otherwise: bb12]; +// switchInt(move _12) -> [false: bb12, otherwise: bb11]; // } -// bb12: { // binding4 & arm4 +// bb11: { // binding4 & arm4 // StorageDead(_12); // FakeRead(ForMatchGuard, _5); // FakeRead(ForGuardBinding, _11); @@ -256,24 +253,27 @@ fn main() { // _1 = const 3i32; // StorageDead(_10); // StorageDead(_11); -// goto -> bb15; +// goto -> bb14; // } -// bb13: { +// bb12: { // StorageDead(_12); // StorageDead(_11); -// falseEdges -> [real: bb14, imaginary: bb14]; +// falseEdges -> [real: bb13, imaginary: bb13]; // } -// bb14: { +// bb13: { // StorageLive(_14); // _14 = _2; // _1 = const 4i32; // StorageDead(_14); -// goto -> bb15; +// goto -> bb14; // } -// bb15: { +// bb14: { // StorageDead(_2); // StorageDead(_1); // _0 = (); // return; // } +// bb15 (cleanup): { +// resume; +// } // END rustc.main.PromoteTemps.before.mir diff --git a/src/test/mir-opt/nll/region-subtyping-basic.rs b/src/test/mir-opt/nll/region-subtyping-basic.rs index 8228d9740f0d3..9e8b5d4ed6f70 100644 --- a/src/test/mir-opt/nll/region-subtyping-basic.rs +++ b/src/test/mir-opt/nll/region-subtyping-basic.rs @@ -22,9 +22,9 @@ fn main() { // END RUST SOURCE // START rustc.main.nll.0.mir -// | '_#2r | U0 | {bb2[0..=8], bb3[0], bb5[0..=2]} -// | '_#3r | U0 | {bb2[1..=8], bb3[0], bb5[0..=2]} -// | '_#4r | U0 | {bb2[4..=8], bb3[0], bb5[0..=2]} +// | '_#2r | U0 | {bb1[0..=8], bb2[0], bb4[0..=2]} +// | '_#3r | U0 | {bb1[1..=8], bb2[0], bb4[0..=2]} +// | '_#4r | U0 | {bb1[4..=8], bb2[0], bb4[0..=2]} // END rustc.main.nll.0.mir // START rustc.main.nll.0.mir // let _2: &'_#3r usize; diff --git a/src/test/mir-opt/no-spurious-drop-after-call.rs 
b/src/test/mir-opt/no-spurious-drop-after-call.rs index 782bc31186ca5..e4e906edd195a 100644 --- a/src/test/mir-opt/no-spurious-drop-after-call.rs +++ b/src/test/mir-opt/no-spurious-drop-after-call.rs @@ -10,11 +10,11 @@ fn main() { // END RUST SOURCE // START rustc.main.ElaborateDrops.before.mir -// bb2: { +// bb1: { // StorageDead(_3); -// _1 = const std::mem::drop::(move _2) -> [return: bb3, unwind: bb4]; +// _1 = const std::mem::drop::(move _2) -> [return: bb2, unwind: bb3]; // } -// bb3: { +// bb2: { // StorageDead(_2); // StorageDead(_4); // StorageDead(_1); diff --git a/src/test/mir-opt/packed-struct-drop-aligned.rs b/src/test/mir-opt/packed-struct-drop-aligned.rs index da73cc96348f0..6599635ebe8a0 100644 --- a/src/test/mir-opt/packed-struct-drop-aligned.rs +++ b/src/test/mir-opt/packed-struct-drop-aligned.rs @@ -36,23 +36,23 @@ impl Drop for Droppy { // _6 = move (_1.0: Aligned); // drop(_6) -> [return: bb4, unwind: bb3]; // } -// bb1 (cleanup): { -// resume; -// } -// bb2: { +// bb1: { // StorageDead(_1); // return; // } +// bb2 (cleanup): { +// resume; +// } // bb3 (cleanup): { // (_1.0: Aligned) = move _4; -// drop(_1) -> bb1; +// drop(_1) -> bb2; // } // bb4: { // StorageDead(_6); // (_1.0: Aligned) = move _4; // StorageDead(_4); // _0 = (); -// drop(_1) -> [return: bb2, unwind: bb1]; +// drop(_1) -> [return: bb1, unwind: bb2]; // } // } // END rustc.main.EraseRegions.before.mir diff --git a/src/test/mir-opt/retag.rs b/src/test/mir-opt/retag.rs index 96b848eb1d41c..00795dc56df4b 100644 --- a/src/test/mir-opt/retag.rs +++ b/src/test/mir-opt/retag.rs @@ -65,12 +65,12 @@ fn main() { // ... // bb0: { // ... -// _3 = const Test::foo(move _4, move _6) -> [return: bb2, unwind: bb3]; +// _3 = const Test::foo(move _4, move _6) -> [return: bb1, unwind: bb7]; // } // // ... // -// bb2: { +// bb1: { // Retag(_3); // ... // _9 = move _3; @@ -87,13 +87,13 @@ fn main() { // _12 = move _13 as *mut i32 (Misc); // Retag([raw] _12); // ... -// _16 = move _17(move _18) -> bb5; +// _16 = move _17(move _18) -> bb3; // } // -// bb5: { +// bb3: { // Retag(_16); // ... -// _20 = const Test::foo_shr(move _21, move _23) -> [return: bb6, unwind: bb7]; +// _20 = const Test::foo_shr(move _21, move _23) -> [return: bb4, unwind: bb6]; // } // // ... diff --git a/src/test/mir-opt/simple-match.rs b/src/test/mir-opt/simple-match.rs index fc1a3bb1bf453..9e5709e9d5b06 100644 --- a/src/test/mir-opt/simple-match.rs +++ b/src/test/mir-opt/simple-match.rs @@ -14,26 +14,20 @@ fn main() {} // START rustc.match_bool.mir_map.0.mir // bb0: { // FakeRead(ForMatchedPlace, _1); -// switchInt(_1) -> [false: bb3, otherwise: bb2]; +// switchInt(_1) -> [false: bb2, otherwise: bb1]; // } -// bb1 (cleanup): { -// resume; +// bb1: { +// falseEdges -> [real: bb3, imaginary: bb2]; // } // bb2: { -// falseEdges -> [real: bb4, imaginary: bb3]; -// } -// bb3: { // _0 = const 20usize; -// goto -> bb5; +// goto -> bb4; // } -// bb4: { +// bb3: { // _0 = const 10usize; -// goto -> bb5; -// } -// bb5: { -// goto -> bb6; +// goto -> bb4; // } -// bb6: { +// bb4: { // return; // } // END rustc.match_bool.mir_map.0.mir diff --git a/src/test/mir-opt/simplify_cfg.rs b/src/test/mir-opt/simplify_cfg.rs index ef843f7158130..adecf80411736 100644 --- a/src/test/mir-opt/simplify_cfg.rs +++ b/src/test/mir-opt/simplify_cfg.rs @@ -19,20 +19,20 @@ fn bar() -> bool { // goto -> bb1; // } // bb1: { -// falseUnwind -> [real: bb3, cleanup: bb4]; +// falseUnwind -> [real: bb2, cleanup: bb11]; // } // ... -// bb11: { +// bb9: { // ... 
// goto -> bb1; // } // END rustc.main.SimplifyCfg-initial.before.mir // START rustc.main.SimplifyCfg-initial.after.mir // bb0: { -// falseUnwind -> [real: bb1, cleanup: bb2]; +// falseUnwind -> [real: bb1, cleanup: bb6]; // } // ... -// bb5: { +// bb4: { // ... // goto -> bb0; // } @@ -43,7 +43,7 @@ fn bar() -> bool { // } // bb1: { // StorageLive(_2); -// _2 = const bar() -> bb3; +// _2 = const bar() -> bb2; // } // END rustc.main.SimplifyCfg-early-opt.before.mir // START rustc.main.SimplifyCfg-early-opt.after.mir diff --git a/src/test/mir-opt/unusual-item-types.rs b/src/test/mir-opt/unusual-item-types.rs index f4d848dfc7ad1..77ee2bc8bfecf 100644 --- a/src/test/mir-opt/unusual-item-types.rs +++ b/src/test/mir-opt/unusual-item-types.rs @@ -30,9 +30,6 @@ fn main() { // _0 = const 2i32; // return; // } -// bb1 (cleanup): { -// resume; -// } // END rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir // START rustc.E-V-{{constant}}.mir_map.0.mir @@ -40,9 +37,6 @@ fn main() { // _0 = const 5isize; // return; // } -// bb1 (cleanup): { -// resume; -// } // END rustc.E-V-{{constant}}.mir_map.0.mir // START rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir From 179fc4211e9a75f1214b51c8c76ab195799ca852 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sat, 16 Nov 2019 13:23:31 +0000 Subject: [PATCH 02/19] Temp: Initial impl --- src/librustc_mir/build/block.rs | 23 +- src/librustc_mir/build/expr/as_rvalue.rs | 4 +- src/librustc_mir/build/expr/into.rs | 15 +- src/librustc_mir/build/matches/mod.rs | 14 +- src/librustc_mir/build/matches/test.rs | 4 +- src/librustc_mir/build/mod.rs | 58 +- src/librustc_mir/build/scope.rs | 1105 +++++++++++----------- 7 files changed, 579 insertions(+), 644 deletions(-) diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs index 7353ca9285ddb..7749bcc51f4c6 100644 --- a/src/librustc_mir/build/block.rs +++ b/src/librustc_mir/build/block.rs @@ -26,16 +26,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), move |this| { this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| { if targeted_by_break { - // This is a `break`-able block - let exit_block = this.cfg.start_new_block(); - let block_exit = this.in_breakable_scope( - None, exit_block, destination.clone(), |this| { - this.ast_block_stmts(destination, block, span, stmts, expr, - safety_mode) - }); - this.cfg.terminate(unpack!(block_exit), source_info, - TerminatorKind::Goto { target: exit_block }); - exit_block.unit() + this.in_breakable_scope( + None, + destination.clone(), + span, + |this| Some(this.ast_block_stmts( + destination, + block, + span, + stmts, + expr, + safety_mode, + )), + ) } else { this.ast_block_stmts(destination, block, span, stmts, expr, safety_mode) diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index 37eb0cc9d961e..332b1da195902 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -257,14 +257,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ExprKind::Yield { value } => { let value = unpack!(block = this.as_operand(block, scope, value)); let resume = this.cfg.start_new_block(); - let cleanup = this.generator_drop_cleanup(); + this.generator_drop_cleanup(block); this.cfg.terminate( block, source_info, TerminatorKind::Yield { value: value, resume: resume, - drop: cleanup, + drop: None, }, ); resume.and(this.unit_rvalue()) diff --git a/src/librustc_mir/build/expr/into.rs 
b/src/librustc_mir/build/expr/into.rs index e991181189f41..d1bccbac008b9 100644 --- a/src/librustc_mir/build/expr/into.rs +++ b/src/librustc_mir/build/expr/into.rs @@ -157,7 +157,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // body, even when the exact code in the body cannot unwind let loop_block = this.cfg.start_new_block(); - let exit_block = this.cfg.start_new_block(); // start the loop this.cfg.terminate( @@ -168,18 +167,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.in_breakable_scope( Some(loop_block), - exit_block, destination.clone(), + expr_span, move |this| { // conduct the test, if necessary let body_block = this.cfg.start_new_block(); - let diverge_cleanup = this.diverge_cleanup(); + this.diverge_from(loop_block); this.cfg.terminate( loop_block, source_info, TerminatorKind::FalseUnwind { real_target: body_block, - unwind: Some(diverge_cleanup), + unwind: None, }, ); @@ -193,9 +192,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { source_info, TerminatorKind::Goto { target: loop_block }, ); + None }, - ); - exit_block.unit() + ) } ExprKind::Call { ty, fun, args, from_hir_call } => { let intrinsic = match ty.kind { @@ -244,17 +243,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .collect(); let success = this.cfg.start_new_block(); - let cleanup = this.diverge_cleanup(); this.record_operands_moved(&args); + this.diverge_from(block); this.cfg.terminate( block, source_info, TerminatorKind::Call { func: fun, args, - cleanup: Some(cleanup), + cleanup: None, // FIXME(varkor): replace this with an uninhabitedness-based check. // This requires getting access to the current module to call // `tcx.is_ty_uninhabited_from`, which is currently tricky to do. diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index ada547aa39c9e..4cb0c9e162a2d 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -12,7 +12,6 @@ use crate::build::{GuardFrame, GuardFrameLocal, LocalsForNode}; use crate::hair::{self, *}; use rustc::hir::HirId; use rustc::mir::*; -use rustc::middle::region; use rustc::ty::{self, CanonicalUserTypeAnnotation, Ty}; use rustc::ty::layout::VariantIdx; use rustc_index::bit_set::BitSet; @@ -228,8 +227,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }; // Step 5. Create everything else: the guards and the arms. 
- let match_scope = self.scopes.topmost(); - let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| { let arm_source_info = self.source_info(arm.span); let arm_scope = (arm.scope, arm_source_info); @@ -250,7 +247,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { arm.guard.clone(), &fake_borrow_temps, scrutinee_span, - match_scope, + //match_scope, ); } else { arm_block = this.cfg.start_new_block(); @@ -261,7 +258,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { arm.guard.clone(), &fake_borrow_temps, scrutinee_span, - match_scope, + //match_scope, ); this.cfg.terminate( binding_end, @@ -1353,7 +1350,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { guard: Option>, fake_borrows: &Vec<(PlaceRef<'_, 'tcx>, Local)>, scrutinee_span: Span, - region_scope: region::Scope, + //region_scope: region::Scope, ) -> BasicBlock { debug!("bind_and_guard_matched_candidate(candidate={:?})", candidate); @@ -1524,11 +1521,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }); } - self.exit_scope( - source_info.span, - region_scope, + self.exit_top_scope( otherwise_post_guard_block, candidate.otherwise_block.unwrap(), + source_info, ); // We want to ensure that the matched candidates are bound diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs index 5c2f72c0a061f..47e772a386a2e 100644 --- a/src/librustc_mir/build/matches/test.rs +++ b/src/librustc_mir/build/matches/test.rs @@ -449,7 +449,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let bool_ty = self.hir.bool_ty(); let eq_result = self.temp(bool_ty, source_info.span); let eq_block = self.cfg.start_new_block(); - let cleanup = self.diverge_cleanup(); + self.diverge_from(block); self.cfg.terminate(block, source_info, TerminatorKind::Call { func: Operand::Constant(box Constant { span: source_info.span, @@ -464,7 +464,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }), args: vec![val, expect], destination: Some((eq_result.clone(), eq_block)), - cleanup: Some(cleanup), + cleanup: None, from_hir_call: false, }); diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 6b458cc244c9e..d6d011452647c 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -316,11 +316,6 @@ struct Builder<'a, 'tcx> { upvar_mutbls: Vec, unit_temp: Option>, - /// Cached block with the `RESUME` terminator; this is created - /// when first set of cleanups are built. - cached_resume_block: Option, - /// Cached block with the `RETURN` terminator. - cached_return_block: Option, /// Cached block with the `UNREACHABLE` terminator. cached_unreachable_block: Option, } @@ -614,43 +609,32 @@ where id: body.value.hir_id.local_id, data: region::ScopeData::Arguments }; - let mut block = START_BLOCK; let source_info = builder.source_info(span); let call_site_s = (call_site_scope, source_info); - unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| { - if should_abort_on_panic(tcx, fn_def_id, abi) { - builder.schedule_abort(); - } - + unpack!(builder.in_scope(call_site_s, LintLevel::Inherited, |builder| { let arg_scope_s = (arg_scope, source_info); - // `return_block` is called when we evaluate a `return` expression, so - // we just use `START_BLOCK` here. 
- unpack!(block = builder.in_breakable_scope( + // Attribute epilogue to function's closing brace + let fn_end = span.shrink_to_hi(); + let return_block = unpack!(builder.in_breakable_scope( None, - START_BLOCK, Place::return_place(), + fn_end, |builder| { - builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| { - builder.args_and_body(block, &arguments, arg_scope, &body.value) - }) + Some(builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| { + builder.args_and_body(START_BLOCK, &arguments, arg_scope, &body.value) + })) }, )); - // Attribute epilogue to function's closing brace - let fn_end = span.shrink_to_hi(); let source_info = builder.source_info(fn_end); - let return_block = builder.return_block(); - builder.cfg.terminate(block, source_info, - TerminatorKind::Goto { target: return_block }); - builder.cfg.terminate(return_block, source_info, - TerminatorKind::Return); + builder.cfg.terminate(return_block, source_info, TerminatorKind::Return); + let should_abort = should_abort_on_panic(tcx, fn_def_id, abi); + builder.build_drop_trees(should_abort); // Attribute any unreachable codepaths to the function's closing brace if let Some(unreachable_block) = builder.cached_unreachable_block { - builder.cfg.terminate(unreachable_block, source_info, - TerminatorKind::Unreachable); + builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); } return_block.unit() })); - assert_eq!(block, builder.return_block()); let mut spread_arg = None; if abi == Abi::RustCall { @@ -694,9 +678,6 @@ fn construct_const<'a, 'tcx>( let source_info = builder.source_info(span); builder.cfg.terminate(block, source_info, TerminatorKind::Return); - // Constants can't `return` so a return block should not be created. - assert_eq!(builder.cached_return_block, None); - // Constants may be match expressions in which case an unreachable block may // be created, so terminate it properly. if let Some(unreachable_block) = builder.cached_unreachable_block { @@ -738,7 +719,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fn_span: span, arg_count, is_generator, - scopes: Default::default(), + scopes: scope::Scopes::new(is_generator), block_context: BlockContext::new(), source_scopes: IndexVec::new(), source_scope: OUTERMOST_SOURCE_SCOPE, @@ -755,8 +736,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { upvar_mutbls, var_indices: Default::default(), unit_temp: None, - cached_resume_block: None, - cached_return_block: None, cached_unreachable_block: None, }; @@ -933,17 +912,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } } - - fn return_block(&mut self) -> BasicBlock { - match self.cached_return_block { - Some(rb) => rb, - None => { - let rb = self.cfg.start_new_block(); - self.cached_return_block = Some(rb); - rb - } - } - } } /////////////////////////////////////////////////////////////////////////// diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 1b3d8641f204e..b34dcd22b3975 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -89,8 +89,24 @@ use rustc::hir; use rustc::mir::*; use syntax_pos::{DUMMY_SP, Span}; use rustc_data_structures::fx::FxHashMap; -use std::collections::hash_map::Entry; -use std::mem; +use rustc_index::vec::{IndexVec, Idx}; + +#[derive(Debug)] +pub struct Scopes<'tcx> { + scopes: Vec, + /// The current set of breakable scopes. See module comment for more details. + breakable_scopes: Vec>, + + /// Drops that need to be done on unwind paths. See the comment on + /// [DropTree] for more details. 
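+ ///
+ /// (Assumed behaviour, inferred from the rest of this patch rather than
+ /// stated by it: entries accumulate here while the body is lowered and
+ /// are only turned into real cleanup blocks once, in `build_drop_trees`,
+ /// so that all unwind paths can share a single chain of drop blocks.)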
+ unwind_drops: DropTree, + + /// Drops that need to be done on paths to the `GeneratorDrop` terminator. + generator_drops: DropTree, + + // TODO: what's this? + // cached_unwind_drop: DropIdx, +} #[derive(Debug)] struct Scope { @@ -110,27 +126,12 @@ struct Scope { drops: Vec, moved_locals: Vec, - - /// The cache for drop chain on “normal” exit into a particular BasicBlock. - cached_exits: FxHashMap<(BasicBlock, region::Scope), BasicBlock>, - - /// The cache for drop chain on "generator drop" exit. - cached_generator_drop: Option, - - /// The cache for drop chain on "unwind" exit. - cached_unwind: CachedBlock, -} - -#[derive(Debug, Default)] -pub struct Scopes<'tcx> { - scopes: Vec, - /// The current set of breakable scopes. See module comment for more details. - breakable_scopes: Vec>, } -#[derive(Debug)] +#[derive(Clone, Copy, Debug)] struct DropData { - /// span where drop obligation was incurred (typically where place was declared) + /// The `Span` where drop obligation was incurred (typically where place was + /// declared) span: Span, /// local to drop @@ -138,46 +139,23 @@ struct DropData { /// Whether this is a value Drop or a StorageDead. kind: DropKind, - - /// The cached blocks for unwinds. - cached_block: CachedBlock, -} - -#[derive(Debug, Default, Clone, Copy)] -struct CachedBlock { - /// The cached block for the cleanups-on-diverge path. This block - /// contains code to run the current drop and all the preceding - /// drops (i.e., those having lower index in Drop’s Scope drop - /// array) - unwind: Option, - - /// The cached block for unwinds during cleanups-on-generator-drop path - /// - /// This is split from the standard unwind path here to prevent drop - /// elaboration from creating drop flags that would have to be captured - /// by the generator. I'm not sure how important this optimization is, - /// but it is here. - generator_drop: Option, } -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub(crate) enum DropKind { Value, Storage, } -#[derive(Clone, Debug)] +#[derive(Debug)] struct BreakableScope<'tcx> { /// Region scope of the loop region_scope: region::Scope, - /// Where the body of the loop begins. `None` if block - continue_block: Option, - /// Block to branch into when the loop or block terminates (either by being - /// `break`-en out from, or by having its condition to become false) - break_block: BasicBlock, /// The destination of the loop/block expression itself (i.e., where to put - /// the result of a `break` expression) + /// the result of a `break` or `return` expression) break_destination: Place<'tcx>, + /// Drops that happen on the + drops: DropTree, } /// The target of an expression that breaks out of a scope @@ -188,59 +166,31 @@ pub enum BreakableTarget { Return, } -impl CachedBlock { - fn invalidate(&mut self) { - *self = CachedBlock::default(); - } +rustc_index::newtype_index! { + struct DropIdx { .. } +} - fn get(&self, generator_drop: bool) -> Option { - if generator_drop { - self.generator_drop - } else { - self.unwind - } - } +const ROOT_NODE: DropIdx = DropIdx::from_u32_const(0); +const CONTINUE_NODE: DropIdx = DropIdx::from_u32_const(1); - fn ref_mut(&mut self, generator_drop: bool) -> &mut Option { - if generator_drop { - &mut self.generator_drop - } else { - &mut self.unwind - } - } +/// A tree of drops that we have deferred lowering. +// TODO say some more. +#[derive(Debug)] +struct DropTree { + /// The next item to drop, if there is one. 
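+ ///
+ /// (A reading of the code below, not a guarantee: each entry holds a
+ /// `(DropData, DropIdx)` pair whose second element is the drop to run
+ /// *after* this one, and the first `num_roots` entries are placeholder
+ /// roots that terminate those chains.)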
+ // TODO actual comment + drops: IndexVec, + /// Map for finding the inverse of the `next_drop` relation: + /// + /// `previous_drops[(next_drop[i], drops[i].local, drops[i].kind] == i` + previous_drops: FxHashMap<(DropIdx, Local, DropKind), DropIdx>, + /// Edges into the `DropTree` that need to be added once it's lowered. + entry_points: Vec<(DropIdx, BasicBlock)>, + /// The number of root nodes in the tree. + num_roots: DropIdx, } impl Scope { - /// Invalidates all the cached blocks in the scope. - /// - /// Should always be run for all inner scopes when a drop is pushed into some scope enclosing a - /// larger extent of code. - /// - /// `storage_only` controls whether to invalidate only drop paths that run `StorageDead`. - /// `this_scope_only` controls whether to invalidate only drop paths that refer to the current - /// top-of-scope (as opposed to dependent scopes). - fn invalidate_cache(&mut self, storage_only: bool, is_generator: bool, this_scope_only: bool) { - // FIXME: maybe do shared caching of `cached_exits` etc. to handle functions - // with lots of `try!`? - - // cached exits drop storage and refer to the top-of-scope - self.cached_exits.clear(); - - // the current generator drop and unwind refer to top-of-scope - self.cached_generator_drop = None; - - let ignore_unwinds = storage_only && !is_generator; - if !ignore_unwinds { - self.cached_unwind.invalidate(); - } - - if !ignore_unwinds && !this_scope_only { - for drop_data in &mut self.drops { - drop_data.cached_block.invalidate(); - } - } - } - /// Given a span and this scope's source scope, make a SourceInfo. fn source_info(&self, span: Span) -> SourceInfo { SourceInfo { @@ -249,7 +199,6 @@ impl Scope { } } - /// Whether there's anything to do for the cleanup path, that is, /// when unwinding through this scope. 
This includes destructors, /// but not StorageDead statements, which don't get emitted at all @@ -269,9 +218,39 @@ impl Scope { } } +impl DropTree { + fn new(num_roots: usize) -> Self { + let fake_data = DropData { span: DUMMY_SP, local: Local::MAX, kind: DropKind::Storage }; + let drop_idx = DropIdx::MAX; + let drops = IndexVec::from_elem_n((fake_data, drop_idx), num_roots); + Self { + drops, + num_roots: DropIdx::from_usize(num_roots), + entry_points: Vec::new(), + previous_drops: FxHashMap::default(), + } + } + + fn add_drop(&mut self, drop: DropData, next: DropIdx) -> DropIdx { + let drops = &mut self.drops; + *self.previous_drops + .entry((next, drop.local, drop.kind)) + .or_insert_with(|| drops.push((drop, next))) + } + + fn add_entry(&mut self, from: BasicBlock, to: DropIdx) { + self.entry_points.push((to, from)); + } +} + impl<'tcx> Scopes<'tcx> { - fn len(&self) -> usize { - self.scopes.len() + pub(crate) fn new(is_generator: bool) -> Self { + Self { + scopes: Vec::new(), + breakable_scopes: Vec::new(), + unwind_drops: DropTree::new(1), + generator_drops: DropTree::new(is_generator as usize), + } } fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo), vis_scope: SourceScope) { @@ -282,91 +261,39 @@ impl<'tcx> Scopes<'tcx> { region_scope_span: region_scope.1.span, drops: vec![], moved_locals: vec![], - cached_generator_drop: None, - cached_exits: Default::default(), - cached_unwind: CachedBlock::default(), }); } fn pop_scope( &mut self, region_scope: (region::Scope, SourceInfo), - ) -> (Scope, Option) { + ) -> Scope { let scope = self.scopes.pop().unwrap(); assert_eq!(scope.region_scope, region_scope.0); - let unwind_to = self.scopes.last() - .and_then(|next_scope| next_scope.cached_unwind.get(false)); - (scope, unwind_to) - } - - fn may_panic(&self, scope_count: usize) -> bool { - let len = self.len(); - self.scopes[(len - scope_count)..].iter().any(|s| s.needs_cleanup()) - } - - /// Finds the breakable scope for a given label. This is used for - /// resolving `return`, `break` and `continue`. - fn find_breakable_scope( - &self, - span: Span, - target: BreakableTarget, - ) -> (BasicBlock, region::Scope, Option>) { - let get_scope = |scope: region::Scope| { - // find the loop-scope by its `region::Scope`. 
- self.breakable_scopes.iter() - .rfind(|breakable_scope| breakable_scope.region_scope == scope) - .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found")) - }; - match target { - BreakableTarget::Return => { - let scope = &self.breakable_scopes[0]; - if scope.break_destination != Place::return_place() { - span_bug!(span, "`return` in item with no return scope"); - } - (scope.break_block, scope.region_scope, Some(scope.break_destination.clone())) - } - BreakableTarget::Break(scope) => { - let scope = get_scope(scope); - (scope.break_block, scope.region_scope, Some(scope.break_destination.clone())) - } - BreakableTarget::Continue(scope) => { - let scope = get_scope(scope); - let continue_block = scope.continue_block - .unwrap_or_else(|| span_bug!(span, "missing `continue` block")); - (continue_block, scope.region_scope, None) - } - } + scope } - fn num_scopes_above(&self, region_scope: region::Scope, span: Span) -> usize { - let scope_count = self.scopes.iter().rev() - .position(|scope| scope.region_scope == region_scope) + fn scope_index(&self, region_scope: region::Scope, span: Span) -> usize { + self.scopes.iter() + .rposition(|scope| scope.region_scope == region_scope) .unwrap_or_else(|| { span_bug!(span, "region_scope {:?} does not enclose", region_scope) - }); - let len = self.len(); - assert!(scope_count < len, "should not use `exit_scope` to pop ALL scopes"); - scope_count + }) } fn iter_mut(&mut self) -> impl DoubleEndedIterator + '_ { self.scopes.iter_mut().rev() } - fn top_scopes(&mut self, count: usize) -> impl DoubleEndedIterator + '_ { - let len = self.len(); - self.scopes[len - count..].iter_mut() - } - /// Returns the topmost active scope, which is known to be alive until /// the next scope expression. pub(super) fn topmost(&self) -> region::Scope { self.scopes.last().expect("topmost_scope: no scopes present").region_scope } - fn source_info(&self, index: usize, span: Span) -> SourceInfo { - self.scopes[self.len() - index].source_info(span) - } +// fn source_info(&self, index: usize, span: Span) -> SourceInfo { +// self.scopes[self.len() - index].source_info(span) +// } } impl<'a, 'tcx> Builder<'a, 'tcx> { @@ -374,25 +301,45 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // ========================== // Start a breakable scope, which tracks where `continue`, `break` and // `return` should branch to. 
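+ // (Note on the revised signature, as assumed from its uses in this
+ // patch: `f` returns `Some(block)` if control can reach the end of the
+ // scope normally and `None` if it cannot; the block returned by
+ // `in_breakable_scope` merges that normal exit with any `break` edges
+ // collected in the scope's drop tree.)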
- pub fn in_breakable_scope(&mut self, - loop_block: Option, - break_block: BasicBlock, - break_destination: Place<'tcx>, - f: F) -> R - where F: FnOnce(&mut Builder<'a, 'tcx>) -> R + pub fn in_breakable_scope( + &mut self, + loop_block: Option, + break_destination: Place<'tcx>, + span: Span, + f: F, + ) -> BlockAnd<()> + where F: FnOnce(&mut Builder<'a, 'tcx>) -> Option> { let region_scope = self.scopes.topmost(); let scope = BreakableScope { region_scope, - continue_block: loop_block, - break_block, break_destination, + drops: DropTree::new(1 + loop_block.is_some() as usize), }; self.scopes.breakable_scopes.push(scope); - let res = f(self); + let normal_exit_block = f(self); let breakable_scope = self.scopes.breakable_scopes.pop().unwrap(); assert!(breakable_scope.region_scope == region_scope); - res + let break_block = self.build_exit_tree(breakable_scope.drops, loop_block); + match (normal_exit_block, break_block) { + (Some(block), None) | (None, Some(block)) => block, + (None, None) => self.cfg.start_new_block().unit(), + (Some(normal_block), Some(exit_block)) => { + let target = self.cfg.start_new_block(); + let source_info = self.source_info(span); + self.cfg.terminate( + unpack!(normal_block), + source_info, + TerminatorKind::Goto { target } + ); + self.cfg.terminate( + unpack!(exit_block), + source_info, + TerminatorKind::Goto { target } + ); + target.unit() + } + } } pub fn in_opt_scope(&mut self, @@ -471,28 +418,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { mut block: BasicBlock) -> BlockAnd<()> { debug!("pop_scope({:?}, {:?})", region_scope, block); - // If we are emitting a `drop` statement, we need to have the cached - // diverge cleanup pads ready in case that drop panics. - if self.scopes.may_panic(1) { - self.diverge_cleanup(); - } - let (scope, unwind_to) = self.scopes.pop_scope(region_scope); - let unwind_to = unwind_to.unwrap_or_else(|| self.resume_block()); - unpack!(block = build_scope_drops( - &mut self.cfg, - self.is_generator, - &scope, - block, - unwind_to, - self.arg_count, - false, // not generator - false, // not unwind path - )); + block = self.leave_top_scope(block); + + self.scopes.pop_scope(region_scope); block.unit() } + /// Sets up the drops for breaking from `block` to `target`. pub fn break_scope( &mut self, mut block: BasicBlock, @@ -500,144 +434,125 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { scope: BreakableTarget, source_info: SourceInfo, ) -> BlockAnd<()> { - let (mut target_block, region_scope, destination) - = self.scopes.find_breakable_scope(source_info.span, scope); - if let BreakableTarget::Return = scope { - // We call this now, rather than when we start lowering the - // function so that the return block doesn't precede the entire - // rest of the CFG. Some passes and LLVM prefer blocks to be in - // approximately CFG order. - target_block = self.return_block(); - } - if let Some(destination) = destination { + let span = source_info.span; + + let get_scope_index = |scope: region::Scope| { + // find the loop-scope by its `region::Scope`. 
+ self.scopes.breakable_scopes.iter() + .rposition(|breakable_scope| breakable_scope.region_scope == scope) + .unwrap_or_else(|| { + span_bug!(span, "no enclosing breakable scope found") + }) + }; + let (break_index, destination) = match scope { + BreakableTarget::Return => { + let scope = &self.scopes.breakable_scopes[0]; + if scope.break_destination != Place::return_place() { + span_bug!(span, "`return` in item with no return scope"); + } + (0, Some(scope.break_destination.clone())) + } + BreakableTarget::Break(scope) => { + let break_index = get_scope_index(scope); + ( + break_index, + Some(self.scopes.breakable_scopes[break_index].break_destination.clone()), + ) + } + BreakableTarget::Continue(scope) => { + let break_index = get_scope_index(scope); + (break_index, None) + } + }; + + if let Some(ref destination) = destination { if let Some(value) = value { debug!("stmt_expr Break val block_context.push(SubExpr)"); self.block_context.push(BlockFrame::SubExpr); - unpack!(block = self.into(&destination, block, value)); + unpack!(block = self.into(destination, block, value)); self.block_context.pop(); } else { - self.cfg.push_assign_unit(block, source_info, &destination) + self.cfg.push_assign_unit(block, source_info, destination) } } else { assert!(value.is_none(), "`return` and `break` should have a destination"); } - self.exit_scope(source_info.span, region_scope, block, target_block); + + let region_scope = self.scopes.breakable_scopes[break_index].region_scope; + let scope_index = self.scopes.scope_index(region_scope, span); + let exited_scopes = &self.scopes.scopes[scope_index + 1..]; + let scope_drops = exited_scopes.iter().flat_map(|scope| &scope.drops); + + let drops = &mut self.scopes.breakable_scopes[break_index].drops; + let mut drop_idx = DropIdx::from_u32(destination.is_none() as u32); + for drop in scope_drops { + drop_idx = drops.add_drop(*drop, drop_idx); + } + drops.add_entry(block, drop_idx); + // TODO: explain this hack! + self.cfg.terminate(block, source_info, TerminatorKind::Resume); + self.cfg.start_new_block().unit() } - /// Branch out of `block` to `target`, exiting all scopes up to - /// and including `region_scope`. This will insert whatever drops are - /// needed. See module comment for details. - pub fn exit_scope(&mut self, - span: Span, - region_scope: region::Scope, - mut block: BasicBlock, - target: BasicBlock) { - debug!("exit_scope(region_scope={:?}, block={:?}, target={:?})", - region_scope, block, target); - let scope_count = self.scopes.num_scopes_above(region_scope, span); + // TODO: use in pop_top_scope. + pub fn exit_top_scope( + &mut self, + mut block: BasicBlock, + target: BasicBlock, + source_info: SourceInfo, + ) { + block = self.leave_top_scope(block); + self.cfg.terminate( + block, + source_info, + TerminatorKind::Goto { target }, + ); + } + fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock { // If we are emitting a `drop` statement, we need to have the cached // diverge cleanup pads ready in case that drop panics. 
- let may_panic = self.scopes.may_panic(scope_count); - if may_panic { - self.diverge_cleanup(); - } - - let mut scopes = self.scopes.top_scopes(scope_count + 1).rev(); - let mut scope = scopes.next().unwrap(); - for next_scope in scopes { - if scope.drops.is_empty() { - scope = next_scope; - continue; + let scope = self.scopes.scopes.last().expect("exit_top_scope called with no scopes"); + let is_generator = self.is_generator; + let needs_cleanup = scope.needs_cleanup(); + + let unwind_to = if needs_cleanup { + let mut drops = self.scopes.scopes.iter() + .flat_map(|scope| &scope.drops) + .filter(|drop| is_generator || drop.kind == DropKind::Value); + let mut next_drop = ROOT_NODE; + let mut drop_info = drops.next().unwrap(); + for previous_drop_info in drops { + next_drop = self.scopes.unwind_drops.add_drop(*drop_info, next_drop); + drop_info = previous_drop_info; } - let source_info = scope.source_info(span); - block = match scope.cached_exits.entry((target, region_scope)) { - Entry::Occupied(e) => { - self.cfg.terminate(block, source_info, - TerminatorKind::Goto { target: *e.get() }); - return; - } - Entry::Vacant(v) => { - let b = self.cfg.start_new_block(); - self.cfg.terminate(block, source_info, - TerminatorKind::Goto { target: b }); - v.insert(b); - b - } - }; - - let unwind_to = next_scope.cached_unwind.get(false).unwrap_or_else(|| { - debug_assert!(!may_panic, "cached block not present?"); - START_BLOCK - }); - - unpack!(block = build_scope_drops( - &mut self.cfg, - self.is_generator, - scope, - block, - unwind_to, - self.arg_count, - false, // not generator - false, // not unwind path - )); - - scope = next_scope; - } - - let source_info = self.scopes.source_info(scope_count, span); - self.cfg.terminate(block, source_info, TerminatorKind::Goto { target }); + next_drop + } else { + DropIdx::MAX + }; + unpack!(build_scope_drops( + &mut self.cfg, + &mut self.scopes.unwind_drops, + scope, + block, + unwind_to, + is_generator && needs_cleanup, + self.arg_count, + )) } - /// Creates a path that performs all required cleanup for dropping a generator. + /// Sets up a path that performs all required cleanup for dropping a generator. /// /// This path terminates in GeneratorDrop. Returns the start of the path. /// None indicates there’s no cleanup to do at this point. 
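+    /// (The drops are only recorded here, in `scopes.generator_drops`; the
+    /// corresponding blocks are built at the end of lowering by
+    /// `build_drop_trees`.)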
- pub fn generator_drop_cleanup(&mut self) -> Option { - // Fill in the cache for unwinds - self.diverge_cleanup_gen(true); - - let src_info = self.scopes.source_info(self.scopes.len(), self.fn_span); - let resume_block = self.resume_block(); - let mut scopes = self.scopes.iter_mut().peekable(); - let mut block = self.cfg.start_new_block(); - let result = block; - - while let Some(scope) = scopes.next() { - block = if let Some(b) = scope.cached_generator_drop { - self.cfg.terminate(block, src_info, - TerminatorKind::Goto { target: b }); - return Some(result); - } else { - let b = self.cfg.start_new_block(); - scope.cached_generator_drop = Some(b); - self.cfg.terminate(block, src_info, - TerminatorKind::Goto { target: b }); - b - }; - - let unwind_to = scopes.peek().as_ref().map(|scope| { - scope.cached_unwind.get(true).unwrap_or_else(|| { - span_bug!(src_info.span, "cached block not present?") - }) - }).unwrap_or(resume_block); - - unpack!(block = build_scope_drops( - &mut self.cfg, - self.is_generator, - scope, - block, - unwind_to, - self.arg_count, - true, // is generator - true, // is cached path - )); + pub fn generator_drop_cleanup(&mut self, yield_block: BasicBlock) { + let drops = self.scopes.scopes.iter().flat_map(|scope| &scope.drops); + let mut next_drop = ROOT_NODE; + for drop in drops { + next_drop = self.scopes.generator_drops.add_drop(*drop, next_drop); } - - self.cfg.terminate(block, src_info, TerminatorKind::GeneratorDrop); - - Some(result) + self.scopes.generator_drops.add_entry(yield_block, next_drop); } /// Creates a new source scope, nested in the current one. @@ -711,15 +626,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - // Schedule an abort block - this is used for some ABIs that cannot unwind - pub fn schedule_abort(&mut self) -> BasicBlock { - let source_info = self.scopes.source_info(self.scopes.len(), self.fn_span); - let abortblk = self.cfg.start_new_cleanup_block(); - self.cfg.terminate(abortblk, source_info, TerminatorKind::Abort); - self.cached_resume_block = Some(abortblk); - abortblk - } - // Scheduling drops // ================ pub fn schedule_drop_storage_and_value( @@ -732,11 +638,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.schedule_drop(span, region_scope, local, DropKind::Value); } - /// Indicates that `place` should be dropped on exit from - /// `region_scope`. + /// Indicates that `place` should be dropped on exit from `region_scope`. /// - /// When called with `DropKind::Storage`, `place` should be a local - /// with an index higher than the current `self.arg_count`. + /// When called with `DropKind::Storage`, `place` shouldn't be the return + /// place, or a function parameter. pub fn schedule_drop( &mut self, span: Span, @@ -744,7 +649,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { local: Local, drop_kind: DropKind, ) { - let needs_drop = match drop_kind { + // TODO: add back in caching. + let _needs_drop = match drop_kind { DropKind::Value => { if !self.hir.needs_drop(self.local_decls[local].ty) { return } true @@ -761,71 +667,21 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } }; - for scope in self.scopes.iter_mut() { - let this_scope = scope.region_scope == region_scope; - // When building drops, we try to cache chains of drops in such a way so these drops - // could be reused by the drops which would branch into the cached (already built) - // blocks. 
This, however, means that whenever we add a drop into a scope which already - // had some blocks built (and thus, cached) for it, we must invalidate all caches which - // might branch into the scope which had a drop just added to it. This is necessary, - // because otherwise some other code might use the cache to branch into already built - // chain of drops, essentially ignoring the newly added drop. - // - // For example consider there’s two scopes with a drop in each. These are built and - // thus the caches are filled: - // - // +--------------------------------------------------------+ - // | +---------------------------------+ | - // | | +--------+ +-------------+ | +---------------+ | - // | | | return | <-+ | drop(outer) | <-+ | drop(middle) | | - // | | +--------+ +-------------+ | +---------------+ | - // | +------------|outer_scope cache|--+ | - // +------------------------------|middle_scope cache|------+ - // - // Now, a new, inner-most scope is added along with a new drop into both inner-most and - // outer-most scopes: - // - // +------------------------------------------------------------+ - // | +----------------------------------+ | - // | | +--------+ +-------------+ | +---------------+ | +-------------+ - // | | | return | <+ | drop(new) | <-+ | drop(middle) | <--+| drop(inner) | - // | | +--------+ | | drop(outer) | | +---------------+ | +-------------+ - // | | +-+ +-------------+ | | - // | +---|invalid outer_scope cache|----+ | - // +----=----------------|invalid middle_scope cache|-----------+ - // - // If, when adding `drop(new)` we do not invalidate the cached blocks for both - // outer_scope and middle_scope, then, when building drops for the inner (right-most) - // scope, the old, cached blocks, without `drop(new)` will get used, producing the - // wrong results. - // - // The cache and its invalidation for unwind branch is somewhat special. The cache is - // per-drop, rather than per scope, which has a several different implications. Adding - // a new drop into a scope will not invalidate cached blocks of the prior drops in the - // scope. That is true, because none of the already existing drops will have an edge - // into a block with the newly added drop. - // - // Note that this code iterates scopes from the inner-most to the outer-most, - // invalidating caches of each scope visited. This way bare minimum of the - // caches gets invalidated. i.e., if a new drop is added into the middle scope, the - // cache of outer scope stays intact. - scope.invalidate_cache(!needs_drop, self.is_generator, this_scope); - if this_scope { - let region_scope_span = region_scope.span(self.hir.tcx(), - &self.hir.region_scope_tree); - // Attribute scope exit drops to scope's closing brace. - let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span); - - scope.drops.push(DropData { - span: scope_end, - local, - kind: drop_kind, - cached_block: CachedBlock::default(), - }); - return; - } - } - span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local); + let scope = self.scopes.iter_mut() + .find(|scope| scope.region_scope == region_scope) + .unwrap_or_else(|| { + span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local); + }); + + let region_scope_span = region_scope.span(self.hir.tcx(), &self.hir.region_scope_tree); + // Attribute scope exit drops to scope's closing brace. 
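+        // (`end_point` returns the span of the last character, i.e. the `}`)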
+ let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span); + + scope.drops.push(DropData { + span: scope_end, + local, + kind: drop_kind, + }); } /// Indicates that the "local operand" stored in `local` is @@ -864,10 +720,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// spurious borrow-check errors -- the problem, ironically, is /// not the `DROP(_X)` itself, but the (spurious) unwind pathways /// that it creates. See #64391 for an example. - pub fn record_operands_moved( - &mut self, - operands: &[Operand<'tcx>], - ) { + pub fn record_operands_moved(&mut self, operands: &[Operand<'tcx>]) { let scope = match self.local_scope() { None => { // if there is no local scope, operands won't be dropped anyway @@ -957,8 +810,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ); } } - - top_scope.invalidate_cache(true, self.is_generator, true); } else { bug!("Expected as_local_operand to produce a temporary"); } @@ -968,58 +819,26 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { (true_block, false_block) } - /// Creates a path that performs all required cleanup for unwinding. - /// - /// This path terminates in Resume. Returns the start of the path. - /// See module comment for more details. - pub fn diverge_cleanup(&mut self) -> BasicBlock { - self.diverge_cleanup_gen(false) - } - - fn resume_block(&mut self) -> BasicBlock { - if let Some(target) = self.cached_resume_block { - target - } else { - let resumeblk = self.cfg.start_new_cleanup_block(); - self.cfg.terminate(resumeblk, - SourceInfo { - scope: OUTERMOST_SOURCE_SCOPE, - span: self.fn_span - }, - TerminatorKind::Resume); - self.cached_resume_block = Some(resumeblk); - resumeblk + fn diverge_cleanup(&mut self) -> DropIdx { + let is_generator = self.is_generator; + let drops = self.scopes.scopes.iter() + .flat_map(|scope| &scope.drops) + .filter(|drop| is_generator || drop.kind == DropKind::Value); + let mut next_drop = ROOT_NODE; + for drop in drops { + next_drop = self.scopes.unwind_drops.add_drop(*drop, next_drop); } + next_drop } - fn diverge_cleanup_gen(&mut self, generator_drop: bool) -> BasicBlock { - // Build up the drops in **reverse** order. The end result will - // look like: - // - // scopes[n] -> scopes[n-1] -> ... -> scopes[0] - // - // However, we build this in **reverse order**. That is, we - // process scopes[0], then scopes[1], etc, pointing each one at - // the result generates from the one before. Along the way, we - // store caches. If everything is cached, we'll just walk right - // to left reading the cached results but never created anything. - - // Find the last cached block - debug!("diverge_cleanup_gen(self.scopes = {:?})", self.scopes); - let cached_cleanup = self.scopes.iter_mut().enumerate() - .find_map(|(idx, ref scope)| { - let cached_block = scope.cached_unwind.get(generator_drop)?; - Some((cached_block, idx)) - }); - let (mut target, first_uncached) = cached_cleanup - .unwrap_or_else(|| (self.resume_block(), self.scopes.len())); - - for scope in self.scopes.top_scopes(first_uncached) { - target = build_diverge_scope(&mut self.cfg, scope.region_scope_span, - scope, target, generator_drop, self.is_generator); - } - - target + /// Prepares to create a path that performs all required cleanup for + /// unwinding. + /// + /// This path terminates in Resume. The path isn't created until after all + /// of the non-unwind paths in this item have been lowered. 
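+    ///
+    /// Call this when lowering a terminator that can unwind (such as a call
+    /// or an assert); the unwind edges themselves are only filled in once
+    /// the drop trees are built, at the end of lowering.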
+ pub fn diverge_from(&mut self, start: BasicBlock) { + let next_drop = self.diverge_cleanup(); + self.scopes.unwind_drops.add_entry(start, next_drop); } /// Utility function for *non*-scope code to build their own drops @@ -1030,13 +849,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { value: Operand<'tcx>) -> BlockAnd<()> { let source_info = self.source_info(span); let next_target = self.cfg.start_new_block(); - let diverge_target = self.diverge_cleanup(); + self.diverge_from(block); self.cfg.terminate(block, source_info, TerminatorKind::DropAndReplace { location, value, target: next_target, - unwind: Some(diverge_target), + unwind: None, }); next_target.unit() } @@ -1053,7 +872,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let source_info = self.source_info(span); let success_block = self.cfg.start_new_block(); - let cleanup = self.diverge_cleanup(); + self.diverge_from(block); self.cfg.terminate(block, source_info, TerminatorKind::Assert { @@ -1061,7 +880,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { expected, msg, target: success_block, - cleanup: Some(cleanup), + cleanup: None, }); success_block @@ -1079,20 +898,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { assert_eq!(top_scope.region_scope, region_scope); top_scope.drops.clear(); - top_scope.invalidate_cache(false, self.is_generator, true); } } /// Builds drops for pop_scope and exit_scope. fn build_scope_drops<'tcx>( cfg: &mut CFG<'tcx>, - is_generator: bool, + unwind_drops: &mut DropTree, scope: &Scope, mut block: BasicBlock, - last_unwind_to: BasicBlock, + mut unwind_to: DropIdx, + storage_dead_on_unwind: bool, arg_count: usize, - generator_drop: bool, - is_cached_path: bool, ) -> BlockAnd<()> { debug!("build_scope_drops({:?} -> {:?})", block, scope); @@ -1115,8 +932,7 @@ fn build_scope_drops<'tcx>( // drops for the unwind path should have already been generated by // `diverge_cleanup_gen`. - for drop_idx in (0..scope.drops.len()).rev() { - let drop_data = &scope.drops[drop_idx]; + for drop_data in scope.drops.iter().rev() { let source_info = scope.source_info(drop_data.span); let local = drop_data.local; @@ -1126,22 +942,25 @@ fn build_scope_drops<'tcx>( // path, then don't generate the drop. (We only take this into // account for non-unwind paths so as not to disturb the // caching mechanism.) - if !is_cached_path && scope.moved_locals.iter().any(|&o| o == local) { + if scope.moved_locals.iter().any(|&o| o == local) { + unwind_to = unwind_drops.drops[unwind_to].1; continue; } - let unwind_to = get_unwind_to(scope, is_generator, drop_idx, generator_drop) - .unwrap_or(last_unwind_to); + unwind_drops.entry_points.push((unwind_to, block)); let next = cfg.start_new_block(); cfg.terminate(block, source_info, TerminatorKind::Drop { location: local.into(), target: next, - unwind: Some(unwind_to) + unwind: None }); block = next; } DropKind::Storage => { + if storage_dead_on_unwind { + unwind_to = unwind_drops.drops[unwind_to].1; + } // Only temps and vars need their storage dead. 
assert!(local.index() > arg_count); cfg.push(block, Statement { @@ -1154,137 +973,287 @@ fn build_scope_drops<'tcx>( block.unit() } -fn get_unwind_to( - scope: &Scope, - is_generator: bool, - unwind_from: usize, - generator_drop: bool, -) -> Option { - for drop_idx in (0..unwind_from).rev() { - let drop_data = &scope.drops[drop_idx]; - match (is_generator, &drop_data.kind) { - (true, DropKind::Storage) => { - return Some(drop_data.cached_block.get(generator_drop).unwrap_or_else(|| { - span_bug!(drop_data.span, "cached block not present for {:?}", drop_data) - })); +trait DropTreeBuilder<'tcx> { + fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock; + fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock); +} +impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { + fn build_exit_tree( + &mut self, + mut drops: DropTree, + continue_block: Option, + ) -> Option> { + let mut blocks = IndexVec::from_elem(None, &drops.drops); + if continue_block.is_some() { + blocks[CONTINUE_NODE] = continue_block; + debug_assert_eq!(drops.num_roots, DropIdx::new(2)); + } else { + debug_assert_eq!(drops.num_roots, CONTINUE_NODE); + } + build_drop_tree::(&mut self.cfg, &mut drops, &mut blocks); + if drops.drops.iter().any(|(drop, _)| drop.kind == DropKind::Value) { + let unwind_target = self.diverge_cleanup(); + let num_roots = drops.num_roots.index(); + let mut unwind_indices = IndexVec::from_elem_n(unwind_target, num_roots); + for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(num_roots) { + match drop_data.0.kind { + DropKind::Storage => { + if self.is_generator { + let unwind_drop = self + .scopes + .unwind_drops + .add_drop(drop_data.0, unwind_indices[drop_data.1]); + unwind_indices.push(unwind_drop); + } else { + unwind_indices.push(unwind_indices[drop_data.1]); + } + } + DropKind::Value => { + let unwind_drop = self + .scopes + .unwind_drops + .add_drop(drop_data.0, unwind_indices[drop_data.1]); + self.scopes.unwind_drops.add_entry( + blocks[drop_idx].unwrap(), + unwind_indices[drop_data.1], + ); + unwind_indices.push(unwind_drop); + } + } } - (false, DropKind::Value) => { - return Some(drop_data.cached_block.get(generator_drop).unwrap_or_else(|| { - span_bug!(drop_data.span, "cached block not present for {:?}", drop_data) - })); + } + blocks[ROOT_NODE].map(BasicBlock::unit) + } + + crate fn build_drop_trees(&mut self, should_abort: bool) { + if self.is_generator { + Self::build_generator_drop_tree( + &mut self.cfg, + &mut self.scopes.generator_drops, + self.fn_span, + should_abort, + ); + } + Self::build_unwind_tree( + &mut self.cfg, + &mut self.scopes.unwind_drops, + self.fn_span, + should_abort, + ); + } + + fn build_generator_drop_tree( + cfg: &mut CFG<'tcx>, + drops: &mut DropTree, + fn_span: Span, + should_abort: bool, + ) { + let mut blocks = IndexVec::from_elem(None, &drops.drops); + build_drop_tree::(cfg, drops, &mut blocks); + // TODO: unwind? + if let Some(root_block) = blocks[ROOT_NODE] { + cfg.terminate( + root_block, + SourceInfo { + scope: OUTERMOST_SOURCE_SCOPE, + span: fn_span + }, + TerminatorKind::GeneratorDrop, + ); + } + // Reuse the generator drop tree as the unwind tree. + // + // This is a different tree to the standard unwind paths here to + // prevent drop elaboration from creating drop flags that would have + // to be captured by the generator. I'm not sure how important this + // optimization is, but it is here. 
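+        //
+        // Only `DropKind::Value` nodes get an unwind entry point here:
+        // `StorageDead` cannot panic, so only actual drops can divert onto
+        // the unwind path.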
+ for (drop_idx, drop_data) in drops.drops.iter_enumerated() { + if let DropKind::Value = drop_data.0.kind { + drops.entry_points.push((drop_data.1, blocks[drop_idx].unwrap())); } - _ => (), } + Self::build_unwind_tree(cfg, drops, fn_span, should_abort); } - None -} -fn build_diverge_scope<'tcx>(cfg: &mut CFG<'tcx>, - span: Span, - scope: &mut Scope, - mut target: BasicBlock, - generator_drop: bool, - is_generator: bool) - -> BasicBlock -{ - // Build up the drops in **reverse** order. The end result will - // look like: - // - // [drops[n]] -...-> [drops[0]] -> [target] - // - // The code in this function reads from right to left. At each - // point, we check for cached blocks representing the - // remainder. If everything is cached, we'll just walk right to - // left reading the cached results but never create anything. + fn build_unwind_tree( + cfg: &mut CFG<'tcx>, + drops: &mut DropTree, + fn_span: Span, + should_abort: bool, + ) { + let mut blocks = IndexVec::from_elem(None, &drops.drops); + build_drop_tree::(cfg, drops, &mut blocks); + if let Some(resume_block) = blocks[ROOT_NODE] { + let terminator = if should_abort { + TerminatorKind::Abort + } else { + TerminatorKind::Resume + }; + cfg.terminate( + resume_block, + SourceInfo { + scope: OUTERMOST_SOURCE_SCOPE, + span: fn_span + }, + terminator, + ); + } + } +} - let source_scope = scope.source_scope; - let source_info = |span| SourceInfo { +fn source_info(span: Span) -> SourceInfo { + SourceInfo { span, - scope: source_scope - }; - - // We keep track of StorageDead statements to prepend to our current block - // and store them here, in reverse order. - let mut storage_deads = vec![]; - - let mut target_built_by_us = false; - - // Build up the drops. Here we iterate the vector in - // *forward* order, so that we generate drops[0] first (right to - // left in diagram above). - debug!("build_diverge_scope({:?})", scope.drops); - for (j, drop_data) in scope.drops.iter_mut().enumerate() { - debug!("build_diverge_scope drop_data[{}]: {:?}", j, drop_data); - // Only full value drops are emitted in the diverging path, - // not StorageDead, except in the case of generators. - // - // Note: This may not actually be what we desire (are we - // "freeing" stack storage as we unwind, or merely observing a - // frozen stack)? In particular, the intent may have been to - // match the behavior of clang, but on inspection eddyb says - // this is not what clang does. - match drop_data.kind { - DropKind::Storage if is_generator => { - storage_deads.push(Statement { - source_info: source_info(drop_data.span), - kind: StatementKind::StorageDead(drop_data.local) - }); - if !target_built_by_us { - // We cannot add statements to an existing block, so we create a new - // block for our StorageDead statements. - let block = cfg.start_new_cleanup_block(); - let source_info = SourceInfo { span: DUMMY_SP, scope: source_scope }; - cfg.terminate(block, source_info, - TerminatorKind::Goto { target: target }); - target = block; - target_built_by_us = true; + scope: OUTERMOST_SOURCE_SCOPE, + } +} + +fn build_drop_tree<'tcx, T: DropTreeBuilder<'tcx>>( + cfg: &mut CFG<'tcx>, + drops: &mut DropTree, + blocks: &mut IndexVec>, +) { + debug!("build_drop_tree(drops = {:#?})", drops); + // TODO: Some comment about this. + #[derive(Clone, Copy)] + enum NeedsBlock { + NoPredecessor, + CanShare(DropIdx), + NeedsOwn, + } + + // TODO: Split this into two functions. 
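+    //
+    // As a sketch: for a drop chain `b -> a -> ROOT` with entry points into
+    // both `b` and `a`, this lowers to roughly
+    //
+    //     entry(b) -> [Drop(b)] -> [Drop(a)] -> [ROOT]
+    //     entry(a) ---------------^
+    //
+    // where the terminator of the `ROOT` block is supplied by the caller
+    // (e.g. `Resume`, `GeneratorDrop`, or a `Goto` to the break target).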
+ + // If a drop has multiple predecessors, they need to be in separate blocks + // so that they can both banch to the current drop. + let mut needs_block = IndexVec::from_elem(NeedsBlock::NoPredecessor, &drops.drops); + for root_idx in (ROOT_NODE..drops.num_roots).skip(1) { + needs_block[root_idx] = NeedsBlock::NeedsOwn; + } + + let entry_points = &mut drops.entry_points; + entry_points.sort(); + + for (drop_idx, drop_data) in drops.drops.iter_enumerated().rev() { + if entry_points.last().map_or(false, |entry_point| entry_point.0 == drop_idx) { + let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg)); + needs_block[drop_idx] = NeedsBlock::NeedsOwn; + while entry_points.last().map_or(false, |entry_point| entry_point.0 == drop_idx) { + let entry_block = entry_points.pop().unwrap().1; + T::add_entry(cfg, entry_block, block); + } + } + match needs_block[drop_idx] { + NeedsBlock::NoPredecessor => continue, + NeedsBlock::NeedsOwn => { + blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg)); + } + NeedsBlock::CanShare(pred) => { + blocks[drop_idx] = blocks[pred]; + } + } + if let DropKind::Value = drop_data.0.kind { + needs_block[drop_data.1] = NeedsBlock::NeedsOwn; + } else { + if drop_idx >= drops.num_roots { + match &mut needs_block[drop_data.1] { + pred @ NeedsBlock::NoPredecessor => *pred = NeedsBlock::CanShare(drop_idx), + pred @ NeedsBlock::CanShare(_) => *pred = NeedsBlock::NeedsOwn, + NeedsBlock::NeedsOwn => (), } - *drop_data.cached_block.ref_mut(generator_drop) = Some(target); } - DropKind::Storage => {} + } + } + assert!(entry_points.is_empty()); + debug!("build_drop_tree: blocks = {:#?}", blocks); + + for (drop_idx, drop_data) in drops.drops.iter_enumerated().rev() { + if let NeedsBlock::NoPredecessor = needs_block[drop_idx] { + continue; + } + match drop_data.0.kind { DropKind::Value => { - let cached_block = drop_data.cached_block.ref_mut(generator_drop); - target = if let Some(cached_block) = *cached_block { - storage_deads.clear(); - target_built_by_us = false; - cached_block - } else { - push_storage_deads(cfg, target, &mut storage_deads); - let block = cfg.start_new_cleanup_block(); - cfg.terminate( - block, - source_info(drop_data.span), - TerminatorKind::Drop { - location: drop_data.local.into(), - target, - unwind: None - }, - ); - *cached_block = Some(block); - target_built_by_us = true; - block + let terminator = TerminatorKind::Drop { + target: blocks[drop_data.1].unwrap(), + // TODO: The caller will register this if needed. + unwind: None, + location: drop_data.0.local.into(), }; + cfg.terminate( + blocks[drop_idx].unwrap(), + source_info(drop_data.0.span), + terminator, + ); } - }; + // Root nodes don't correspond to a drop. 
+ DropKind::Storage if drop_idx < drops.num_roots => {} + DropKind::Storage => { + let block = blocks[drop_idx].unwrap(); + let stmt = Statement { + source_info: source_info(drop_data.0.span), + kind: StatementKind::StorageDead(drop_data.0.local), + }; + cfg.push(block, stmt); + let target = blocks[drop_data.1].unwrap(); + if target != block { + let terminator = TerminatorKind::Goto { target }; + cfg.terminate(block, source_info(drop_data.0.span), terminator); + } + } + } } - push_storage_deads(cfg, target, &mut storage_deads); - *scope.cached_unwind.ref_mut(generator_drop) = Some(target); +} - assert!(storage_deads.is_empty()); - debug!("build_diverge_scope({:?}, {:?}) = {:?}", scope, span, target); +struct ExitScopes; - target +impl<'tcx> DropTreeBuilder<'tcx> for ExitScopes { + fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock { + cfg.start_new_block() + } + fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) { + cfg.block_data_mut(from).terminator_mut().kind = TerminatorKind::Goto { target: to }; + } } -fn push_storage_deads(cfg: &mut CFG<'tcx>, - target: BasicBlock, - storage_deads: &mut Vec>) { - if storage_deads.is_empty() { return; } - let statements = &mut cfg.block_data_mut(target).statements; - storage_deads.reverse(); - debug!("push_storage_deads({:?}), storage_deads={:?}, statements={:?}", - target, storage_deads, statements); - storage_deads.append(statements); - mem::swap(statements, storage_deads); - assert!(storage_deads.is_empty()); +struct GeneratorDrop; + +impl<'tcx> DropTreeBuilder<'tcx> for GeneratorDrop { + fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock { + cfg.start_new_block() + } + fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) { + let kind = &mut cfg.block_data_mut(from).terminator_mut().kind; + if let TerminatorKind::Yield { drop, .. } = kind { + *drop = Some(to); + }; + } +} + +struct Unwind; + +impl<'tcx> DropTreeBuilder<'tcx> for Unwind { + fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock { + cfg.start_new_cleanup_block() + } + fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) { + let term = &mut cfg.block_data_mut(from).terminator_mut().kind; + match term { + TerminatorKind::Drop { unwind, .. } + | TerminatorKind::DropAndReplace { unwind, .. } + | TerminatorKind::FalseUnwind { unwind, .. } + | TerminatorKind::Call { cleanup: unwind, .. } + | TerminatorKind::Assert { cleanup: unwind, .. } => { + *unwind = Some(to); + }, + TerminatorKind::Goto { .. } + | TerminatorKind::SwitchInt { .. } + | TerminatorKind::Resume + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::Yield { .. } + | TerminatorKind::GeneratorDrop + | TerminatorKind::FalseEdges { .. 
} => bug!("cannot unwind from {:?}", term), + } + } } From b6a2ae9bbb4891fdd46236cae93af662cfd0e419 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sat, 16 Nov 2019 14:21:59 +0000 Subject: [PATCH 03/19] Temp: Cleanup 1 --- src/librustc_mir/build/scope.rs | 104 ++++++++++++++++++++------------ 1 file changed, 65 insertions(+), 39 deletions(-) diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index b34dcd22b3975..53f9229ecdc26 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -160,7 +160,7 @@ struct BreakableScope<'tcx> { /// The target of an expression that breaks out of a scope #[derive(Clone, Copy, Debug)] -pub enum BreakableTarget { +pub(crate) enum BreakableTarget { Continue(region::Scope), Break(region::Scope), Return, @@ -287,7 +287,7 @@ impl<'tcx> Scopes<'tcx> { /// Returns the topmost active scope, which is known to be alive until /// the next scope expression. - pub(super) fn topmost(&self) -> region::Scope { + fn topmost(&self) -> region::Scope { self.scopes.last().expect("topmost_scope: no scopes present").region_scope } @@ -301,7 +301,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // ========================== // Start a breakable scope, which tracks where `continue`, `break` and // `return` should branch to. - pub fn in_breakable_scope( + crate fn in_breakable_scope( &mut self, loop_block: Option, break_destination: Place<'tcx>, @@ -342,7 +342,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - pub fn in_opt_scope(&mut self, + crate fn in_opt_scope(&mut self, opt_scope: Option<(region::Scope, SourceInfo)>, f: F) -> BlockAnd @@ -361,7 +361,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// Convenience wrapper that pushes a scope and then executes `f` /// to build its contents, popping the scope afterwards. - pub fn in_scope(&mut self, + crate fn in_scope(&mut self, region_scope: (region::Scope, SourceInfo), lint_level: LintLevel, f: F) @@ -406,14 +406,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// scope and call `pop_scope` afterwards. Note that these two /// calls must be paired; using `in_scope` as a convenience /// wrapper maybe preferable. - pub fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo)) { + crate fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo)) { self.scopes.push_scope(region_scope, self.source_scope); } /// Pops a scope, which should have region scope `region_scope`, /// adding any drops onto the end of `block` that are needed. /// This must match 1-to-1 with `push_scope`. - pub fn pop_scope(&mut self, + crate fn pop_scope(&mut self, region_scope: (region::Scope, SourceInfo), mut block: BasicBlock) -> BlockAnd<()> { @@ -427,7 +427,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } /// Sets up the drops for breaking from `block` to `target`. - pub fn break_scope( + crate fn break_scope( &mut self, mut block: BasicBlock, value: Option>, @@ -496,7 +496,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } // TODO: use in pop_top_scope. - pub fn exit_top_scope( + crate fn exit_top_scope( &mut self, mut block: BasicBlock, target: BasicBlock, @@ -546,7 +546,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// /// This path terminates in GeneratorDrop. Returns the start of the path. /// None indicates there’s no cleanup to do at this point. 
- pub fn generator_drop_cleanup(&mut self, yield_block: BasicBlock) { + crate fn generator_drop_cleanup(&mut self, yield_block: BasicBlock) { let drops = self.scopes.scopes.iter().flat_map(|scope| &scope.drops); let mut next_drop = ROOT_NODE; for drop in drops { @@ -556,7 +556,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } /// Creates a new source scope, nested in the current one. - pub fn new_source_scope(&mut self, + crate fn new_source_scope(&mut self, span: Span, lint_level: LintLevel, safety: Option) -> SourceScope { @@ -583,7 +583,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } /// Given a span and the current source scope, make a SourceInfo. - pub fn source_info(&self, span: Span) -> SourceInfo { + crate fn source_info(&self, span: Span) -> SourceInfo { SourceInfo { span, scope: self.source_scope @@ -614,7 +614,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// /// When building statics/constants, returns `None` since /// intermediate values do not have to be dropped in that case. - pub fn local_scope(&self) -> Option { + crate fn local_scope(&self) -> Option { match self.hir.body_owner_kind { hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => @@ -628,7 +628,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Scheduling drops // ================ - pub fn schedule_drop_storage_and_value( + crate fn schedule_drop_storage_and_value( &mut self, span: Span, region_scope: region::Scope, @@ -642,7 +642,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// /// When called with `DropKind::Storage`, `place` shouldn't be the return /// place, or a function parameter. - pub fn schedule_drop( + crate fn schedule_drop( &mut self, span: Span, region_scope: region::Scope, @@ -720,7 +720,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// spurious borrow-check errors -- the problem, ironically, is /// not the `DROP(_X)` itself, but the (spurious) unwind pathways /// that it creates. See #64391 for an example. - pub fn record_operands_moved(&mut self, operands: &[Operand<'tcx>]) { + crate fn record_operands_moved(&mut self, operands: &[Operand<'tcx>]) { let scope = match self.local_scope() { None => { // if there is no local scope, operands won't be dropped anyway @@ -756,7 +756,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// /// This is a special case because the temporary for the condition needs to /// be dropped on both the true and the false arm. - pub fn test_bool( + crate fn test_bool( &mut self, mut block: BasicBlock, condition: Expr<'tcx>, @@ -836,13 +836,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// /// This path terminates in Resume. The path isn't created until after all /// of the non-unwind paths in this item have been lowered. - pub fn diverge_from(&mut self, start: BasicBlock) { + crate fn diverge_from(&mut self, start: BasicBlock) { let next_drop = self.diverge_cleanup(); self.scopes.unwind_drops.add_entry(start, next_drop); } /// Utility function for *non*-scope code to build their own drops - pub fn build_drop_and_replace(&mut self, + crate fn build_drop_and_replace(&mut self, block: BasicBlock, span: Span, location: Place<'tcx>, @@ -863,7 +863,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// Creates an Assert terminator and return the success block. /// If the boolean condition operand is not the expected value, /// a runtime panic will be caused with the given message. 
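+    ///
+    /// The `cleanup` edge is left as `None` when the terminator is created;
+    /// `diverge_from` records the block so that the unwind edge can be
+    /// filled in when the unwind drop tree is lowered.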
- pub fn assert(&mut self, block: BasicBlock, + crate fn assert(&mut self, block: BasicBlock, cond: Operand<'tcx>, expected: bool, msg: AssertMessage<'tcx>, @@ -892,7 +892,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// /// This is only needed for `match` arm scopes, because they have one /// entrance per pattern, but only one exit. - pub(crate) fn clear_top_scope(&mut self, region_scope: region::Scope) { + crate fn clear_top_scope(&mut self, region_scope: region::Scope) { let top_scope = self.scopes.scopes.last_mut().unwrap(); assert_eq!(top_scope.region_scope, region_scope); @@ -1027,30 +1027,26 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { crate fn build_drop_trees(&mut self, should_abort: bool) { if self.is_generator { - Self::build_generator_drop_tree( + self.build_generator_drop_trees( + should_abort, + ); + } else { + Self::build_unwind_tree( &mut self.cfg, - &mut self.scopes.generator_drops, + &mut self.scopes.unwind_drops, self.fn_span, should_abort, ); } - Self::build_unwind_tree( - &mut self.cfg, - &mut self.scopes.unwind_drops, - self.fn_span, - should_abort, - ); } - fn build_generator_drop_tree( - cfg: &mut CFG<'tcx>, - drops: &mut DropTree, - fn_span: Span, - should_abort: bool, - ) { + fn build_generator_drop_trees(&mut self, should_abort: bool) { + // Build the drop tree for dropping the generator while it's suspended. + let drops = &mut self.scopes.generator_drops; + let cfg = &mut self.cfg; + let fn_span = self.fn_span; let mut blocks = IndexVec::from_elem(None, &drops.drops); build_drop_tree::(cfg, drops, &mut blocks); - // TODO: unwind? if let Some(root_block) = blocks[ROOT_NODE] { cfg.terminate( root_block, @@ -1061,7 +1057,17 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { TerminatorKind::GeneratorDrop, ); } - // Reuse the generator drop tree as the unwind tree. + + // Build the drop tree for unwinding in the normal control flow paths. + let resume_block = Self::build_unwind_tree( + cfg, + &mut self.scopes.unwind_drops, + fn_span, + should_abort, + ); + + // Build the drop tree for unwinding when dropping a suspended + // generator. 
// // This is a different tree to the standard unwind paths here to // prevent drop elaboration from creating drop flags that would have @@ -1072,7 +1078,24 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { drops.entry_points.push((drop_data.1, blocks[drop_idx].unwrap())); } } - Self::build_unwind_tree(cfg, drops, fn_span, should_abort); + let mut blocks = IndexVec::from_elem(None, &drops.drops); + blocks[ROOT_NODE] = resume_block; + build_drop_tree::(cfg, drops, &mut blocks); + if let (None, Some(new_resume_block)) = (resume_block, blocks[ROOT_NODE]) { + let terminator = if should_abort { + TerminatorKind::Abort + } else { + TerminatorKind::Resume + }; + cfg.terminate( + new_resume_block, + SourceInfo { + scope: OUTERMOST_SOURCE_SCOPE, + span: fn_span + }, + terminator, + ); + } } fn build_unwind_tree( @@ -1080,7 +1103,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { drops: &mut DropTree, fn_span: Span, should_abort: bool, - ) { + ) -> Option { let mut blocks = IndexVec::from_elem(None, &drops.drops); build_drop_tree::(cfg, drops, &mut blocks); if let Some(resume_block) = blocks[ROOT_NODE] { @@ -1097,6 +1120,9 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { }, terminator, ); + Some(resume_block) + } else { + None } } } From f1965364509df21a1331be93758c27563bdf83f4 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sat, 16 Nov 2019 14:37:27 +0000 Subject: [PATCH 04/19] Temp: Cleanup 2 (SourceInfo) --- src/librustc_mir/build/scope.rs | 44 +++++++++++---------------------- 1 file changed, 14 insertions(+), 30 deletions(-) diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 53f9229ecdc26..a727974853f8e 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -132,7 +132,7 @@ struct Scope { struct DropData { /// The `Span` where drop obligation was incurred (typically where place was /// declared) - span: Span, + source_info: SourceInfo, /// local to drop local: Local, @@ -177,8 +177,7 @@ const CONTINUE_NODE: DropIdx = DropIdx::from_u32_const(1); // TODO say some more. #[derive(Debug)] struct DropTree { - /// The next item to drop, if there is one. - // TODO actual comment + /// Drops in the tree. drops: IndexVec, /// Map for finding the inverse of the `next_drop` relation: /// @@ -191,14 +190,6 @@ struct DropTree { } impl Scope { - /// Given a span and this scope's source scope, make a SourceInfo. - fn source_info(&self, span: Span) -> SourceInfo { - SourceInfo { - span, - scope: self.source_scope - } - } - /// Whether there's anything to do for the cleanup path, that is, /// when unwinding through this scope. 
This includes destructors, /// but not StorageDead statements, which don't get emitted at all @@ -220,7 +211,12 @@ impl Scope { impl DropTree { fn new(num_roots: usize) -> Self { - let fake_data = DropData { span: DUMMY_SP, local: Local::MAX, kind: DropKind::Storage }; + let fake_source_info = SourceInfo { span: DUMMY_SP, scope: OUTERMOST_SOURCE_SCOPE }; + let fake_data = DropData { + source_info: fake_source_info, + local: Local::MAX, + kind: DropKind::Storage, + }; let drop_idx = DropIdx::MAX; let drops = IndexVec::from_elem_n((fake_data, drop_idx), num_roots); Self { @@ -290,10 +286,6 @@ impl<'tcx> Scopes<'tcx> { fn topmost(&self) -> region::Scope { self.scopes.last().expect("topmost_scope: no scopes present").region_scope } - -// fn source_info(&self, index: usize, span: Span) -> SourceInfo { -// self.scopes[self.len() - index].source_info(span) -// } } impl<'a, 'tcx> Builder<'a, 'tcx> { @@ -495,7 +487,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.cfg.start_new_block().unit() } - // TODO: use in pop_top_scope. crate fn exit_top_scope( &mut self, mut block: BasicBlock, @@ -678,7 +669,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span); scope.drops.push(DropData { - span: scope_end, + source_info: SourceInfo { span: scope_end, scope: scope.source_scope }, local, kind: drop_kind, }); @@ -791,7 +782,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { bug!("Drop scheduled on top of condition variable") } DropKind::Storage => { - let source_info = top_scope.source_info(top_drop_data.span); + let source_info = top_drop_data.source_info; let local = top_drop_data.local; assert_eq!(local, cond_temp, "Drop scheduled on top of condition"); self.cfg.push( @@ -933,7 +924,7 @@ fn build_scope_drops<'tcx>( // `diverge_cleanup_gen`. 
for drop_data in scope.drops.iter().rev() { - let source_info = scope.source_info(drop_data.span); + let source_info = drop_data.source_info; let local = drop_data.local; match drop_data.kind { @@ -1127,13 +1118,6 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { } } -fn source_info(span: Span) -> SourceInfo { - SourceInfo { - span, - scope: OUTERMOST_SOURCE_SCOPE, - } -} - fn build_drop_tree<'tcx, T: DropTreeBuilder<'tcx>>( cfg: &mut CFG<'tcx>, drops: &mut DropTree, @@ -1207,7 +1191,7 @@ fn build_drop_tree<'tcx, T: DropTreeBuilder<'tcx>>( }; cfg.terminate( blocks[drop_idx].unwrap(), - source_info(drop_data.0.span), + drop_data.0.source_info, terminator, ); } @@ -1216,14 +1200,14 @@ fn build_drop_tree<'tcx, T: DropTreeBuilder<'tcx>>( DropKind::Storage => { let block = blocks[drop_idx].unwrap(); let stmt = Statement { - source_info: source_info(drop_data.0.span), + source_info: drop_data.0.source_info, kind: StatementKind::StorageDead(drop_data.0.local), }; cfg.push(block, stmt); let target = blocks[drop_data.1].unwrap(); if target != block { let terminator = TerminatorKind::Goto { target }; - cfg.terminate(block, source_info(drop_data.0.span), terminator); + cfg.terminate(block, drop_data.0.source_info, terminator); } } } From f6d79aeb806b90220bbb83948afe96638b4a670b Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sat, 16 Nov 2019 15:53:27 +0000 Subject: [PATCH 05/19] Temp: Cleanup 3 (comments) --- src/librustc_mir/build/scope.rs | 307 +++++++++++++++++--------------- 1 file changed, 168 insertions(+), 139 deletions(-) diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index a727974853f8e..5eb613e84ae62 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -6,30 +6,31 @@ contents, and then pop it off. Every scope is named by a ### SEME Regions -When pushing a new scope, we record the current point in the graph (a +When pushing a new [Scope], we record the current point in the graph (a basic block); this marks the entry to the scope. We then generate more stuff in the control-flow graph. Whenever the scope is exited, either via a `break` or `return` or just by fallthrough, that marks an exit from the scope. Each lexical scope thus corresponds to a single-entry, multiple-exit (SEME) region in the control-flow graph. -For now, we keep a mapping from each `region::Scope` to its -corresponding SEME region for later reference (see caveat in next -paragraph). This is because region scopes are tied to -them. Eventually, when we shift to non-lexical lifetimes, there should -be no need to remember this mapping. +For now, we record the `region::Scope` to each SEME region for later reference +(see caveat in next paragraph). This is because destruction scopes are tied to +them. This may change in the future so that MIR lowering determines its own +destruction scopes. ### Not so SEME Regions In the course of building matches, it sometimes happens that certain code (namely guards) gets executed multiple times. This means that the scope lexical scope may in fact correspond to multiple, disjoint SEME regions. So in fact our -mapping is from one scope to a vector of SEME regions. +mapping is from one scope to a vector of SEME regions. Since the SEME regions +are disjoint, the mapping is still one-to-one for the set of SEME regions that +we're currently in. -Also in matches, the scopes assigned to arms are not even SEME regions! Each -arm has a single region with one entry for each pattern. 
We manually +Also in matches, the scopes assigned to arms are not always even SEME regions! +Each arm has a single region with one entry for each pattern. We manually manipulate the scheduled drops in this scope to avoid dropping things multiple -times, although drop elaboration would clean this up for value drops. +times. ### Drops @@ -60,25 +61,23 @@ that for now); any later drops would also drop `y`. There are numerous "normal" ways to early exit a scope: `break`, `continue`, `return` (panics are handled separately). Whenever an -early exit occurs, the method `exit_scope` is called. It is given the +early exit occurs, the method `break_scope` is called. It is given the current point in execution where the early exit occurs, as well as the scope you want to branch to (note that all early exits from to some -other enclosing scope). `exit_scope` will record this exit point and -also add all drops. +other enclosing scope). `break_scope` will record the set of drops currently +scheduled in a [DropTree]. Later, before `in_breakable_scope` exits, the drops +will be added to the CFG. -Panics are handled in a similar fashion, except that a panic always -returns out to the `DIVERGE_BLOCK`. To trigger a panic, simply call -`panic(p)` with the current point `p`. Or else you can call -`diverge_cleanup`, which will produce a block that you can branch to -which does the appropriate cleanup and then diverges. `panic(p)` -simply calls `diverge_cleanup()` and adds an edge from `p` to the -result. +Panics are handled in a similar fashion, except that the drops are added to the +mir once the rest of the function has finished being lowered. If a terminator +can panic, call `diverge_from(block)` with the block containing the terminator +`block`. -### Loop scopes +### Breakable scopes In addition to the normal scope stack, we track a loop scope stack -that contains only loops. It tracks where a `break` and `continue` -should go to. +that contains only loops and breakable blocks. It tracks where a `break`, +`continue` or `return` should go to. */ @@ -104,7 +103,7 @@ pub struct Scopes<'tcx> { /// Drops that need to be done on paths to the `GeneratorDrop` terminator. generator_drops: DropTree, - // TODO: what's this? + // TODO: implement caching // cached_unwind_drop: DropIdx, } @@ -173,8 +172,15 @@ rustc_index::newtype_index! { const ROOT_NODE: DropIdx = DropIdx::from_u32_const(0); const CONTINUE_NODE: DropIdx = DropIdx::from_u32_const(1); -/// A tree of drops that we have deferred lowering. -// TODO say some more. +/// A tree (usually, sometimes this is a forest of two trees) of drops that we +/// have deferred lowering. It's used for: +/// +/// * Drops on unwind paths +/// * Drops on generator drop paths (when a suspended generator is dropped) +/// * Drops on return and loop exit paths +/// +/// Once no more nodes could be added to the tree, we lower it to MIR in one go +/// in `build_drop_tree`. #[derive(Debug)] struct DropTree { /// Drops in the tree. @@ -185,8 +191,8 @@ struct DropTree { previous_drops: FxHashMap<(DropIdx, Local, DropKind), DropIdx>, /// Edges into the `DropTree` that need to be added once it's lowered. entry_points: Vec<(DropIdx, BasicBlock)>, - /// The number of root nodes in the tree. - num_roots: DropIdx, + /// The first non-root nodes in the forest. + first_non_root: DropIdx, } impl Scope { @@ -209,6 +215,13 @@ impl Scope { } } +/// A trait that determined how [DropTree::lower_to_mir] creates its blocks and +/// links to any entry nodes. 
+trait DropTreeBuilder<'tcx> {
+    fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock;
+    fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock);
+}
+
 impl DropTree {
     fn new(num_roots: usize) -> Self {
         let fake_source_info = SourceInfo { span: DUMMY_SP, scope: OUTERMOST_SOURCE_SCOPE };
@@ -221,7 +234,7 @@ impl DropTree {
         let drops = IndexVec::from_elem_n((fake_data, drop_idx), num_roots);
         Self {
             drops,
-            num_roots: DropIdx::from_usize(num_roots),
+            first_non_root: DropIdx::from_usize(num_roots),
             entry_points: Vec::new(),
             previous_drops: FxHashMap::default(),
         }
@@ -237,6 +250,123 @@ impl DropTree {
     fn add_entry(&mut self, from: BasicBlock, to: DropIdx) {
         self.entry_points.push((to, from));
     }
+
+    fn build_mir<'tcx, T: DropTreeBuilder<'tcx>>(
+        &mut self,
+        cfg: &mut CFG<'tcx>,
+        blocks: &mut IndexVec<DropIdx, Option<BasicBlock>>,
+    ) {
+        debug!("DropTree::build_mir(drops = {:#?})", self);
+
+        self.assign_blocks::<T>(cfg, blocks);
+        self.link_blocks(cfg, blocks)
+    }
+
+    /// Assign blocks for all of the drops in the drop tree that need them.
+    fn assign_blocks<'tcx, T: DropTreeBuilder<'tcx>>(
+        &mut self,
+        cfg: &mut CFG<'tcx>,
+        blocks: &mut IndexVec<DropIdx, Option<BasicBlock>>,
+    ) {
+        // StorageDead statements can share blocks with each other and also with
+        // a Drop terminator. We iterate through the blocks to find which blocks
+        // need their own block.
+        #[derive(Clone, Copy)]
+        enum Block {
+            // This drop is unreachable
+            None,
+            // This drop is only reachable through the `StorageDead` with the
+            // specified index.
+            Shares(DropIdx),
+            // This drop has more than one way of being reached, or it is
+            // branched to from outside the tree, or its predecessor is a
+            // `Value` drop.
+            Own,
+        }
+
+        let mut needs_block = IndexVec::from_elem(Block::None, &self.drops);
+        if self.first_non_root > CONTINUE_NODE {
+            // `continue` already has its own node.
+            needs_block[CONTINUE_NODE] = Block::Own;
+        }
+
+        // Sort so that we only need to check the last element.
+        let entry_points = &mut self.entry_points;
+        entry_points.sort();
+
+        for (drop_idx, drop_data) in self.drops.iter_enumerated().rev() {
+            if entry_points.last().map_or(false, |entry_point| entry_point.0 == drop_idx) {
+                let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
+                needs_block[drop_idx] = Block::Own;
+                while entry_points.last().map_or(false, |entry_point| entry_point.0 == drop_idx) {
+                    let entry_block = entry_points.pop().unwrap().1;
+                    T::add_entry(cfg, entry_block, block);
+                }
+            }
+            match needs_block[drop_idx] {
+                Block::None => continue,
+                Block::Own => {
+                    blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
+                }
+                Block::Shares(pred) => {
+                    blocks[drop_idx] = blocks[pred];
+                }
+            }
+            if let DropKind::Value = drop_data.0.kind {
+                needs_block[drop_data.1] = Block::Own;
+            } else {
+                if drop_idx >= self.first_non_root {
+                    match &mut needs_block[drop_data.1] {
+                        pred @ Block::None => *pred = Block::Shares(drop_idx),
+                        pred @ Block::Shares(_) => *pred = Block::Own,
+                        Block::Own => (),
+                    }
+                }
+            }
+        }
+
+        debug!("assign_blocks: blocks = {:#?}", blocks);
+        assert!(entry_points.is_empty());
+    }
+
+    fn link_blocks<'tcx>(&self, cfg: &mut CFG<'tcx>, blocks: &IndexVec<DropIdx, Option<BasicBlock>>) {
+        for (drop_idx, drop_data) in self.drops.iter_enumerated().rev() {
+            let block = if let Some(block) = blocks[drop_idx] {
+                block
+            } else {
+                continue;
+            };
+            match drop_data.0.kind {
+                DropKind::Value => {
+                    let terminator = TerminatorKind::Drop {
+                        target: blocks[drop_data.1].unwrap(),
+                        // The caller will handle this if needed.
+ unwind: None, + location: drop_data.0.local.into(), + }; + cfg.terminate( + block, + drop_data.0.source_info, + terminator, + ); + } + // Root nodes don't correspond to a drop. + DropKind::Storage if drop_idx < self.first_non_root => {} + DropKind::Storage => { + let stmt = Statement { + source_info: drop_data.0.source_info, + kind: StatementKind::StorageDead(drop_data.0.local), + }; + cfg.push(block, stmt); + let target = blocks[drop_data.1].unwrap(); + if target != block { + let terminator = TerminatorKind::Goto { target }; + cfg.terminate(block, drop_data.0.source_info, terminator); + } + } + } + } + } } impl<'tcx> Scopes<'tcx> { @@ -481,7 +611,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { drop_idx = drops.add_drop(*drop, drop_idx); } drops.add_entry(block, drop_idx); - // TODO: explain this hack! + // `build_drop_tree` doesn't have access to our source_info, so we + // create a dummy terminator now. `TerminatorKind::Resume` is used + // because MIR type checking will panic if it hasn't been overwritten. self.cfg.terminate(block, source_info, TerminatorKind::Resume); self.cfg.start_new_block().unit() @@ -892,7 +1024,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } -/// Builds drops for pop_scope and exit_scope. +/// Builds drops for pop_scope and leave_top_scope. fn build_scope_drops<'tcx>( cfg: &mut CFG<'tcx>, unwind_drops: &mut DropTree, @@ -964,10 +1096,6 @@ fn build_scope_drops<'tcx>( block.unit() } -trait DropTreeBuilder<'tcx> { - fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock; - fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock); -} impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { fn build_exit_tree( &mut self, @@ -977,14 +1105,11 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { let mut blocks = IndexVec::from_elem(None, &drops.drops); if continue_block.is_some() { blocks[CONTINUE_NODE] = continue_block; - debug_assert_eq!(drops.num_roots, DropIdx::new(2)); - } else { - debug_assert_eq!(drops.num_roots, CONTINUE_NODE); } - build_drop_tree::(&mut self.cfg, &mut drops, &mut blocks); + drops.build_mir::(&mut self.cfg, &mut blocks); if drops.drops.iter().any(|(drop, _)| drop.kind == DropKind::Value) { let unwind_target = self.diverge_cleanup(); - let num_roots = drops.num_roots.index(); + let num_roots = drops.first_non_root.index(); let mut unwind_indices = IndexVec::from_elem_n(unwind_target, num_roots); for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(num_roots) { match drop_data.0.kind { @@ -1037,7 +1162,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { let cfg = &mut self.cfg; let fn_span = self.fn_span; let mut blocks = IndexVec::from_elem(None, &drops.drops); - build_drop_tree::(cfg, drops, &mut blocks); + drops.build_mir::(cfg, &mut blocks); if let Some(root_block) = blocks[ROOT_NODE] { cfg.terminate( root_block, @@ -1071,7 +1196,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { } let mut blocks = IndexVec::from_elem(None, &drops.drops); blocks[ROOT_NODE] = resume_block; - build_drop_tree::(cfg, drops, &mut blocks); + drops.build_mir::(cfg, &mut blocks); if let (None, Some(new_resume_block)) = (resume_block, blocks[ROOT_NODE]) { let terminator = if should_abort { TerminatorKind::Abort @@ -1096,7 +1221,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { should_abort: bool, ) -> Option { let mut blocks = IndexVec::from_elem(None, &drops.drops); - build_drop_tree::(cfg, drops, &mut blocks); + drops.build_mir::(cfg, &mut blocks); if let Some(resume_block) = blocks[ROOT_NODE] { let terminator = if should_abort { TerminatorKind::Abort @@ -1118,102 +1243,6 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> 
{ } } -fn build_drop_tree<'tcx, T: DropTreeBuilder<'tcx>>( - cfg: &mut CFG<'tcx>, - drops: &mut DropTree, - blocks: &mut IndexVec>, -) { - debug!("build_drop_tree(drops = {:#?})", drops); - // TODO: Some comment about this. - #[derive(Clone, Copy)] - enum NeedsBlock { - NoPredecessor, - CanShare(DropIdx), - NeedsOwn, - } - - // TODO: Split this into two functions. - - // If a drop has multiple predecessors, they need to be in separate blocks - // so that they can both banch to the current drop. - let mut needs_block = IndexVec::from_elem(NeedsBlock::NoPredecessor, &drops.drops); - for root_idx in (ROOT_NODE..drops.num_roots).skip(1) { - needs_block[root_idx] = NeedsBlock::NeedsOwn; - } - - let entry_points = &mut drops.entry_points; - entry_points.sort(); - - for (drop_idx, drop_data) in drops.drops.iter_enumerated().rev() { - if entry_points.last().map_or(false, |entry_point| entry_point.0 == drop_idx) { - let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg)); - needs_block[drop_idx] = NeedsBlock::NeedsOwn; - while entry_points.last().map_or(false, |entry_point| entry_point.0 == drop_idx) { - let entry_block = entry_points.pop().unwrap().1; - T::add_entry(cfg, entry_block, block); - } - } - match needs_block[drop_idx] { - NeedsBlock::NoPredecessor => continue, - NeedsBlock::NeedsOwn => { - blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg)); - } - NeedsBlock::CanShare(pred) => { - blocks[drop_idx] = blocks[pred]; - } - } - if let DropKind::Value = drop_data.0.kind { - needs_block[drop_data.1] = NeedsBlock::NeedsOwn; - } else { - if drop_idx >= drops.num_roots { - match &mut needs_block[drop_data.1] { - pred @ NeedsBlock::NoPredecessor => *pred = NeedsBlock::CanShare(drop_idx), - pred @ NeedsBlock::CanShare(_) => *pred = NeedsBlock::NeedsOwn, - NeedsBlock::NeedsOwn => (), - } - } - } - } - assert!(entry_points.is_empty()); - debug!("build_drop_tree: blocks = {:#?}", blocks); - - for (drop_idx, drop_data) in drops.drops.iter_enumerated().rev() { - if let NeedsBlock::NoPredecessor = needs_block[drop_idx] { - continue; - } - match drop_data.0.kind { - DropKind::Value => { - let terminator = TerminatorKind::Drop { - target: blocks[drop_data.1].unwrap(), - // TODO: The caller will register this if needed. - unwind: None, - location: drop_data.0.local.into(), - }; - cfg.terminate( - blocks[drop_idx].unwrap(), - drop_data.0.source_info, - terminator, - ); - } - // Root nodes don't correspond to a drop. 
- DropKind::Storage if drop_idx < drops.num_roots => {} - DropKind::Storage => { - let block = blocks[drop_idx].unwrap(); - let stmt = Statement { - source_info: drop_data.0.source_info, - kind: StatementKind::StorageDead(drop_data.0.local), - }; - cfg.push(block, stmt); - let target = blocks[drop_data.1].unwrap(); - if target != block { - let terminator = TerminatorKind::Goto { target }; - cfg.terminate(block, drop_data.0.source_info, terminator); - } - } - } - } -} - struct ExitScopes; impl<'tcx> DropTreeBuilder<'tcx> for ExitScopes { From 49703930c7501d223af186b8e95780d867e2c350 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sat, 16 Nov 2019 16:06:12 +0000 Subject: [PATCH 06/19] Temp: Cleanup 4 (resume block) --- src/librustc_mir/build/scope.rs | 41 +++++++++------------------------ 1 file changed, 11 insertions(+), 30 deletions(-) diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 5eb613e84ae62..c1f1f1c28cd20 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -1152,6 +1152,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { &mut self.scopes.unwind_drops, self.fn_span, should_abort, + &mut None, ); } } @@ -1175,12 +1176,9 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { } // Build the drop tree for unwinding in the normal control flow paths. - let resume_block = Self::build_unwind_tree( - cfg, - &mut self.scopes.unwind_drops, - fn_span, - should_abort, - ); + let resume_block = &mut None; + let unwind_drops = &mut self.scopes.unwind_drops; + Self::build_unwind_tree(cfg, unwind_drops, fn_span, should_abort, resume_block); // Build the drop tree for unwinding when dropping a suspended // generator. @@ -1194,24 +1192,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { drops.entry_points.push((drop_data.1, blocks[drop_idx].unwrap())); } } - let mut blocks = IndexVec::from_elem(None, &drops.drops); - blocks[ROOT_NODE] = resume_block; - drops.build_mir::(cfg, &mut blocks); - if let (None, Some(new_resume_block)) = (resume_block, blocks[ROOT_NODE]) { - let terminator = if should_abort { - TerminatorKind::Abort - } else { - TerminatorKind::Resume - }; - cfg.terminate( - new_resume_block, - SourceInfo { - scope: OUTERMOST_SOURCE_SCOPE, - span: fn_span - }, - terminator, - ); - } + Self::build_unwind_tree(cfg, drops, fn_span, should_abort, resume_block); } fn build_unwind_tree( @@ -1219,26 +1200,26 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { drops: &mut DropTree, fn_span: Span, should_abort: bool, - ) -> Option { + resume_block: &mut Option, + ) { let mut blocks = IndexVec::from_elem(None, &drops.drops); + blocks[ROOT_NODE] = *resume_block; drops.build_mir::(cfg, &mut blocks); - if let Some(resume_block) = blocks[ROOT_NODE] { + if let (None, Some(resume)) = (*resume_block, blocks[ROOT_NODE]) { let terminator = if should_abort { TerminatorKind::Abort } else { TerminatorKind::Resume }; cfg.terminate( - resume_block, + resume, SourceInfo { scope: OUTERMOST_SOURCE_SCOPE, span: fn_span }, terminator, ); - Some(resume_block) - } else { - None + *resume_block = blocks[ROOT_NODE]; } } } From d88faaaad87b4a2dfb453d63d2fd6c2d12654cc5 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sat, 16 Nov 2019 22:19:24 +0000 Subject: [PATCH 07/19] Temp: Cleanup 5 (colors) --- src/librustc_mir/util/graphviz.rs | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/librustc_mir/util/graphviz.rs b/src/librustc_mir/util/graphviz.rs index ff2946d3a690e..cb65164e7110e 100644 --- a/src/librustc_mir/util/graphviz.rs +++ 
b/src/librustc_mir/util/graphviz.rs
@@ -102,10 +102,15 @@ pub fn write_node_label<W: Write, INIT, FINI>(block: BasicBlock,
     write!(w, r#"<table border="0" cellborder="1" cellspacing="0">"#)?;
 
     // Basic block number at the top.
-    write!(w, r#"<tr><td {attrs} colspan="{colspan}">{blk}</td></tr>"#,
-           attrs=r#"bgcolor="gray" align="center""#,
+    let (blk, color) = if data.is_cleanup {
+        (format!("{} (cleanup)", block.index()), "light blue")
+    } else {
+        (format!("{}", block.index()), "gray")
+    };
+    write!(w, r#"<tr><td bgcolor="{color}" align="center" colspan="{colspan}">{blk}</td></tr>"#,
            colspan=num_cols,
-           blk=block.index())?;
+           blk=blk,
+           color=color)?;
 
     init(w)?;
 
From afe8a416e6948df3478eed41b8ae5b0ac176b57a Mon Sep 17 00:00:00 2001
From: Matthew Jasper
Date: Sun, 17 Nov 2019 09:36:31 +0000
Subject: [PATCH 08/19] Temp: Cleanup 6 (Separate exit trees)

---
 src/librustc_mir/build/mod.rs   |  2 +-
 src/librustc_mir/build/scope.rs | 64 +++++++++++++++++----------------
 2 files changed, 35 insertions(+), 31 deletions(-)

diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs
index d6d011452647c..8e774aa901cf7 100644
--- a/src/librustc_mir/build/mod.rs
+++ b/src/librustc_mir/build/mod.rs
@@ -719,7 +719,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             fn_span: span,
             arg_count,
             is_generator,
-            scopes: scope::Scopes::new(is_generator),
+            scopes: scope::Scopes::new(),
             block_context: BlockContext::new(),
             source_scopes: IndexVec::new(),
             source_scope: OUTERMOST_SOURCE_SCOPE,
diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs
index c1f1f1c28cd20..7582cc2f718c1 100644
--- a/src/librustc_mir/build/scope.rs
+++ b/src/librustc_mir/build/scope.rs
@@ -153,8 +153,10 @@ struct BreakableScope<'tcx> {
     /// The destination of the loop/block expression itself (i.e., where to put
     /// the result of a `break` or `return` expression)
     break_destination: Place<'tcx>,
-    /// Drops that happen on the
-    drops: DropTree,
+    /// Drops that happen on the `break`/`return` path.
+    break_drops: DropTree,
+    /// Drops that happen on the `continue` path.
+    continue_drops: Option<DropTree>,
 }
 
 /// The target of an expression that breaks out of a scope
@@ -170,10 +172,8 @@ rustc_index::newtype_index! {
 }
 
 const ROOT_NODE: DropIdx = DropIdx::from_u32_const(0);
-const CONTINUE_NODE: DropIdx = DropIdx::from_u32_const(1);
 
-/// A tree (usually, sometimes this is a forest of two trees) of drops that we
-/// have deferred lowering. It's used for:
+/// A tree of drops that we have deferred lowering. It's used for:
 ///
 /// * Drops on unwind paths
 /// * Drops on generator drop paths (when a suspended generator is dropped)
@@ -187,12 +187,10 @@ struct DropTree {
     drops: IndexVec<DropIdx, (DropData, DropIdx)>,
     /// Map for finding the inverse of the `next_drop` relation:
     ///
-    /// `previous_drops[(next_drop[i], drops[i].local, drops[i].kind] == i`
+    /// `previous_drops[(drops[i].1, drops[i].0.local, drops[i].0.kind)] == i`
     previous_drops: FxHashMap<(DropIdx, Local, DropKind), DropIdx>,
     /// Edges into the `DropTree` that need to be added once it's lowered.
     entry_points: Vec<(DropIdx, BasicBlock)>,
-    /// The first non-root nodes in the forest.
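(For orientation: the `previous_drops` map above memoizes the child lookup, so two exits that drop the same locals in the same order share one path through the tree. A minimal standalone sketch of that idea, in ordinary Rust rather than compiler code, with all names invented for illustration:)

    use std::collections::HashMap;

    /// Each node records the local it drops and the index of its parent
    /// (the next drop to run after it). Index 0 is the synthetic root.
    struct ToyDropTree {
        drops: Vec<(u32, usize)>, // (local, parent index)
        previous_drops: HashMap<(usize, u32), usize>,
    }

    impl ToyDropTree {
        fn new() -> Self {
            ToyDropTree { drops: vec![(0, 0)], previous_drops: HashMap::new() }
        }

        /// Adding the same drop under the same parent twice returns the same node.
        fn add_drop(&mut self, local: u32, next: usize) -> usize {
            let drops = &mut self.drops;
            *self.previous_drops.entry((next, local)).or_insert_with(|| {
                drops.push((local, next));
                drops.len() - 1
            })
        }
    }

    fn main() {
        let mut tree = ToyDropTree::new();
        let a = tree.add_drop(1, 0);
        let b = tree.add_drop(2, a);
        // A second exit path through the same drops reuses the existing nodes.
        assert_eq!(tree.add_drop(1, 0), a);
        assert_eq!(tree.add_drop(2, a), b);
    }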
- first_non_root: DropIdx, } impl Scope { @@ -223,7 +221,7 @@ trait DropTreeBuilder<'tcx> { } impl DropTree { - fn new(num_roots: usize) -> Self { + fn new() -> Self { let fake_source_info = SourceInfo { span: DUMMY_SP, scope: OUTERMOST_SOURCE_SCOPE }; let fake_data = DropData { source_info: fake_source_info, @@ -231,10 +229,9 @@ impl DropTree { kind: DropKind::Storage, }; let drop_idx = DropIdx::MAX; - let drops = IndexVec::from_elem_n((fake_data, drop_idx), num_roots); + let drops = IndexVec::from_elem_n((fake_data, drop_idx), 1); Self { drops, - first_non_root: DropIdx::from_usize(num_roots), entry_points: Vec::new(), previous_drops: FxHashMap::default(), } @@ -248,6 +245,7 @@ impl DropTree { } fn add_entry(&mut self, from: BasicBlock, to: DropIdx) { + debug_assert!(to < self.drops.next_index()); self.entry_points.push((to, from)); } @@ -285,9 +283,11 @@ impl DropTree { } let mut needs_block = IndexVec::from_elem(Block::None, &self.drops); - if self.first_non_root > CONTINUE_NODE { - // `continue` already has its own node. - needs_block[CONTINUE_NODE] = Block::Own; + if blocks[ROOT_NODE].is_some() { + // In some cases (such as drops for `continue`) the root node + // already has a block. In this case, make sure that we don't + // override it. + needs_block[ROOT_NODE] = Block::Own; } // Sort so that we only need to check the last @@ -315,7 +315,7 @@ impl DropTree { if let DropKind::Value = drop_data.0.kind { needs_block[drop_data.1] = Block::Own; } else { - if drop_idx >= self.first_non_root { + if drop_idx != ROOT_NODE { match &mut needs_block[drop_data.1] { pred @ Block::None => *pred = Block::Shares(drop_idx), pred @ Block::Shares(_) => *pred = Block::Own, @@ -351,7 +351,7 @@ impl DropTree { ); } // Root nodes don't correspond to a drop. - DropKind::Storage if drop_idx < self.first_non_root => {} + DropKind::Storage if drop_idx == ROOT_NODE => {} DropKind::Storage => { let stmt = Statement { source_info: drop_data.0.source_info, @@ -370,12 +370,12 @@ impl DropTree { } impl<'tcx> Scopes<'tcx> { - pub(crate) fn new(is_generator: bool) -> Self { + pub(crate) fn new() -> Self { Self { scopes: Vec::new(), breakable_scopes: Vec::new(), - unwind_drops: DropTree::new(1), - generator_drops: DropTree::new(is_generator as usize), + unwind_drops: DropTree::new(), + generator_drops: DropTree::new(), } } @@ -436,13 +436,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let scope = BreakableScope { region_scope, break_destination, - drops: DropTree::new(1 + loop_block.is_some() as usize), + break_drops: DropTree::new(), + continue_drops: loop_block.map(|_| DropTree::new()), }; self.scopes.breakable_scopes.push(scope); let normal_exit_block = f(self); let breakable_scope = self.scopes.breakable_scopes.pop().unwrap(); assert!(breakable_scope.region_scope == region_scope); - let break_block = self.build_exit_tree(breakable_scope.drops, loop_block); + let break_block = self.build_exit_tree(breakable_scope.break_drops, None); + breakable_scope.continue_drops.map(|drops| { + self.build_exit_tree(drops, loop_block); + }); match (normal_exit_block, break_block) { (Some(block), None) | (None, Some(block)) => block, (None, None) => self.cfg.start_new_block().unit(), @@ -602,10 +606,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let region_scope = self.scopes.breakable_scopes[break_index].region_scope; let scope_index = self.scopes.scope_index(region_scope, span); - let exited_scopes = &self.scopes.scopes[scope_index + 1..]; - let scope_drops = exited_scopes.iter().flat_map(|scope| &scope.drops); + let drops = if 
destination.is_some() { + &mut self.scopes.breakable_scopes[break_index].break_drops + } else { + self.scopes.breakable_scopes[break_index].continue_drops.as_mut().unwrap() + }; - let drops = &mut self.scopes.breakable_scopes[break_index].drops; let mut drop_idx = DropIdx::from_u32(destination.is_none() as u32); for drop in scope_drops { drop_idx = drops.add_drop(*drop, drop_idx); @@ -1103,15 +1109,13 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { continue_block: Option, ) -> Option> { let mut blocks = IndexVec::from_elem(None, &drops.drops); - if continue_block.is_some() { - blocks[CONTINUE_NODE] = continue_block; - } + blocks[ROOT_NODE] = continue_block; + drops.build_mir::(&mut self.cfg, &mut blocks); if drops.drops.iter().any(|(drop, _)| drop.kind == DropKind::Value) { let unwind_target = self.diverge_cleanup(); - let num_roots = drops.first_non_root.index(); - let mut unwind_indices = IndexVec::from_elem_n(unwind_target, num_roots); - for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(num_roots) { + let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1); + for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) { match drop_data.0.kind { DropKind::Storage => { if self.is_generator { From e253d5b4e182bc592dde0356cfc3bbfff8cf22c7 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 17 Nov 2019 11:26:00 +0000 Subject: [PATCH 09/19] Temp: Caching --- src/librustc_mir/build/scope.rs | 125 ++++++++++++++++++++------------ 1 file changed, 78 insertions(+), 47 deletions(-) diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 7582cc2f718c1..c3514c423c642 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -102,9 +102,6 @@ pub struct Scopes<'tcx> { /// Drops that need to be done on paths to the `GeneratorDrop` terminator. generator_drops: DropTree, - - // TODO: implement caching - // cached_unwind_drop: DropIdx, } #[derive(Debug)] @@ -125,6 +122,14 @@ struct Scope { drops: Vec, moved_locals: Vec, + + /// The drop index that will drop everything in and below this scope on an + /// unwind path. + cached_unwind_block: Option, + + /// The drop index that will drop everything in and below this scope on a + /// generator drop path. + cached_generator_drop_block: Option, } #[derive(Clone, Copy, Debug)] @@ -211,6 +216,11 @@ impl Scope { DropKind::Storage => false, }) } + + fn invalidate_cache(&mut self) { + self.cached_unwind_block = None; + self.cached_generator_drop_block = None; + } } /// A trait that determined how [DropTree::lower_to_mir] creates its blocks and @@ -387,6 +397,8 @@ impl<'tcx> Scopes<'tcx> { region_scope_span: region_scope.1.span, drops: vec![], moved_locals: vec![], + cached_unwind_block: None, + cached_generator_drop_block: None, }); } @@ -407,10 +419,6 @@ impl<'tcx> Scopes<'tcx> { }) } - fn iter_mut(&mut self) -> impl DoubleEndedIterator + '_ { - self.scopes.iter_mut().rev() - } - /// Returns the topmost active scope, which is known to be alive until /// the next scope expression. fn topmost(&self) -> region::Scope { @@ -611,10 +619,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } else { self.scopes.breakable_scopes[break_index].continue_drops.as_mut().unwrap() }; - - let mut drop_idx = DropIdx::from_u32(destination.is_none() as u32); - for drop in scope_drops { - drop_idx = drops.add_drop(*drop, drop_idx); + let mut drop_idx = ROOT_NODE; + for scope in &self.scopes.scopes[scope_index + 1..] 
{ + for drop in &scope.drops { + drop_idx = drops.add_drop(*drop, drop_idx); + } } drops.add_entry(block, drop_idx); // `build_drop_tree` doesn't have access to our source_info, so we @@ -671,19 +680,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { )) } - /// Sets up a path that performs all required cleanup for dropping a generator. - /// - /// This path terminates in GeneratorDrop. Returns the start of the path. - /// None indicates there’s no cleanup to do at this point. - crate fn generator_drop_cleanup(&mut self, yield_block: BasicBlock) { - let drops = self.scopes.scopes.iter().flat_map(|scope| &scope.drops); - let mut next_drop = ROOT_NODE; - for drop in drops { - next_drop = self.scopes.generator_drops.add_drop(*drop, next_drop); - } - self.scopes.generator_drops.add_entry(yield_block, next_drop); - } - /// Creates a new source scope, nested in the current one. crate fn new_source_scope(&mut self, span: Span, @@ -778,8 +774,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { local: Local, drop_kind: DropKind, ) { - // TODO: add back in caching. - let _needs_drop = match drop_kind { + let needs_drop = match drop_kind { DropKind::Value => { if !self.hir.needs_drop(self.local_decls[local].ty) { return } true @@ -796,21 +791,28 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } }; - let scope = self.scopes.iter_mut() - .find(|scope| scope.region_scope == region_scope) - .unwrap_or_else(|| { - span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local); - }); + let invalidate_caches = needs_drop || self.is_generator; + for scope in self.scopes.scopes.iter_mut().rev() { + if invalidate_caches { + scope.invalidate_cache(); + } - let region_scope_span = region_scope.span(self.hir.tcx(), &self.hir.region_scope_tree); - // Attribute scope exit drops to scope's closing brace. - let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span); + if scope.region_scope == region_scope { + let region_scope_span = region_scope.span(self.hir.tcx(), &self.hir.region_scope_tree); + // Attribute scope exit drops to scope's closing brace. + let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span); - scope.drops.push(DropData { - source_info: SourceInfo { span: scope_end, scope: scope.source_scope }, - local, - kind: drop_kind, - }); + scope.drops.push(DropData { + source_info: SourceInfo { span: scope_end, scope: scope.source_scope }, + local, + kind: drop_kind, + }); + + return; + } + } + + span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local); } /// Indicates that the "local operand" stored in `local` is @@ -857,7 +859,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } Some(local_scope) => { - self.scopes.iter_mut().find(|scope| scope.region_scope == local_scope) + self.scopes.scopes.iter_mut().rfind(|scope| scope.region_scope == local_scope) .unwrap_or_else(|| bug!("scope {:?} not found in scope list!", local_scope)) } }; @@ -914,6 +916,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Manually drop the condition on both branches. let top_scope = self.scopes.scopes.last_mut().unwrap(); let top_drop_data = top_scope.drops.pop().unwrap(); + if self.is_generator { + top_scope.invalidate_cache(); + } match top_drop_data.kind { DropKind::Value { .. 
} => { @@ -950,14 +955,20 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fn diverge_cleanup(&mut self) -> DropIdx { let is_generator = self.is_generator; - let drops = self.scopes.scopes.iter() - .flat_map(|scope| &scope.drops) - .filter(|drop| is_generator || drop.kind == DropKind::Value); - let mut next_drop = ROOT_NODE; - for drop in drops { - next_drop = self.scopes.unwind_drops.add_drop(*drop, next_drop); + let (uncached_scope, mut cached_drop) = self.scopes.scopes.iter().enumerate().rev() + .find_map(|(scope_idx, scope)| { + scope.cached_unwind_block.map(|cached_block| (scope_idx + 1, cached_block)) + }) + .unwrap_or((0, ROOT_NODE)); + for scope in &mut self.scopes.scopes[uncached_scope..] { + for drop in &scope.drops { + if is_generator || drop.kind == DropKind::Value { + cached_drop = self.scopes.unwind_drops.add_drop(*drop, cached_drop); + } + } + scope.cached_unwind_block = Some(cached_drop); } - next_drop + cached_drop } /// Prepares to create a path that performs all required cleanup for @@ -970,6 +981,25 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.scopes.unwind_drops.add_entry(start, next_drop); } + /// Sets up a path that performs all required cleanup for dropping a generator. + /// + /// This path terminates in GeneratorDrop. Returns the start of the path. + /// None indicates there’s no cleanup to do at this point. + crate fn generator_drop_cleanup(&mut self, yield_block: BasicBlock) { + let (uncached_scope, mut cached_drop) = self.scopes.scopes.iter().enumerate().rev() + .find_map(|(scope_idx, scope)| { + scope.cached_generator_drop_block.map(|cached_block| (scope_idx + 1, cached_block)) + }) + .unwrap_or((0, ROOT_NODE)); + for scope in &mut self.scopes.scopes[uncached_scope..] { + for drop in &scope.drops { + cached_drop = self.scopes.generator_drops.add_drop(*drop, cached_drop); + } + scope.cached_generator_drop_block = Some(cached_drop); + } + self.scopes.generator_drops.add_entry(yield_block, cached_drop); + } + /// Utility function for *non*-scope code to build their own drops crate fn build_drop_and_replace(&mut self, block: BasicBlock, @@ -1027,6 +1057,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { assert_eq!(top_scope.region_scope, region_scope); top_scope.drops.clear(); + top_scope.invalidate_cache(); } } From ebe5971f2348410bc049dae9529c11007d1d5c54 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 17 Nov 2019 11:26:19 +0000 Subject: [PATCH 10/19] Temp: More test changes --- src/test/codegen/drop.rs | 6 ++-- .../mir-opt/generator-storage-dead-unwind.rs | 32 +++++++++---------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/src/test/codegen/drop.rs b/src/test/codegen/drop.rs index 959929fbafbf1..2499adc6f8f99 100644 --- a/src/test/codegen/drop.rs +++ b/src/test/codegen/drop.rs @@ -23,13 +23,13 @@ pub fn droppy() { // FIXME(eddyb) the `void @` forces a match on the instruction, instead of the // comment, that's `; call core::ptr::real_drop_in_place::` // for the `v0` mangling, should switch to matching on that once `legacy` is gone. 
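(Context for the reordered CHECK lines below: in LLVM IR, a drop lowered as `invoke` carries a landing pad because more cleanup would have to run if it unwound, while a drop lowered as a plain `call` has nothing left to clean up after it. A hypothetical reduction of the shape being matched, not the actual test body:)

    struct Droppy; // stand-in for the test's SomeUniqueName
    impl Drop for Droppy {
        fn drop(&mut self) {
            println!("dropped"); // keeps the drop glue observable
        }
    }

    fn droppy_pair() {
        let a = Droppy;
        let b = Droppy;
        // Locals drop in reverse order. Dropping `b` must be an `invoke`:
        // if it panics, `a` still needs to be dropped on the cleanup path.
        // Dropping `a` afterwards can be a plain `call`, since no cleanup
        // remains if it panics.
    }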
+// CHECK-NOT: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
+// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
+// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK-NOT: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
-// CHECK-NOT: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
-// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
-// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName
 // CHECK-NOT: {{(call|invoke) void @.*}}drop_in_place{{.*}}SomeUniqueName
 // The next line checks for the } that ends the function definition
diff --git a/src/test/mir-opt/generator-storage-dead-unwind.rs b/src/test/mir-opt/generator-storage-dead-unwind.rs
index 66484fc1f72b2..861959e7182ec 100644
--- a/src/test/mir-opt/generator-storage-dead-unwind.rs
+++ b/src/test/mir-opt/generator-storage-dead-unwind.rs
@@ -54,7 +54,7 @@ fn main() {
 //     StorageLive(_6);
 //     StorageLive(_7);
 //     _7 = move _2;
-//     _6 = const take::<Foo>(move _7) -> [return: bb2, unwind: bb11];
+//     _6 = const take::<Foo>(move _7) -> [return: bb2, unwind: bb9];
 // }
 // bb2: {
@@ -62,7 +62,7 @@
 //     StorageDead(_7);
 //     StorageDead(_6);
 //     StorageLive(_8);
 //     StorageLive(_9);
 //     _9 = move _3;
-//     _8 = const take::<Bar>(move _9) -> [return: bb3, unwind: bb10];
+//     _8 = const take::<Bar>(move _9) -> [return: bb3, unwind: bb8];
 // }
 // bb3: {
@@ -70,7 +70,7 @@
 //     StorageDead(_9);
 //     StorageDead(_8);
 //     ...
 //     StorageDead(_3);
 //     StorageDead(_2);
-//     drop(_1) -> [return: bb4, unwind: bb9];
+//     drop(_1) -> [return: bb4, unwind: bb11];
 // }
 // bb4: {
 //     return;
@@ -78,36 +78,36 @@
 // bb5: {
 //     ...
// StorageDead(_3); -// drop(_2) -> [return: bb6, unwind: bb8]; +// drop(_2) -> [return: bb6, unwind: bb12]; // } // bb6: { // StorageDead(_2); -// drop(_1) -> [return: bb7, unwind: bb9]; +// drop(_1) -> [return: bb7, unwind: bb11]; // } // bb7: { // generator_drop; // } // bb8 (cleanup): { -// StorageDead(_2); -// drop(_1) -> bb9; +// StorageDead(_9); +// StorageDead(_8); +// goto -> bb10; // } // bb9 (cleanup): { -// resume; +// StorageDead(_7); +// StorageDead(_6); +// goto -> bb10; // } // bb10 (cleanup): { -// StorageDead(_9); -// StorageDead(_8); -// goto -> bb12; +// StorageDead(_3); +// StorageDead(_2); +// drop(_1) -> bb11; // } // bb11 (cleanup): { -// StorageDead(_7); -// StorageDead(_6); -// goto -> bb12; +// resume; // } // bb12 (cleanup): { -// StorageDead(_3); // StorageDead(_2); -// drop(_1) -> bb9; +// drop(_1) -> bb11; // } // END rustc.main-{{closure}}.StateTransform.before.mir From 442e064018b8c21faa905c7228816448a959dc32 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 17 Nov 2019 11:30:48 +0000 Subject: [PATCH 11/19] Reduce the number of drop-flag assignments in unwind paths --- .../dataflow/move_paths/builder.rs | 5 +-- src/librustc_mir/util/elaborate_drops.rs | 42 +++++-------------- src/test/mir-opt/unusual-item-types.rs | 15 +++---- 3 files changed, 18 insertions(+), 44 deletions(-) diff --git a/src/librustc_mir/dataflow/move_paths/builder.rs b/src/librustc_mir/dataflow/move_paths/builder.rs index 52016d4c9363a..906776ed64259 100644 --- a/src/librustc_mir/dataflow/move_paths/builder.rs +++ b/src/librustc_mir/dataflow/move_paths/builder.rs @@ -348,6 +348,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { fn gather_terminator(&mut self, term: &Terminator<'tcx>) { match term.kind { TerminatorKind::Goto { target: _ } + | TerminatorKind::Return | TerminatorKind::Resume | TerminatorKind::Abort | TerminatorKind::GeneratorDrop @@ -355,10 +356,6 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { | TerminatorKind::FalseUnwind { .. } | TerminatorKind::Unreachable => {} - TerminatorKind::Return => { - self.gather_move(&Place::return_place()); - } - TerminatorKind::Assert { ref cond, .. } => { self.gather_operand(cond); } diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs index 67e5bfafafd12..1038d45e65289 100644 --- a/src/librustc_mir/util/elaborate_drops.rs +++ b/src/librustc_mir/util/elaborate_drops.rs @@ -163,8 +163,6 @@ where }); } DropStyle::Static => { - let loc = self.terminator_loc(bb); - self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep); self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop { location: self.place.clone(), target: self.succ, @@ -172,9 +170,7 @@ where }); } DropStyle::Conditional => { - let unwind = self.unwind; // FIXME(#43234) - let succ = self.succ; - let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind); + let drop_bb = self.complete_drop(self.succ, self.unwind); self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto { target: drop_bb }); @@ -236,7 +232,7 @@ where // Using `self.path` here to condition the drop on // our own drop flag. path: self.path - }.complete_drop(None, succ, unwind) + }.complete_drop(succ, unwind) } } @@ -265,13 +261,7 @@ where // Clear the "master" drop flag at the end. This is needed // because the "master" drop protects the ADT's discriminant, // which is invalidated after the ADT is dropped. 
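(The same reasoning drives the changes below: a drop flag only exists so that later, conditionally reached drops can test whether a value is still live, and an unwind path runs straight to `resume` without ever re-reading the flags, so resetting them there is dead work. A standalone model of that invariant, using a hand-rolled flag instead of the compiler's:)

    struct Slot {
        live: bool,            // hand-rolled stand-in for a MIR drop flag
        value: Option<String>, // the value the flag protects
    }

    fn normal_exit(slot: &mut Slot) {
        if slot.live {
            slot.value = None; // run the drop
            slot.live = false; // must reset: later code may test the flag again
        }
    }

    fn unwind_exit(slot: &mut Slot) {
        if slot.live {
            slot.value = None;
            // No reset: control flow goes straight to `resume`, and nothing
            // re-reads `slot.live` afterwards, so the store would be dead.
        }
    }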
-        let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234)
-        (
-            self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
-            unwind.map(|unwind| {
-                self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
-            })
-        )
+        (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind)
     }
 
     /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
@@ -827,9 +817,7 @@ where
             }
         }
         ty::Dynamic(..) => {
-            let unwind = self.unwind; // FIXME(#43234)
-            let succ = self.succ;
-            self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
+            self.complete_drop(self.succ, self.unwind)
         }
         ty::Array(ety, size) => {
             let size = size.try_eval_usize(self.tcx(), self.elaborator.param_env());
@@ -842,26 +830,18 @@ where
     }
 
     /// Returns a basic block that drops a place using the context
-    /// and path in `c`. If `mode` is something, also clear `c`
-    /// according to it.
+    /// and path in `c`.
     ///
     /// if FLAG(self.path)
-    ///     if let Some(mode) = mode: FLAG(self.path)[mode] = false
    ///     drop(self.place)
     fn complete_drop(
         &mut self,
-        drop_mode: Option<DropFlagMode>,
         succ: BasicBlock,
         unwind: Unwind,
     ) -> BasicBlock {
-        debug!("complete_drop({:?},{:?})", self, drop_mode);
+        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);
 
         let drop_block = self.drop_block(succ, unwind);
-        let drop_block = if let Some(mode) = drop_mode {
-            self.drop_flag_reset_block(mode, drop_block, unwind)
-        } else {
-            drop_block
-        };
 
         self.drop_flag_test_block(drop_block, succ, unwind)
     }
@@ -873,6 +853,11 @@ where
     {
         debug!("drop_flag_reset_block({:?},{:?})", self, mode);
 
+        if unwind.is_cleanup() {
+            // The drop flag isn't read again on the unwind path, so don't
+            // bother setting it.
+            return succ;
+        }
         let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
         let block_start = Location { block: block, statement_index: 0 };
         self.elaborator.clear_drop_flag(block_start, self.path, mode);
@@ -976,11 +961,6 @@ where
         self.elaborator.patch().new_temp(ty, self.source_info.span)
     }
 
-    fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
-        let body = self.elaborator.body();
-        self.elaborator.patch().terminator_loc(body, bb)
-    }
-
     fn constant_usize(&self, val: u16) -> Operand<'tcx> {
         Operand::Constant(box Constant {
             span: self.source_info.span,
diff --git a/src/test/mir-opt/unusual-item-types.rs b/src/test/mir-opt/unusual-item-types.rs
index 77ee2bc8bfecf..95951ffb88fe8 100644
--- a/src/test/mir-opt/unusual-item-types.rs
+++ b/src/test/mir-opt/unusual-item-types.rs
@@ -40,8 +40,8 @@ fn main() {
 // END rustc.E-V-{{constant}}.mir_map.0.mir
 
 // START rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir
-// bb0: {
-//     goto -> bb7;
+// bb0: {
+//     goto -> bb6;
 // }
 // bb1: {
 //     return;
@@ -53,17 +53,14 @@
 //     goto -> bb1;
 // }
 // bb4 (cleanup): {
-//     goto -> bb2;
+//     drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> bb2;
 // }
-// bb5 (cleanup): {
-//     drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> bb4;
+// bb5: {
+//     drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> [return: bb3, unwind: bb2];
 // }
 // bb6: {
-//     drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> [return: bb3, unwind: bb4];
-// }
-// bb7: {
 //     _2 = &mut (*_1);
-//     _3 = const <std::vec::Vec<i32> as std::ops::Drop>::drop(move _2) -> [return: bb6, unwind: bb5];
+//     _3 = const <std::vec::Vec<i32> as std::ops::Drop>::drop(move _2) -> [return: bb5, unwind: bb4];
 // }
 // END rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir
 
From 642829c7a3be4beb964cba5c9b35a525a7adf307 Mon Sep 17 00:00:00 2001
From: Matthew Jasper
Date:
Sun, 17 Nov 2019 14:19:40 +0000 Subject: [PATCH 12/19] fixup! Temp: Cleanup 5 (colors) --- src/librustc_mir/util/graphviz.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc_mir/util/graphviz.rs b/src/librustc_mir/util/graphviz.rs index cb65164e7110e..7dc5c1a151e88 100644 --- a/src/librustc_mir/util/graphviz.rs +++ b/src/librustc_mir/util/graphviz.rs @@ -103,7 +103,7 @@ pub fn write_node_label(block: BasicBlock, // Basic block number at the top. let (blk, color) = if data.is_cleanup { - (format!("{} (cleanup)", block.index()), "light blue") + (format!("{} (cleanup)", block.index()), "lightblue") } else { (format!("{}", block.index()), "gray") }; From d08346aa3be4b05f3f53009c111d205a42991abe Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 17 Nov 2019 14:38:59 +0000 Subject: [PATCH 13/19] Temp: Fix a bug --- src/librustc_mir/build/scope.rs | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index c3514c423c642..a72b2dab4be7c 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -651,24 +651,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock { // If we are emitting a `drop` statement, we need to have the cached // diverge cleanup pads ready in case that drop panics. - let scope = self.scopes.scopes.last().expect("exit_top_scope called with no scopes"); + let needs_cleanup = self.scopes.scopes.last().map_or(false, |scope| scope.needs_cleanup()); let is_generator = self.is_generator; - let needs_cleanup = scope.needs_cleanup(); - let unwind_to = if needs_cleanup { - let mut drops = self.scopes.scopes.iter() - .flat_map(|scope| &scope.drops) - .filter(|drop| is_generator || drop.kind == DropKind::Value); - let mut next_drop = ROOT_NODE; - let mut drop_info = drops.next().unwrap(); - for previous_drop_info in drops { - next_drop = self.scopes.unwind_drops.add_drop(*drop_info, next_drop); - drop_info = previous_drop_info; - } - next_drop + self.diverge_cleanup() } else { DropIdx::MAX }; + + let scope = self.scopes.scopes.last().expect("exit_top_scope called with no scopes"); unpack!(build_scope_drops( &mut self.cfg, &mut self.scopes.unwind_drops, @@ -1098,16 +1089,18 @@ fn build_scope_drops<'tcx>( match drop_data.kind { DropKind::Value => { + debug_assert_eq!(unwind_drops.drops[unwind_to].0.local, drop_data.local); + debug_assert_eq!(unwind_drops.drops[unwind_to].0.kind, drop_data.kind); + unwind_to = unwind_drops.drops[unwind_to].1; // If the operand has been moved, and we are not on an unwind // path, then don't generate the drop. (We only take this into // account for non-unwind paths so as not to disturb the // caching mechanism.) if scope.moved_locals.iter().any(|&o| o == local) { - unwind_to = unwind_drops.drops[unwind_to].1; continue; } - unwind_drops.entry_points.push((unwind_to, block)); + unwind_drops.add_entry(block, unwind_to); let next = cfg.start_new_block(); cfg.terminate(block, source_info, TerminatorKind::Drop { @@ -1119,6 +1112,8 @@ fn build_scope_drops<'tcx>( } DropKind::Storage => { if storage_dead_on_unwind { + debug_assert_eq!(unwind_drops.drops[unwind_to].0.local, drop_data.local); + debug_assert_eq!(unwind_drops.drops[unwind_to].0.kind, drop_data.kind); unwind_to = unwind_drops.drops[unwind_to].1; } // Only temps and vars need their storage dead. @@ -1224,6 +1219,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { // optimization is, but it is here. 
for (drop_idx, drop_data) in drops.drops.iter_enumerated() { if let DropKind::Value = drop_data.0.kind { + debug_assert!(drop_data.1 < drops.drops.next_index()); drops.entry_points.push((drop_data.1, blocks[drop_idx].unwrap())); } } From c1a0042364c3b6033dc7024f5d95b98185379f7b Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 17 Nov 2019 15:24:12 +0000 Subject: [PATCH 14/19] Temp: Appease tidy --- src/librustc_mir/build/scope.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index a72b2dab4be7c..c759342f8a801 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -789,7 +789,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } if scope.region_scope == region_scope { - let region_scope_span = region_scope.span(self.hir.tcx(), &self.hir.region_scope_tree); + let region_scope_span = region_scope.span( + self.hir.tcx(), + &self.hir.region_scope_tree, + ); // Attribute scope exit drops to scope's closing brace. let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span); From 90916d0e232ae1676788e535018d3965a4ffc400 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 17 Nov 2019 17:20:11 +0000 Subject: [PATCH 15/19] Temp: Fix debuginfo --- src/librustc_mir/build/scope.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index c759342f8a801..4755f640731be 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -370,8 +370,12 @@ impl DropTree { cfg.push(block, stmt); let target = blocks[drop_data.1].unwrap(); if target != block { + // Diagnostics don't use this `Span` but debuginfo + // might, which could cause breakpoints to end up in the + // wrong place. 
+ let source_info = SourceInfo { span: DUMMY_SP, ..drop_data.0.source_info }; let terminator = TerminatorKind::Goto { target }; - cfg.terminate(block, drop_data.0.source_info, terminator); + cfg.terminate(block, source_info, terminator); } } } From 56b3bf2354aedf9c7e91a5c2096400d3e6770287 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 17 Nov 2019 22:14:16 +0000 Subject: [PATCH 16/19] Make `into` schedule drop for the destination again --- src/librustc_mir/build/block.rs | 51 ++-- src/librustc_mir/build/expr/as_rvalue.rs | 7 +- src/librustc_mir/build/expr/as_temp.rs | 11 +- src/librustc_mir/build/expr/into.rs | 46 +++- src/librustc_mir/build/into.rs | 25 +- src/librustc_mir/build/matches/mod.rs | 68 ++---- src/librustc_mir/build/mod.rs | 12 +- src/librustc_mir/build/scope.rs | 177 ++++++++++++-- src/test/mir-opt/box_expr.rs | 2 +- src/test/mir-opt/issue-62289.rs | 19 +- src/test/ui/drop/dynamic-drop-async.rs | 163 ++++++++----- src/test/ui/drop/dynamic-drop.rs | 281 ++++++++++++++--------- 12 files changed, 573 insertions(+), 289 deletions(-) diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs index 7749bcc51f4c6..f35decf53b071 100644 --- a/src/librustc_mir/build/block.rs +++ b/src/librustc_mir/build/block.rs @@ -2,17 +2,20 @@ use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; use crate::build::ForGuard::OutsideGuard; use crate::build::matches::ArmHasGuard; use crate::hair::*; +use rustc::middle::region; use rustc::mir::*; use rustc::hir; use syntax_pos::Span; impl<'a, 'tcx> Builder<'a, 'tcx> { - pub fn ast_block(&mut self, - destination: &Place<'tcx>, - block: BasicBlock, - ast_block: &'tcx hir::Block, - source_info: SourceInfo) - -> BlockAnd<()> { + pub fn ast_block( + &mut self, + destination: &Place<'tcx>, + scope: Option, + block: BasicBlock, + ast_block: &'tcx hir::Block, + source_info: SourceInfo, + ) -> BlockAnd<()> { let Block { region_scope, opt_destruction_scope, @@ -21,17 +24,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { expr, targeted_by_break, safety_mode - } = - self.hir.mirror(ast_block); + } = self.hir.mirror(ast_block); self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), move |this| { this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| { if targeted_by_break { this.in_breakable_scope( None, destination.clone(), + scope, span, |this| Some(this.ast_block_stmts( destination, + scope, block, span, stmts, @@ -40,21 +44,30 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { )), ) } else { - this.ast_block_stmts(destination, block, span, stmts, expr, - safety_mode) + this.ast_block_stmts( + destination, + scope, + block, + span, + stmts, + expr, + safety_mode, + ) } }) }) } - fn ast_block_stmts(&mut self, - destination: &Place<'tcx>, - mut block: BasicBlock, - span: Span, - stmts: Vec>, - expr: Option>, - safety_mode: BlockSafety) - -> BlockAnd<()> { + fn ast_block_stmts( + &mut self, + destination: &Place<'tcx>, + scope: Option, + mut block: BasicBlock, + span: Span, + stmts: Vec>, + expr: Option>, + safety_mode: BlockSafety, + ) -> BlockAnd<()> { let this = self; // This convoluted structure is to avoid using recursion as we walk down a list @@ -180,7 +193,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.block_context.currently_ignores_tail_results(); this.block_context.push(BlockFrame::TailExpr { tail_result_is_ignored }); - unpack!(block = this.into(destination, block, expr)); + unpack!(block = this.into(destination, scope, block, expr)); let popped = this.block_context.pop(); 
assert!(popped.map_or(false, |bf|bf.is_tail_expr())); diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index 332b1da195902..e4869e02726fe 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -130,11 +130,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.cfg .push_assign(block, source_info, &Place::from(result), box_); - // initialize the box contents: + // Initialize the box contents. No scope is needed since the + // `Box` is already scheduled to be dropped. unpack!( block = this.into( &this.hir.tcx().mk_place_deref(Place::from(result)), - block, value + None, + block, + value, ) ); block.and(Rvalue::Use(Operand::Move(Place::from(result)))) diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs index 4dad9ab498f63..faca537e5b9e3 100644 --- a/src/librustc_mir/build/expr/as_temp.rs +++ b/src/librustc_mir/build/expr/as_temp.rs @@ -114,16 +114,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - unpack!(block = this.into(temp_place, block, expr)); - - if let Some(temp_lifetime) = temp_lifetime { - this.schedule_drop( - expr_span, - temp_lifetime, - temp, - DropKind::Value, - ); - } + unpack!(block = this.into(temp_place, temp_lifetime, block, expr)); block.and(temp) } diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs index d1bccbac008b9..1c91800e0a4f0 100644 --- a/src/librustc_mir/build/expr/into.rs +++ b/src/librustc_mir/build/expr/into.rs @@ -2,7 +2,9 @@ use crate::build::expr::category::{Category, RvalueFunc}; use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; +use crate::build::scope::DropKind; use crate::hair::*; +use rustc::middle::region; use rustc::mir::*; use rustc::ty::{self, CanonicalUserTypeAnnotation}; use rustc_data_structures::fx::FxHashMap; @@ -13,15 +15,18 @@ use rustc_target::spec::abi::Abi; impl<'a, 'tcx> Builder<'a, 'tcx> { /// Compile `expr`, storing the result into `destination`, which /// is assumed to be uninitialized. + /// If a `drop_scope` is provided, `destination` is scheduled to be dropped + /// in `scope` once it has been initialized. 
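(A standalone sketch of the contract documented above, with invented names rather than the real builder API: the drop is registered only after the code initializing the destination has been emitted, so an unwind during evaluation never tries to drop an uninitialized place:)

    struct ToyBuilder {
        scheduled_drops: Vec<&'static str>,
    }

    impl ToyBuilder {
        /// `drop_scope: Some(..)` means: register `destination` for cleanup
        /// immediately after it is written, never before.
        fn into_expr(&mut self, destination: &'static str, drop_scope: Option<u32>) {
            // ... emit the statements that initialize `destination` here ...
            if drop_scope.is_some() {
                // Only now is the destination live, so only now may its
                // drop be scheduled.
                self.scheduled_drops.push(destination);
            }
        }
    }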
pub fn into_expr( &mut self, destination: &Place<'tcx>, + scope: Option, mut block: BasicBlock, expr: Expr<'tcx>, ) -> BlockAnd<()> { debug!( - "into_expr(destination={:?}, block={:?}, expr={:?})", - destination, block, expr + "into_expr(destination={:?}, scope={:?}, block={:?}, expr={:?})", + destination, scope, block, expr ); // since we frequently have to reference `self` from within a @@ -37,6 +42,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { _ => false, }; + let schedule_drop = move |this: &mut Self| { + if let Some(drop_scope) = scope { + let local = destination.as_local() + .expect("cannot schedule drop of non-Local place"); + this.schedule_drop(expr_span, drop_scope, local, DropKind::Value); + } + }; + if !expr_is_block_or_scope { this.block_context.push(BlockFrame::SubExpr); } @@ -49,14 +62,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } => { let region_scope = (region_scope, source_info); this.in_scope(region_scope, lint_level, |this| { - this.into(destination, block, value) + this.into(destination, scope, block, value) }) } ExprKind::Block { body: ast_block } => { - this.ast_block(destination, block, ast_block, source_info) + this.ast_block(destination, scope, block, ast_block, source_info) } ExprKind::Match { scrutinee, arms } => { - this.match_expr(destination, expr_span, block, scrutinee, arms) + this.match_expr(destination, scope, expr_span, block, scrutinee, arms) } ExprKind::NeverToAny { source } => { let source = this.hir.mirror(source); @@ -69,6 +82,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // This is an optimization. If the expression was a call then we already have an // unreachable block. Don't bother to terminate it and create a new one. + schedule_drop(this); if is_call { block.unit() } else { @@ -168,6 +182,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.in_breakable_scope( Some(loop_block), destination.clone(), + scope, expr_span, move |this| { // conduct the test, if necessary @@ -186,12 +201,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // introduce a unit temporary as the destination for the loop body. let tmp = this.get_unit_temp(); // Execute the body, branching back to the test. - let body_block_end = unpack!(this.into(&tmp, body_block, body)); + // No scope is provided, since we've scheduled the drop above. + let body_block_end = unpack!(this.into(&tmp, None, body_block, body)); this.cfg.terminate( body_block_end, source_info, TerminatorKind::Goto { target: loop_block }, ); + schedule_drop(this); None }, ) @@ -234,8 +251,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { is_block_tail: None, }); let ptr_temp = Place::from(ptr_temp); - let block = unpack!(this.into(&ptr_temp, block, ptr)); - this.into(&this.hir.tcx().mk_place_deref(ptr_temp), block, val) + // No need for a scope, ptr_temp doesn't need drop + let block = unpack!(this.into(&ptr_temp, None, block, ptr)); + // Maybe we should provide a scope here so that + // `move_val_init` wouldn't leak on panic even with an + // arbitrary `val` expression, but `schedule_drop`, + // borrowck and drop elaboration all prevent us from + // dropping `ptr_temp.deref()`. 
+ this.into(&this.hir.tcx().mk_place_deref(ptr_temp), None, block, val) } else { let args: Vec<_> = args .into_iter() @@ -265,11 +288,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { from_hir_call, }, ); + schedule_drop(this); success.unit() } } ExprKind::Use { source } => { - this.into(destination, block, source) + this.into(destination, scope, block, source) } ExprKind::Borrow { arg, borrow_kind } => { // We don't do this in `as_rvalue` because we use `as_place` @@ -364,6 +388,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { destination, Rvalue::Aggregate(adt, fields) ); + schedule_drop(this); block.unit() } @@ -391,6 +416,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); this.cfg .push_assign(block, source_info, destination, rvalue); + schedule_drop(this); block.unit() } ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => { @@ -410,6 +436,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); this.cfg .push_assign(block, source_info, destination, rvalue); + schedule_drop(this); block.unit() } @@ -440,6 +467,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let rvalue = unpack!(block = this.as_local_rvalue(block, expr)); this.cfg.push_assign(block, source_info, destination, rvalue); + schedule_drop(this); block.unit() } }; diff --git a/src/librustc_mir/build/into.rs b/src/librustc_mir/build/into.rs index 077840c9ccf17..e57f10f0b14e9 100644 --- a/src/librustc_mir/build/into.rs +++ b/src/librustc_mir/build/into.rs @@ -6,6 +6,7 @@ use crate::build::{BlockAnd, Builder}; use crate::hair::*; +use rustc::middle::region; use rustc::mir::*; pub(in crate::build) trait EvalInto<'tcx> { @@ -13,19 +14,23 @@ pub(in crate::build) trait EvalInto<'tcx> { self, builder: &mut Builder<'_, 'tcx>, destination: &Place<'tcx>, + scope: Option, block: BasicBlock, ) -> BlockAnd<()>; } impl<'a, 'tcx> Builder<'a, 'tcx> { - pub fn into(&mut self, - destination: &Place<'tcx>, - block: BasicBlock, - expr: E) - -> BlockAnd<()> - where E: EvalInto<'tcx> + pub fn into( + &mut self, + destination: &Place<'tcx>, + scope: Option, + block: BasicBlock, + expr: E, + ) -> BlockAnd<()> + where + E: EvalInto<'tcx>, { - expr.eval_into(self, destination, block) + expr.eval_into(self, destination, scope, block) } } @@ -34,10 +39,11 @@ impl<'tcx> EvalInto<'tcx> for ExprRef<'tcx> { self, builder: &mut Builder<'_, 'tcx>, destination: &Place<'tcx>, + scope: Option, block: BasicBlock, ) -> BlockAnd<()> { let expr = builder.hir.mirror(self); - builder.into_expr(destination, block, expr) + builder.into_expr(destination, scope, block, expr) } } @@ -46,8 +52,9 @@ impl<'tcx> EvalInto<'tcx> for Expr<'tcx> { self, builder: &mut Builder<'_, 'tcx>, destination: &Place<'tcx>, + scope: Option, block: BasicBlock, ) -> BlockAnd<()> { - builder.into_expr(destination, block, self) + builder.into_expr(destination, scope, block, self) } } diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index 4cb0c9e162a2d..79e2b1779cb53 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -11,6 +11,7 @@ use crate::build::{BlockAnd, BlockAndExtension, Builder}; use crate::build::{GuardFrame, GuardFrameLocal, LocalsForNode}; use crate::hair::{self, *}; use rustc::hir::HirId; +use rustc::middle::region; use rustc::mir::*; use rustc::ty::{self, CanonicalUserTypeAnnotation, Ty}; use rustc::ty::layout::VariantIdx; @@ -101,6 +102,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub fn match_expr( &mut 
self, destination: &Place<'tcx>, + destination_scope: Option, span: Span, mut block: BasicBlock, scrutinee: ExprRef<'tcx>, @@ -227,54 +229,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }; // Step 5. Create everything else: the guards and the arms. - let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, mut candidates)| { - let arm_source_info = self.source_info(arm.span); - let arm_scope = (arm.scope, arm_source_info); - self.in_scope(arm_scope, arm.lint_level, |this| { - let body = this.hir.mirror(arm.body.clone()); - let scope = this.declare_bindings( - None, - arm.span, - &arm.top_pats_hack()[0], - ArmHasGuard(arm.guard.is_some()), - Some((Some(&scrutinee_place), scrutinee_span)), - ); - - let arm_block; - if candidates.len() == 1 { - arm_block = this.bind_and_guard_matched_candidate( - candidates.pop().unwrap(), - arm.guard.clone(), - &fake_borrow_temps, - scrutinee_span, - //match_scope, - ); - } else { - arm_block = this.cfg.start_new_block(); - for candidate in candidates { - this.clear_top_scope(arm.scope); - let binding_end = this.bind_and_guard_matched_candidate( - candidate, - arm.guard.clone(), - &fake_borrow_temps, - scrutinee_span, - //match_scope, - ); - this.cfg.terminate( - binding_end, - source_info, - TerminatorKind::Goto { target: arm_block }, - ); - } - } - - if let Some(source_scope) = scope { - this.source_scope = source_scope; - } - - this.into(destination, arm_block, body) - }) - }).collect(); + let arm_end_blocks = self.build_match_arms( + arm_candidates, + destination, + destination_scope, + fake_borrow_temps, + scrutinee_span, + Some((Some(&scrutinee_place), scrutinee_span)), + ); // all the arm blocks will rejoin here let end_block = self.cfg.start_new_block(); @@ -308,8 +270,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } => { let place = self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard); - unpack!(block = self.into(&place, block, initializer)); + let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); + unpack!(block = self.into(&place, Some(region_scope), block, initializer)); // Inject a fake read, see comments on `FakeReadCause::ForLet`. let source_info = self.source_info(irrefutable_pat.span); @@ -321,7 +284,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, ); - self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard); block.unit() } @@ -349,9 +311,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { user_ty_span, }, } => { + let region_scope = self.hir.region_scope_tree.var_scope(var.local_id); let place = self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard); - unpack!(block = self.into(&place, block, initializer)); + unpack!(block = self.into(&place, Some(region_scope), block, initializer)); // Inject a fake read, see comments on `FakeReadCause::ForLet`. let pattern_source_info = self.source_info(irrefutable_pat.span); @@ -397,7 +360,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }, ); - self.schedule_drop_for_binding(var, irrefutable_pat.span, OutsideGuard); block.unit() } @@ -1344,7 +1306,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// Note: we do not check earlier that if there is a guard, /// there cannot be move bindings. We avoid a use-after-move by only /// moving the binding once the guard has evaluated to true (see below). 
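(The property described above, shown in ordinary Rust: a by-value binding in a guarded arm is moved only once its guard has returned true, so a failed guard leaves the value intact for the later arms:)

    fn pick(v: Option<String>, take_it: bool) -> String {
        match v {
            // If `take_it` is false, `s` is not moved here...
            Some(s) if take_it => s,
            // ...so this arm can still bind and use the same value.
            Some(s) => format!("kept {} bytes", s.len()),
            None => String::new(),
        }
    }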
- fn bind_and_guard_matched_candidate<'pat>( + crate fn bind_and_guard_matched_candidate<'pat>( &mut self, candidate: Candidate<'pat, 'tcx>, guard: Option>, diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 8e774aa901cf7..06dff414109cd 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -618,6 +618,7 @@ where let return_block = unpack!(builder.in_breakable_scope( None, Place::return_place(), + Some(call_site_scope), fn_end, |builder| { Some(builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| { @@ -633,6 +634,7 @@ where if let Some(unreachable_block) = builder.cached_unreachable_block { builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); } + builder.unschedule_return_place_drop(); return_block.unit() })); @@ -673,7 +675,9 @@ fn construct_const<'a, 'tcx>( let mut block = START_BLOCK; let ast_expr = &tcx.hir().body(body_id).value; let expr = builder.hir.mirror(ast_expr); - unpack!(block = builder.into_expr(&Place::return_place(), block, expr)); + // We don't provide a scope because we can't unwind in constants, so won't + // need to drop the return place. + unpack!(block = builder.into_expr(&Place::return_place(), None, block, expr)); let source_info = builder.source_info(span); builder.cfg.terminate(block, source_info, TerminatorKind::Return); @@ -873,7 +877,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } let body = self.hir.mirror(ast_body); - self.into(&Place::return_place(), block, body) + let call_site = region::Scope { + id: ast_body.hir_id.local_id, + data: region::ScopeData::CallSite + }; + self.into(&Place::return_place(), Some(call_site), block, body) } fn set_correct_source_scope_for_arg( diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 4755f640731be..0abe664d30ce3 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -81,8 +81,8 @@ that contains only loops and breakable blocks. It tracks where a `break`, */ -use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG}; -use crate::hair::{Expr, ExprRef, LintLevel}; +use crate::build::{matches, BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG}; +use crate::hair::{Expr, ExprRef, LintLevel, Arm}; use rustc::middle::region; use rustc::hir; use rustc::mir::*; @@ -158,6 +158,8 @@ struct BreakableScope<'tcx> { /// The destination of the loop/block expression itself (i.e., where to put /// the result of a `break` or `return` expression) break_destination: Place<'tcx>, + /// The scope that the destination should have its drop scheduled in. + destination_scope: Option, /// Drops that happen on the `break`/`return` path. break_drops: DropTree, /// Drops that happen on the `continue` path. 
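(Why a breakable scope now carries two trees: in a loop like the sketch below, the `continue` path and the `break` path exit through different sets of pending drops and end up in different places, so their deferred drops cannot share one tree:)

    fn first_even(xs: &[i32]) -> Option<i32> {
        let mut iter = xs.iter();
        loop {
            let _guard = String::from("per-iteration state");
            match iter.next() {
                // `break` drops `_guard` and leaves the loop with a value.
                Some(&x) if x % 2 == 0 => break Some(x),
                // `continue` drops `_guard` and re-enters the loop body.
                Some(_) => continue,
                None => break None,
            }
        }
    }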
@@ -439,6 +441,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { &mut self, loop_block: Option, break_destination: Place<'tcx>, + destination_scope: Option, span: Span, f: F, ) -> BlockAnd<()> @@ -448,17 +451,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let scope = BreakableScope { region_scope, break_destination, + destination_scope, break_drops: DropTree::new(), continue_drops: loop_block.map(|_| DropTree::new()), }; + let continue_blocks = loop_block.map(|block| (block, self.diverge_cleanup())); self.scopes.breakable_scopes.push(scope); let normal_exit_block = f(self); let breakable_scope = self.scopes.breakable_scopes.pop().unwrap(); assert!(breakable_scope.region_scope == region_scope); - let break_block = self.build_exit_tree(breakable_scope.break_drops, None); breakable_scope.continue_drops.map(|drops| { - self.build_exit_tree(drops, loop_block); + self.build_exit_tree(drops, continue_blocks); }); + let break_block = self.build_exit_tree(breakable_scope.break_drops, None); match (normal_exit_block, break_block) { (Some(block), None) | (None, Some(block)) => block, (None, None) => self.cfg.start_new_block().unit(), @@ -582,24 +587,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span_bug!(span, "no enclosing breakable scope found") }) }; - let (break_index, destination) = match scope { + let (break_index, destination, dest_scope) = match scope { BreakableTarget::Return => { let scope = &self.scopes.breakable_scopes[0]; if scope.break_destination != Place::return_place() { span_bug!(span, "`return` in item with no return scope"); } - (0, Some(scope.break_destination.clone())) + (0, Some(scope.break_destination.clone()), scope.destination_scope) } BreakableTarget::Break(scope) => { let break_index = get_scope_index(scope); - ( - break_index, - Some(self.scopes.breakable_scopes[break_index].break_destination.clone()), - ) + let scope = &self.scopes.breakable_scopes[break_index]; + (break_index, Some(scope.break_destination.clone()), scope.destination_scope) } BreakableTarget::Continue(scope) => { let break_index = get_scope_index(scope); - (break_index, None) + (break_index, None, None) } }; @@ -607,7 +610,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { if let Some(value) = value { debug!("stmt_expr Break val block_context.push(SubExpr)"); self.block_context.push(BlockFrame::SubExpr); - unpack!(block = self.into(destination, block, value)); + unpack!(block = self.into(destination, dest_scope, block, value)); + dest_scope.map(|scope| { + self.unschedule_drop(scope, destination.as_local().unwrap()) + }); self.block_context.pop(); } else { self.cfg.push_assign_unit(block, source_info, destination) @@ -813,6 +819,40 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local); } + /// Unschedule a drop. Used for `break`, `return` and `match` expressions + /// when `record_operands_moved` is not powerful enough. + /// + /// The given local is expected to have a value drop scheduled in the given + /// scope and for that drop to be the most recent thing scheduled in that + /// scope. + fn unschedule_drop(&mut self, region_scope: region::Scope, local: Local ) { + if !self.hir.needs_drop(self.local_decls[local].ty) { + return; + } + for scope in self.scopes.scopes.iter_mut().rev() { + scope.invalidate_cache(); + + if scope.region_scope == region_scope { + let drop = scope.drops.pop(); + + match drop { + Some(DropData { + local: removed_local, + kind: DropKind::Value, + .. 
+ }) if removed_local == local => return, + _ => bug!( + "found wrong drop, expected value drop of {:?}, found {:?}", + local, + drop, + ), + } + } + } + + bug!("region scope {:?} not in scope to unschedule drop of {:?}", region_scope, local); + } + /// Indicates that the "local operand" stored in `local` is /// *moved* at some point during execution (see `local_scope` for /// more information about what a "local operand" is -- in short, @@ -1043,13 +1083,99 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { success_block } - // `match` arm scopes - // ================== + /// Lower the arms and guards of a match. + /// + /// This is here, and not in `build::matches` because we have to do some + /// careful scope manipulation to have the drop of the destination be + /// scheduled at the end of each arm and then cleared for the next arm. + crate fn build_match_arms( + &mut self, + arm_candidates: Vec<(&Arm<'tcx>, Vec>)>, + destination: &Place<'tcx>, + destination_scope: Option, + fake_borrow_temps: Vec<(PlaceRef<'b, 'tcx>, Local)>, + span: Span, + opt_match_place: Option<(Option<&Place<'tcx>>, Span)>, + ) -> Vec> { + if arm_candidates.is_empty() { + // If there are no arms to schedule drops, then we have to do it + // manually. + if let Some(scope) = destination_scope { + self.schedule_drop( + span, + scope, + destination.as_local().unwrap(), + DropKind::Value, + ); + } + return Vec::new(); + } + let mut first_arm = true; + let cached_unwind_block = self.diverge_cleanup(); + arm_candidates.into_iter().map(|(arm, mut candidates)| { + if first_arm { + first_arm = false; + } else { + destination_scope.map(|scope| { + self.unschedule_drop(scope, destination.as_local().unwrap()); + }); + let top_scope = &mut self.scopes.scopes.last_mut().unwrap(); + top_scope.cached_unwind_block = Some(cached_unwind_block); + } + + let arm_source_info = self.source_info(arm.span); + let arm_scope = (arm.scope, arm_source_info); + self.in_scope(arm_scope, arm.lint_level, |this| { + let body = this.hir.mirror(arm.body.clone()); + let scope = this.declare_bindings( + None, + arm.span, + &arm.top_pats_hack()[0], + matches::ArmHasGuard(arm.guard.is_some()), + opt_match_place, + ); + + let arm_block; + if candidates.len() == 1 { + arm_block = this.bind_and_guard_matched_candidate( + candidates.pop().unwrap(), + arm.guard.clone(), + &fake_borrow_temps, + span, + ); + } else { + arm_block = this.cfg.start_new_block(); + for candidate in candidates { + this.clear_top_scope(arm.scope); + let binding_end = this.bind_and_guard_matched_candidate( + candidate, + arm.guard.clone(), + &fake_borrow_temps, + span, + ); + this.cfg.terminate( + binding_end, + this.source_info(span), + TerminatorKind::Goto { target: arm_block }, + ); + } + } + + if let Some(source_scope) = scope { + this.source_scope = source_scope; + } + + this.into(destination, destination_scope, arm_block, body) + }) + }).collect() + } + + /// Unschedules any drops in the top scope. /// /// This is only needed for `match` arm scopes, because they have one /// entrance per pattern, but only one exit. - crate fn clear_top_scope(&mut self, region_scope: region::Scope) { + fn clear_top_scope(&mut self, region_scope: region::Scope) { let top_scope = self.scopes.scopes.last_mut().unwrap(); assert_eq!(top_scope.region_scope, region_scope); @@ -1057,6 +1183,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { top_scope.drops.clear(); top_scope.invalidate_cache(); } + + /// Unschedules the drop of the return place. 
+ /// + /// If the return type of a function requires drop, then we schedule it + /// in the outermost scope so that it's dropped if there's a panic while + /// we drop any local variables. But we don't want to drop it if we + /// return normally. + crate fn unschedule_return_place_drop(&mut self) { + assert_eq!(self.scopes.scopes.len(), 1); + assert!(self.scopes.scopes[0].drops.len() <= 1); + self.scopes.scopes[0].drops.clear(); + } } /// Builds drops for pop_scope and leave_top_scope. @@ -1139,14 +1277,17 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { fn build_exit_tree( &mut self, mut drops: DropTree, - continue_block: Option, + continue_block: Option<(BasicBlock, DropIdx)>, ) -> Option> { let mut blocks = IndexVec::from_elem(None, &drops.drops); - blocks[ROOT_NODE] = continue_block; + blocks[ROOT_NODE] = continue_block.map(|(block, _)| block); drops.build_mir::(&mut self.cfg, &mut blocks); if drops.drops.iter().any(|(drop, _)| drop.kind == DropKind::Value) { - let unwind_target = self.diverge_cleanup(); + let unwind_target = continue_block.map_or_else( + || self.diverge_cleanup(), + |(_, unwind_target)| unwind_target, + ); let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1); for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) { match drop_data.0.kind { diff --git a/src/test/mir-opt/box_expr.rs b/src/test/mir-opt/box_expr.rs index c13f249d83f3c..e853f6193f416 100644 --- a/src/test/mir-opt/box_expr.rs +++ b/src/test/mir-opt/box_expr.rs @@ -36,7 +36,7 @@ impl Drop for S { // } // bb1: { // _1 = move _2; -// drop(_2) -> bb2; +// drop(_2) -> [return: bb2, unwind: bb6]; // } // bb2: { // StorageDead(_2); diff --git a/src/test/mir-opt/issue-62289.rs b/src/test/mir-opt/issue-62289.rs index b2b1a71e10291..aa8d57b701997 100644 --- a/src/test/mir-opt/issue-62289.rs +++ b/src/test/mir-opt/issue-62289.rs @@ -24,7 +24,7 @@ fn main() { // StorageLive(_3); // StorageLive(_4); // _4 = std::option::Option::::None; -// _3 = const as std::ops::Try>::into_result(move _4) -> [return: bb1, unwind: bb12]; +// _3 = const as std::ops::Try>::into_result(move _4) -> [return: bb1, unwind: bb13]; // } // bb1: { // StorageDead(_4); @@ -40,16 +40,16 @@ fn main() { // StorageLive(_8); // StorageLive(_9); // _9 = _6; -// _8 = const >::from(move _9) -> [return: bb4, unwind: bb12]; +// _8 = const >::from(move _9) -> [return: bb4, unwind: bb13]; // } // bb4: { // StorageDead(_9); -// _0 = const > as std::ops::Try>::from_error(move _8) -> [return: bb5, unwind: bb12]; +// _0 = const > as std::ops::Try>::from_error(move _8) -> [return: bb5, unwind: bb13]; // } // bb5: { // StorageDead(_8); // StorageDead(_6); -// drop(_2) -> bb9; +// drop(_2) -> [return: bb9, unwind: bb11]; // } // bb6: { // StorageLive(_10); @@ -57,12 +57,12 @@ fn main() { // (*_2) = _10; // StorageDead(_10); // _1 = move _2; -// drop(_2) -> [return: bb7, unwind: bb11]; +// drop(_2) -> [return: bb7, unwind: bb12]; // } // bb7: { // StorageDead(_2); // _0 = std::option::Option::>::Some(move _1,); -// drop(_1) -> bb8; +// drop(_1) -> [return: bb8, unwind: bb11]; // } // bb8: { // StorageDead(_1); @@ -79,12 +79,15 @@ fn main() { // return; // } // bb11 (cleanup): { -// drop(_1) -> bb13; +// drop(_0) -> bb14; // } // bb12 (cleanup): { -// drop(_2) -> bb13; +// drop(_1) -> bb14; // } // bb13 (cleanup): { +// drop(_2) -> bb14; +// } +// bb14 (cleanup): { // resume; // } // } diff --git a/src/test/ui/drop/dynamic-drop-async.rs b/src/test/ui/drop/dynamic-drop-async.rs index 91063edf0f6c4..398bcb7ec0e82 100644 --- 
a/src/test/ui/drop/dynamic-drop-async.rs +++ b/src/test/ui/drop/dynamic-drop-async.rs @@ -7,7 +7,7 @@ // edition:2018 // ignore-wasm32-bare compiled with panic=abort by default -#![feature(slice_patterns)] +#![feature(slice_patterns, arbitrary_self_types)] #![allow(unused)] use std::{ @@ -45,6 +45,7 @@ impl Future for Defer { /// The `failing_op`-th operation will panic. struct Allocator { data: RefCell>, + name: &'static str, failing_op: usize, cur_ops: Cell, } @@ -56,23 +57,28 @@ impl Drop for Allocator { fn drop(&mut self) { let data = self.data.borrow(); if data.iter().any(|d| *d) { - panic!("missing free: {:?}", data); + panic!("missing free in {:?}: {:?}", self.name, data); } } } impl Allocator { - fn new(failing_op: usize) -> Self { - Allocator { failing_op, cur_ops: Cell::new(0), data: RefCell::new(vec![]) } + fn new(failing_op: usize, name: &'static str) -> Self { + Allocator { + failing_op, + name, + cur_ops: Cell::new(0), + data: RefCell::new(vec![]), + } } - fn alloc(&self) -> impl Future> + '_ { + fn alloc(self: &Rc) -> impl Future + 'static { self.fallible_operation(); let mut data = self.data.borrow_mut(); let addr = data.len(); data.push(true); - Defer { ready: false, value: Some(Ptr(addr, self)) } + Defer { ready: false, value: Some(Ptr(addr, self.clone())) } } fn fallible_operation(&self) { self.cur_ops.set(self.cur_ops.get() + 1); @@ -85,11 +91,11 @@ impl Allocator { // Type that tracks whether it was dropped and can panic when it's created or // destroyed. -struct Ptr<'a>(usize, &'a Allocator); -impl<'a> Drop for Ptr<'a> { +struct Ptr(usize, Rc); +impl Drop for Ptr { fn drop(&mut self) { match self.1.data.borrow_mut()[self.0] { - false => panic!("double free at index {:?}", self.0), + false => panic!("double free in {:?} at index {:?}", self.1.name, self.0), ref mut d => *d = false, } @@ -113,7 +119,7 @@ async fn dynamic_drop(a: Rc, c: bool) { }; } -struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>); +struct TwoPtrs(Ptr, Ptr); async fn struct_dynamic_drop(a: Rc, c0: bool, c1: bool, c: bool) { for i in 0..2 { let x; @@ -228,21 +234,62 @@ async fn subslice_pattern_reassign(a: Rc) { a.alloc().await; } -fn run_test(cx: &mut Context<'_>, ref f: F) +async fn panic_after_return(a: Rc, c: bool) -> (Ptr,) { + a.alloc().await; + let p = a.alloc().await; + if c { + a.alloc().await; + let q = a.alloc().await; + // We use a return type that isn't used anywhere else to make sure that + // the return place doesn't incorrectly end up in the generator state. + return (a.alloc().await,); + } + (a.alloc().await,) +} + + +async fn panic_after_init_by_loop(a: Rc) { + a.alloc().await; + let p = a.alloc().await; + let q = loop { + a.alloc().await; + let r = a.alloc().await; + break a.alloc().await; + }; +} + +async fn panic_after_init_by_match_with_bindings_and_guard(a: Rc, b: bool) { + a.alloc().await; + let p = a.alloc().await; + let q = match a.alloc().await { + ref _x if b => { + a.alloc().await; + let r = a.alloc().await; + a.alloc().await + } + _x => { + a.alloc().await; + let r = a.alloc().await; + a.alloc().await + }, + }; +} + +fn run_test(cx: &mut Context<'_>, ref f: F, name: &'static str) where F: Fn(Rc) -> G, - G: Future, + G: Future, { for polls in 0.. { // Run without any panics to find which operations happen after the // penultimate `poll`. 
- let first_alloc = Rc::new(Allocator::new(usize::MAX)); + let first_alloc = Rc::new(Allocator::new(usize::MAX, name)); let mut fut = Box::pin(f(first_alloc.clone())); let mut ops_before_last_poll = 0; let mut completed = false; for _ in 0..polls { ops_before_last_poll = first_alloc.cur_ops.get(); - if let Poll::Ready(()) = fut.as_mut().poll(cx) { + if let Poll::Ready(_) = fut.as_mut().poll(cx) { completed = true; } } @@ -251,7 +298,7 @@ where // Start at `ops_before_last_poll` so that we will always be able to // `poll` the expected number of times. for failing_op in ops_before_last_poll..first_alloc.cur_ops.get() { - let alloc = Rc::new(Allocator::new(failing_op + 1)); + let alloc = Rc::new(Allocator::new(failing_op + 1, name)); let f = &f; let cx = &mut *cx; let result = panic::catch_unwind(panic::AssertUnwindSafe(move || { @@ -281,46 +328,56 @@ fn clone_waker(data: *const ()) -> RawWaker { RawWaker::new(data, &RawWakerVTable::new(clone_waker, drop, drop, drop)) } +macro_rules! run_test { + ($ctxt:expr, $e:expr) => { run_test($ctxt, $e, stringify!($e)); }; +} + fn main() { let waker = unsafe { Waker::from_raw(clone_waker(ptr::null())) }; let context = &mut Context::from_waker(&waker); - run_test(context, |a| dynamic_init(a, false)); - run_test(context, |a| dynamic_init(a, true)); - run_test(context, |a| dynamic_drop(a, false)); - run_test(context, |a| dynamic_drop(a, true)); - - run_test(context, |a| assignment(a, false, false)); - run_test(context, |a| assignment(a, false, true)); - run_test(context, |a| assignment(a, true, false)); - run_test(context, |a| assignment(a, true, true)); - - run_test(context, |a| array_simple(a)); - run_test(context, |a| vec_simple(a)); - run_test(context, |a| vec_unreachable(a)); - - run_test(context, |a| struct_dynamic_drop(a, false, false, false)); - run_test(context, |a| struct_dynamic_drop(a, false, false, true)); - run_test(context, |a| struct_dynamic_drop(a, false, true, false)); - run_test(context, |a| struct_dynamic_drop(a, false, true, true)); - run_test(context, |a| struct_dynamic_drop(a, true, false, false)); - run_test(context, |a| struct_dynamic_drop(a, true, false, true)); - run_test(context, |a| struct_dynamic_drop(a, true, true, false)); - run_test(context, |a| struct_dynamic_drop(a, true, true, true)); - - run_test(context, |a| field_assignment(a, false)); - run_test(context, |a| field_assignment(a, true)); - - run_test(context, |a| mixed_drop_and_nondrop(a)); - - run_test(context, |a| slice_pattern_one_of(a, 0)); - run_test(context, |a| slice_pattern_one_of(a, 1)); - run_test(context, |a| slice_pattern_one_of(a, 2)); - run_test(context, |a| slice_pattern_one_of(a, 3)); - - run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, true)); - run_test(context, |a| subslice_pattern_from_end_with_drop(a, true, false)); - run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, true)); - run_test(context, |a| subslice_pattern_from_end_with_drop(a, false, false)); - run_test(context, |a| subslice_pattern_reassign(a)); + run_test!(context, |a| dynamic_init(a, false)); + run_test!(context, |a| dynamic_init(a, true)); + run_test!(context, |a| dynamic_drop(a, false)); + run_test!(context, |a| dynamic_drop(a, true)); + + run_test!(context, |a| assignment(a, false, false)); + run_test!(context, |a| assignment(a, false, true)); + run_test!(context, |a| assignment(a, true, false)); + run_test!(context, |a| assignment(a, true, true)); + + run_test!(context, |a| array_simple(a)); + run_test!(context, |a| vec_simple(a)); + 
run_test!(context, |a| vec_unreachable(a)); + + run_test!(context, |a| struct_dynamic_drop(a, false, false, false)); + run_test!(context, |a| struct_dynamic_drop(a, false, false, true)); + run_test!(context, |a| struct_dynamic_drop(a, false, true, false)); + run_test!(context, |a| struct_dynamic_drop(a, false, true, true)); + run_test!(context, |a| struct_dynamic_drop(a, true, false, false)); + run_test!(context, |a| struct_dynamic_drop(a, true, false, true)); + run_test!(context, |a| struct_dynamic_drop(a, true, true, false)); + run_test!(context, |a| struct_dynamic_drop(a, true, true, true)); + + run_test!(context, |a| field_assignment(a, false)); + run_test!(context, |a| field_assignment(a, true)); + + run_test!(context, |a| mixed_drop_and_nondrop(a)); + + run_test!(context, |a| slice_pattern_one_of(a, 0)); + run_test!(context, |a| slice_pattern_one_of(a, 1)); + run_test!(context, |a| slice_pattern_one_of(a, 2)); + run_test!(context, |a| slice_pattern_one_of(a, 3)); + + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, true)); + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, true, false)); + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, true)); + run_test!(context, |a| subslice_pattern_from_end_with_drop(a, false, false)); + run_test!(context, |a| subslice_pattern_reassign(a)); + + run_test!(context, |a| panic_after_return(a, false)); + run_test!(context, |a| panic_after_return(a, true)); + run_test!(context, |a| panic_after_init_by_loop(a)); + run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, false)); + run_test!(context, |a| panic_after_init_by_match_with_bindings_and_guard(a, true)); } diff --git a/src/test/ui/drop/dynamic-drop.rs b/src/test/ui/drop/dynamic-drop.rs index 29dcbfe9609a0..c2e9a09cfd47d 100644 --- a/src/test/ui/drop/dynamic-drop.rs +++ b/src/test/ui/drop/dynamic-drop.rs @@ -18,6 +18,7 @@ struct InjectedFailure; struct Allocator { data: RefCell>, + name: &'static str, failing_op: usize, cur_ops: Cell, } @@ -29,17 +30,18 @@ impl Drop for Allocator { fn drop(&mut self) { let data = self.data.borrow(); if data.iter().any(|d| *d) { - panic!("missing free: {:?}", data); + panic!("missing free in {:?}: {:?}", self.name, data); } } } impl Allocator { - fn new(failing_op: usize) -> Self { + fn new(failing_op: usize, name: &'static str) -> Self { Allocator { failing_op: failing_op, cur_ops: Cell::new(0), - data: RefCell::new(vec![]) + data: RefCell::new(vec![]), + name, } } fn alloc(&self) -> Ptr<'_> { @@ -54,20 +56,6 @@ impl Allocator { data.push(true); Ptr(addr, self) } - // FIXME(#47949) Any use of this indicates a bug in rustc: we should never - // be leaking values in the cases here. - // - // Creates a `Ptr<'_>` and checks that the allocated value is leaked if the - // `failing_op` is in the list of exception. 
- fn alloc_leaked(&self, exceptions: Vec) -> Ptr<'_> { - let ptr = self.alloc(); - - if exceptions.iter().any(|operation| *operation == self.failing_op) { - let mut data = self.data.borrow_mut(); - data[ptr.0] = false; - } - ptr - } } struct Ptr<'a>(usize, &'a Allocator); @@ -75,7 +63,7 @@ impl<'a> Drop for Ptr<'a> { fn drop(&mut self) { match self.1.data.borrow_mut()[self.0] { false => { - panic!("double free at index {:?}", self.0) + panic!("double free in {:?} at index {:?}", self.1.name, self.0) } ref mut d => *d = false } @@ -270,79 +258,148 @@ fn subslice_pattern_reassign(a: &Allocator) { } fn panic_after_return(a: &Allocator) -> Ptr<'_> { - // Panic in the drop of `p` or `q` can leak - let exceptions = vec![8, 9]; a.alloc(); let p = a.alloc(); { a.alloc(); let p = a.alloc(); - // FIXME (#47949) We leak values when we panic in a destructor after - // evaluating an expression with `rustc_mir::build::Builder::into`. - a.alloc_leaked(exceptions) + a.alloc() } } fn panic_after_return_expr(a: &Allocator) -> Ptr<'_> { - // Panic in the drop of `p` or `q` can leak - let exceptions = vec![8, 9]; a.alloc(); let p = a.alloc(); { a.alloc(); let q = a.alloc(); - // FIXME (#47949) - return a.alloc_leaked(exceptions); + return a.alloc(); } } fn panic_after_init(a: &Allocator) { - // Panic in the drop of `r` can leak - let exceptions = vec![8]; a.alloc(); let p = a.alloc(); let q = { a.alloc(); let r = a.alloc(); - // FIXME (#47949) - a.alloc_leaked(exceptions) + a.alloc() }; } fn panic_after_init_temp(a: &Allocator) { - // Panic in the drop of `r` can leak - let exceptions = vec![8]; a.alloc(); let p = a.alloc(); { a.alloc(); let r = a.alloc(); - // FIXME (#47949) - a.alloc_leaked(exceptions) + a.alloc() }; } fn panic_after_init_by_loop(a: &Allocator) { - // Panic in the drop of `r` can leak - let exceptions = vec![8]; a.alloc(); let p = a.alloc(); let q = loop { a.alloc(); let r = a.alloc(); - // FIXME (#47949) - break a.alloc_leaked(exceptions); + break a.alloc(); + }; +} + +fn panic_after_init_by_match(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + loop { + let q = match b { + true => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + false => { + a.alloc(); + let r = a.alloc(); + break a.alloc(); + } + }; + return; + }; +} + +fn panic_after_init_by_match_with_guard(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = match a.alloc() { + _ if b => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + _ => { + a.alloc(); + let r = a.alloc(); + a.alloc() + }, + }; +} + +fn panic_after_init_by_match_with_bindings_and_guard(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = match a.alloc() { + _x if b => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + _x => { + a.alloc(); + let r = a.alloc(); + a.alloc() + }, + }; +} + +fn panic_after_init_by_match_with_ref_bindings_and_guard(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = match a.alloc() { + ref _x if b => { + a.alloc(); + let r = a.alloc(); + a.alloc() + } + ref _x => { + a.alloc(); + let r = a.alloc(); + a.alloc() + }, + }; +} + +fn panic_after_init_by_break_if(a: &Allocator, b: bool) { + a.alloc(); + let p = a.alloc(); + let q = loop { + let r = a.alloc(); + break if b { + let s = a.alloc(); + a.alloc() + } else { + a.alloc() + }; }; } -fn run_test(mut f: F) +fn run_test(mut f: F, name: &'static str) where F: FnMut(&Allocator) { - let first_alloc = Allocator::new(usize::MAX); + let first_alloc = Allocator::new(usize::MAX, name); f(&first_alloc); for failing_op in 
1..first_alloc.cur_ops.get()+1 { - let alloc = Allocator::new(failing_op); + let alloc = Allocator::new(failing_op, name); let alloc = &alloc; let f = panic::AssertUnwindSafe(&mut f); let result = panic::catch_unwind(move || { @@ -360,77 +417,91 @@ fn run_test(mut f: F) } } -fn run_test_nopanic(mut f: F) +fn run_test_nopanic(mut f: F, name: &'static str) where F: FnMut(&Allocator) { - let first_alloc = Allocator::new(usize::MAX); + let first_alloc = Allocator::new(usize::MAX, name); f(&first_alloc); } +macro_rules! run_test { + ($e:expr) => { run_test($e, stringify!($e)); } +} + fn main() { - run_test(|a| dynamic_init(a, false)); - run_test(|a| dynamic_init(a, true)); - run_test(|a| dynamic_drop(a, false)); - run_test(|a| dynamic_drop(a, true)); - - run_test(|a| assignment2(a, false, false)); - run_test(|a| assignment2(a, false, true)); - run_test(|a| assignment2(a, true, false)); - run_test(|a| assignment2(a, true, true)); - - run_test(|a| assignment1(a, false)); - run_test(|a| assignment1(a, true)); - - run_test(|a| array_simple(a)); - run_test(|a| vec_simple(a)); - run_test(|a| vec_unreachable(a)); - - run_test(|a| struct_dynamic_drop(a, false, false, false)); - run_test(|a| struct_dynamic_drop(a, false, false, true)); - run_test(|a| struct_dynamic_drop(a, false, true, false)); - run_test(|a| struct_dynamic_drop(a, false, true, true)); - run_test(|a| struct_dynamic_drop(a, true, false, false)); - run_test(|a| struct_dynamic_drop(a, true, false, true)); - run_test(|a| struct_dynamic_drop(a, true, true, false)); - run_test(|a| struct_dynamic_drop(a, true, true, true)); - - run_test(|a| field_assignment(a, false)); - run_test(|a| field_assignment(a, true)); - - run_test(|a| generator(a, 0)); - run_test(|a| generator(a, 1)); - run_test(|a| generator(a, 2)); - run_test(|a| generator(a, 3)); - - run_test(|a| mixed_drop_and_nondrop(a)); - - run_test(|a| slice_pattern_first(a)); - run_test(|a| slice_pattern_middle(a)); - run_test(|a| slice_pattern_two(a)); - run_test(|a| slice_pattern_last(a)); - run_test(|a| slice_pattern_one_of(a, 0)); - run_test(|a| slice_pattern_one_of(a, 1)); - run_test(|a| slice_pattern_one_of(a, 2)); - run_test(|a| slice_pattern_one_of(a, 3)); - - run_test(|a| subslice_pattern_from_end(a, true)); - run_test(|a| subslice_pattern_from_end(a, false)); - run_test(|a| subslice_pattern_from_end_with_drop(a, true, true)); - run_test(|a| subslice_pattern_from_end_with_drop(a, true, false)); - run_test(|a| subslice_pattern_from_end_with_drop(a, false, true)); - run_test(|a| subslice_pattern_from_end_with_drop(a, false, false)); - run_test(|a| slice_pattern_reassign(a)); - run_test(|a| subslice_pattern_reassign(a)); - - run_test(|a| { + run_test!(|a| dynamic_init(a, false)); + run_test!(|a| dynamic_init(a, true)); + run_test!(|a| dynamic_drop(a, false)); + run_test!(|a| dynamic_drop(a, true)); + + run_test!(|a| assignment2(a, false, false)); + run_test!(|a| assignment2(a, false, true)); + run_test!(|a| assignment2(a, true, false)); + run_test!(|a| assignment2(a, true, true)); + + run_test!(|a| assignment1(a, false)); + run_test!(|a| assignment1(a, true)); + + run_test!(|a| array_simple(a)); + run_test!(|a| vec_simple(a)); + run_test!(|a| vec_unreachable(a)); + + run_test!(|a| struct_dynamic_drop(a, false, false, false)); + run_test!(|a| struct_dynamic_drop(a, false, false, true)); + run_test!(|a| struct_dynamic_drop(a, false, true, false)); + run_test!(|a| struct_dynamic_drop(a, false, true, true)); + run_test!(|a| struct_dynamic_drop(a, true, false, false)); + run_test!(|a| 
struct_dynamic_drop(a, true, false, true)); + run_test!(|a| struct_dynamic_drop(a, true, true, false)); + run_test!(|a| struct_dynamic_drop(a, true, true, true)); + + run_test!(|a| field_assignment(a, false)); + run_test!(|a| field_assignment(a, true)); + + run_test!(|a| generator(a, 0)); + run_test!(|a| generator(a, 1)); + run_test!(|a| generator(a, 2)); + run_test!(|a| generator(a, 3)); + + run_test!(|a| mixed_drop_and_nondrop(a)); + + run_test!(|a| slice_pattern_first(a)); + run_test!(|a| slice_pattern_middle(a)); + run_test!(|a| slice_pattern_two(a)); + run_test!(|a| slice_pattern_last(a)); + run_test!(|a| slice_pattern_one_of(a, 0)); + run_test!(|a| slice_pattern_one_of(a, 1)); + run_test!(|a| slice_pattern_one_of(a, 2)); + run_test!(|a| slice_pattern_one_of(a, 3)); + + run_test!(|a| subslice_pattern_from_end(a, true)); + run_test!(|a| subslice_pattern_from_end(a, false)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, true, true)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, true, false)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, false, true)); + run_test!(|a| subslice_pattern_from_end_with_drop(a, false, false)); + run_test!(|a| slice_pattern_reassign(a)); + run_test!(|a| subslice_pattern_reassign(a)); + + run_test!(|a| { panic_after_return(a); }); - run_test(|a| { + run_test!(|a| { panic_after_return_expr(a); }); - run_test(|a| panic_after_init(a)); - run_test(|a| panic_after_init_temp(a)); - run_test(|a| panic_after_init_by_loop(a)); - - run_test_nopanic(|a| union1(a)); + run_test!(|a| panic_after_init(a)); + run_test!(|a| panic_after_init_temp(a)); + run_test!(|a| panic_after_init_by_loop(a)); + run_test!(|a| panic_after_init_by_match(a, false)); + run_test!(|a| panic_after_init_by_match(a, true)); + run_test!(|a| panic_after_init_by_match_with_guard(a, false)); + run_test!(|a| panic_after_init_by_match_with_guard(a, true)); + run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, false)); + run_test!(|a| panic_after_init_by_match_with_bindings_and_guard(a, true)); + run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, false)); + run_test!(|a| panic_after_init_by_match_with_ref_bindings_and_guard(a, true)); + run_test!(|a| panic_after_init_by_break_if(a, false)); + run_test!(|a| panic_after_init_by_break_if(a, true)); + + run_test_nopanic(|a| union1(a), "|a| union1(a)"); } From 4d62cc623de74426d70aea544273b4eb52f83c0e Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 24 Nov 2019 12:27:43 +0000 Subject: [PATCH 17/19] Temp: update new test --- src/test/mir-opt/simplify_try.rs | 34 ++++++++++++++++---------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/src/test/mir-opt/simplify_try.rs b/src/test/mir-opt/simplify_try.rs index 7911fbd0a984d..432293814059a 100644 --- a/src/test/mir-opt/simplify_try.rs +++ b/src/test/mir-opt/simplify_try.rs @@ -40,7 +40,7 @@ fn main() { // } // bb0: { // _5 = discriminant(_1); -// switchInt(move _5) -> [0isize: bb4, 1isize: bb2, otherwise: bb1]; +// switchInt(move _5) -> [0isize: bb3, 1isize: bb2, otherwise: bb1]; // } // bb1: { // unreachable; @@ -49,16 +49,16 @@ fn main() { // _6 = ((_1 as Err).0: i32); // ((_0 as Err).0: i32) = move _6; // discriminant(_0) = 1; -// goto -> bb3; +// goto -> bb4; // } // bb3: { -// return; -// } -// bb4: { // _10 = ((_1 as Ok).0: u32); // ((_0 as Ok).0: u32) = move _10; // discriminant(_0) = 0; -// goto -> bb3; +// goto -> bb4; +// } +// bb4: { +// return; // } // } // END rustc.try_identity.SimplifyArmIdentity.before.mir @@ 
-95,7 +95,7 @@ fn main() { // } // bb0: { // _5 = discriminant(_1); -// switchInt(move _5) -> [0isize: bb4, 1isize: bb2, otherwise: bb1]; +// switchInt(move _5) -> [0isize: bb3, 1isize: bb2, otherwise: bb1]; // } // bb1: { // unreachable; @@ -104,16 +104,16 @@ fn main() { // _0 = move _1; // nop; // nop; -// goto -> bb3; +// goto -> bb4; // } // bb3: { -// return; -// } -// bb4: { // _0 = move _1; // nop; // nop; -// goto -> bb3; +// goto -> bb4; +// } +// bb4: { +// return; // } // } // END rustc.try_identity.SimplifyArmIdentity.after.mir @@ -150,16 +150,16 @@ fn main() { // } // bb0: { // _5 = discriminant(_1); -// goto -> bb2; +// goto -> bb1; // } // bb1: { -// return; -// } -// bb2: { // _0 = move _1; // nop; // nop; -// goto -> bb1; +// goto -> bb2; +// } +// bb2: { +// return; // } // } // END rustc.try_identity.SimplifyBranchSame.after.mir From 15beeeb01812a57addf10afe7b3d85a98843954b Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 24 Nov 2019 12:28:39 +0000 Subject: [PATCH 18/19] Temp: remove unnecessary drop flags after monomorphization --- src/librustc/mir/mod.rs | 32 +++++++++++++++---- src/librustc/mir/visit.rs | 10 +++++- src/librustc_codegen_ssa/mir/block.rs | 24 ++++++++++++-- src/librustc_mir/borrow_check/mod.rs | 1 + .../borrow_check/nll/invalidation.rs | 1 + src/librustc_mir/build/scope.rs | 2 ++ src/librustc_mir/dataflow/generic.rs | 4 +-- src/librustc_mir/dataflow/mod.rs | 4 +-- .../dataflow/move_paths/builder.rs | 2 +- src/librustc_mir/interpret/terminator.rs | 25 ++++++++++----- src/librustc_mir/shim.rs | 6 +++- .../transform/add_moves_for_packed_drops.rs | 7 ++-- src/librustc_mir/transform/elaborate_drops.rs | 11 +++++-- src/librustc_mir/transform/generator.rs | 2 ++ src/librustc_mir/transform/inline.rs | 2 +- src/librustc_mir/util/elaborate_drops.rs | 22 +++++++------ 16 files changed, 116 insertions(+), 39 deletions(-) diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs index bd793fd07bf22..9cf1572e0cbdc 100644 --- a/src/librustc/mir/mod.rs +++ b/src/librustc/mir/mod.rs @@ -1088,8 +1088,18 @@ pub enum TerminatorKind<'tcx> { /// Indicates a terminator that can never be reached. Unreachable, - /// Drop the `Place`. - Drop { location: Place<'tcx>, target: BasicBlock, unwind: Option }, + /// Drop the `Place`, possibly conditioned on a flag being true. + Drop { + location: Place<'tcx>, + /// Whether to drop the value. + /// + /// Before drop elaboration this is always `None. After drop elaboration + /// If this is `None` then the drop is unconditional, otherwise the drop + /// is only evaluated when the flag is true. + flag: Option>, + target: BasicBlock, + unwind: Option, + }, /// Drop the `Place` and assign the new value over it. This ensures /// that the assignment to `P` occurs *even if* the destructor for @@ -1464,7 +1474,10 @@ impl<'tcx> TerminatorKind<'tcx> { Abort => write!(fmt, "abort"), Yield { ref value, .. } => write!(fmt, "_1 = suspend({:?})", value), Unreachable => write!(fmt, "unreachable"), - Drop { ref location, .. } => write!(fmt, "drop({:?})", location), + Drop { ref location, flag: Some(ref flag), .. } => { + write!(fmt, "if {:?} drop({:?})", flag, location) + } + Drop { ref location, flag: None, .. } => write!(fmt, "drop({:?})", location), DropAndReplace { ref location, ref value, .. 
} => { write!(fmt, "replace({:?} <- {:?})", location, value) } @@ -2967,8 +2980,13 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> { values: values.clone(), targets: targets.clone(), }, - Drop { ref location, target, unwind } => { - Drop { location: location.fold_with(folder), target, unwind } + Drop { ref location, ref flag, target, unwind } => { + Drop { + location: location.fold_with(folder), + flag: flag.fold_with(folder), + target, + unwind, + } } DropAndReplace { ref location, ref value, target, unwind } => DropAndReplace { location: location.fold_with(folder), @@ -3025,7 +3043,9 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> { SwitchInt { ref discr, switch_ty, .. } => { discr.visit_with(visitor) || switch_ty.visit_with(visitor) } - Drop { ref location, .. } => location.visit_with(visitor), + Drop { ref location, ref flag, .. } => { + location.visit_with(visitor) || flag.visit_with(visitor) + }, DropAndReplace { ref location, ref value, .. } => { location.visit_with(visitor) || value.visit_with(visitor) } diff --git a/src/librustc/mir/visit.rs b/src/librustc/mir/visit.rs index fc0e77aab43a4..5648d55192750 100644 --- a/src/librustc/mir/visit.rs +++ b/src/librustc/mir/visit.rs @@ -432,14 +432,22 @@ macro_rules! make_mir_visitor { TerminatorKind::Drop { location, + flag, target: _, unwind: _, } => { self.visit_place( location, PlaceContext::MutatingUse(MutatingUseContext::Drop), - source_location + source_location, ); + if let Some(flag) = flag { + self.visit_place( + flag, + PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect), + source_location, + ); + } } TerminatorKind::DropAndReplace { diff --git a/src/librustc_codegen_ssa/mir/block.rs b/src/librustc_codegen_ssa/mir/block.rs index 14be0e80fb482..b1f19a4d1f1e4 100644 --- a/src/librustc_codegen_ssa/mir/block.rs +++ b/src/librustc_codegen_ssa/mir/block.rs @@ -321,8 +321,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { helper: TerminatorCodegenHelper<'b, 'tcx>, mut bx: Bx, location: &mir::Place<'tcx>, + flag: &Option>, target: mir::BasicBlock, unwind: Option, + source_info: mir::SourceInfo, ) { let ty = location.ty(self.mir, bx.tcx()).ty; let ty = self.monomorphize(&ty); @@ -335,6 +337,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { return } + if let Some(flag) = flag { + let flag = self.codegen_consume(&mut bx, &flag.as_ref()).immediate(); + let lltarget = helper.llblock(self, target); + let drop_block = self.new_block("drop"); + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); + bx.cond_br(flag, drop_block.llbb(), lltarget); + bx = drop_block; + self.set_debug_loc(&mut bx, source_info); + } + let place = self.codegen_place(&mut bx, &location.as_ref()); let (args1, args2); let mut args = if let Some(llextra) = place.llextra { @@ -854,8 +866,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { bx.unreachable(); } - mir::TerminatorKind::Drop { ref location, target, unwind } => { - self.codegen_drop_terminator(helper, bx, location, target, unwind); + mir::TerminatorKind::Drop { ref location, ref flag, target, unwind } => { + self.codegen_drop_terminator( + helper, + bx, + location, + flag, + target, + unwind, + terminator.source_info, + ); } mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => { diff --git a/src/librustc_mir/borrow_check/mod.rs b/src/librustc_mir/borrow_check/mod.rs index 90e39286ec84d..63ef796e720d5 100644 --- a/src/librustc_mir/borrow_check/mod.rs +++ 
b/src/librustc_mir/borrow_check/mod.rs @@ -618,6 +618,7 @@ impl<'cx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx } TerminatorKind::Drop { location: ref drop_place, + flag: _, target: _, unwind: _, } => { diff --git a/src/librustc_mir/borrow_check/nll/invalidation.rs b/src/librustc_mir/borrow_check/nll/invalidation.rs index 1d429e3a6dee6..af3390475868a 100644 --- a/src/librustc_mir/borrow_check/nll/invalidation.rs +++ b/src/librustc_mir/borrow_check/nll/invalidation.rs @@ -155,6 +155,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> { } TerminatorKind::Drop { location: ref drop_place, + flag: _, target: _, unwind: _, } => { diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 0abe664d30ce3..b7ede5d1464b2 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -352,6 +352,7 @@ impl DropTree { DropKind::Value => { let terminator = TerminatorKind::Drop { target: blocks[drop_data.1].unwrap(), + flag: None, // The caller will handle this if needed. unwind: None, location: drop_data.0.local.into(), @@ -1250,6 +1251,7 @@ fn build_scope_drops<'tcx>( let next = cfg.start_new_block(); cfg.terminate(block, source_info, TerminatorKind::Drop { location: local.into(), + flag: None, target: next, unwind: None }); diff --git a/src/librustc_mir/dataflow/generic.rs b/src/librustc_mir/dataflow/generic.rs index dd6238b80d174..5f8e175efc03b 100644 --- a/src/librustc_mir/dataflow/generic.rs +++ b/src/librustc_mir/dataflow/generic.rs @@ -480,7 +480,7 @@ where mir::TerminatorKind::Goto { target } | mir::TerminatorKind::Assert { target, cleanup: None, .. } | mir::TerminatorKind::Yield { resume: target, drop: None, .. } - | mir::TerminatorKind::Drop { target, location: _, unwind: None } + | mir::TerminatorKind::Drop { target, location: _, unwind: None, flag: _ } | mir::TerminatorKind::DropAndReplace { target, value: _, location: _, unwind: None } => { self.propagate_bits_into_entry_set_for(in_out, target, dirty_list); @@ -492,7 +492,7 @@ where } mir::TerminatorKind::Assert { target, cleanup: Some(unwind), .. } - | mir::TerminatorKind::Drop { target, location: _, unwind: Some(unwind) } + | mir::TerminatorKind::Drop { target, location: _, unwind: Some(unwind), flag: _ } | mir::TerminatorKind::DropAndReplace { target, value: _, diff --git a/src/librustc_mir/dataflow/mod.rs b/src/librustc_mir/dataflow/mod.rs index ad0f75d772548..0a8897484327a 100644 --- a/src/librustc_mir/dataflow/mod.rs +++ b/src/librustc_mir/dataflow/mod.rs @@ -840,7 +840,7 @@ where mir::TerminatorKind::Goto { target } | mir::TerminatorKind::Assert { target, cleanup: None, .. } | mir::TerminatorKind::Yield { resume: target, drop: None, .. } | - mir::TerminatorKind::Drop { target, location: _, unwind: None } | + mir::TerminatorKind::Drop { target, location: _, unwind: None, flag: _ } | mir::TerminatorKind::DropAndReplace { target, value: _, location: _, unwind: None } => { @@ -851,7 +851,7 @@ where self.propagate_bits_into_entry_set_for(in_out, drop, dirty_list); } mir::TerminatorKind::Assert { target, cleanup: Some(unwind), .. 
} | - mir::TerminatorKind::Drop { target, location: _, unwind: Some(unwind) } | + mir::TerminatorKind::Drop { target, location: _, unwind: Some(unwind), flag: _ } | mir::TerminatorKind::DropAndReplace { target, value: _, location: _, unwind: Some(unwind) } => { diff --git a/src/librustc_mir/dataflow/move_paths/builder.rs b/src/librustc_mir/dataflow/move_paths/builder.rs index 906776ed64259..0888d2988795b 100644 --- a/src/librustc_mir/dataflow/move_paths/builder.rs +++ b/src/librustc_mir/dataflow/move_paths/builder.rs @@ -368,7 +368,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { self.gather_operand(value); } - TerminatorKind::Drop { ref location, target: _, unwind: _ } => { + TerminatorKind::Drop { ref location, target: _, unwind: _, flag: _ } => { self.gather_move(location); } TerminatorKind::DropAndReplace { ref location, ref value, .. } => { diff --git a/src/librustc_mir/interpret/terminator.rs b/src/librustc_mir/interpret/terminator.rs index 50c4a249c63c2..f7282eca992a3 100644 --- a/src/librustc_mir/interpret/terminator.rs +++ b/src/librustc_mir/interpret/terminator.rs @@ -105,6 +105,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Drop { ref location, + ref flag, target, unwind, } => { @@ -113,14 +114,22 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let ty = place.layout.ty; trace!("TerminatorKind::drop: {:?}, type {}", location, ty); - let instance = Instance::resolve_drop_in_place(*self.tcx, ty); - self.drop_in_place( - place, - instance, - terminator.source_info.span, - target, - unwind - )?; + let should_drop = if let Some(flag) = flag { + let imm = self.read_scalar(self.eval_place_to_op(flag, None)?)?.not_undef()?; + imm.to_bool()? + } else { + true + }; + if should_drop { + let instance = Instance::resolve_drop_in_place(*self.tcx, ty); + self.drop_in_place( + place, + instance, + terminator.source_info.span, + target, + unwind + )?; + } } Assert { diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs index 17f5e3d4e47a9..759bbcd66e575 100644 --- a/src/librustc_mir/shim.rs +++ b/src/librustc_mir/shim.rs @@ -281,7 +281,7 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> { } } - fn get_drop_flag(&mut self, _path: Self::Path) -> Option> { + fn get_drop_flag(&mut self, _path: Self::Path) -> Option> { None } @@ -617,6 +617,7 @@ impl CloneShimBuilder<'tcx> { // `drop(dest[beg])`; self.block(vec![], TerminatorKind::Drop { location: self.tcx.mk_place_index(dest, beg), + flag: None, target: BasicBlock::new(8), unwind: None, }, true); @@ -674,6 +675,7 @@ impl CloneShimBuilder<'tcx> { // Drop previous field and goto previous cleanup block. 
self.block(vec![], TerminatorKind::Drop { location: previous_field, + flag: None, target: previous_cleanup, unwind: None, }, true); @@ -808,6 +810,7 @@ fn build_call_shim<'tcx>( // BB #1 - drop for Self block(&mut blocks, vec![], TerminatorKind::Drop { location: Place::from(rcvr_arg), + flag: None, target: BasicBlock::new(2), unwind: None }, false); @@ -818,6 +821,7 @@ fn build_call_shim<'tcx>( // BB #3 - drop if closure panics block(&mut blocks, vec![], TerminatorKind::Drop { location: Place::from(rcvr_arg), + flag: None, target: BasicBlock::new(4), unwind: None }, true); diff --git a/src/librustc_mir/transform/add_moves_for_packed_drops.rs b/src/librustc_mir/transform/add_moves_for_packed_drops.rs index 052631ddff371..c82daf814dc5d 100644 --- a/src/librustc_mir/transform/add_moves_for_packed_drops.rs +++ b/src/librustc_mir/transform/add_moves_for_packed_drops.rs @@ -90,9 +90,9 @@ fn add_move_for_packed_drop<'tcx>( is_cleanup: bool, ) { debug!("add_move_for_packed_drop({:?} @ {:?})", terminator, loc); - let (location, target, unwind) = match terminator.kind { - TerminatorKind::Drop { ref location, target, unwind } => - (location, target, unwind), + let (location, flag, target, unwind) = match terminator.kind { + TerminatorKind::Drop { ref location, ref flag, target, unwind } => + (location, flag, target, unwind), _ => unreachable!() }; @@ -116,6 +116,7 @@ fn add_move_for_packed_drop<'tcx>( Rvalue::Use(Operand::Move(location.clone()))); patch.patch_terminator(loc.block, TerminatorKind::Drop { location: Place::from(temp), + flag: flag.clone(), target: storage_dead_block, unwind }); diff --git a/src/librustc_mir/transform/elaborate_drops.rs b/src/librustc_mir/transform/elaborate_drops.rs index f91a08bcd9aa6..d10d7618489ac 100644 --- a/src/librustc_mir/transform/elaborate_drops.rs +++ b/src/librustc_mir/transform/elaborate_drops.rs @@ -257,8 +257,8 @@ impl<'a, 'b, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, 'b, 'tcx> { }) } - fn get_drop_flag(&mut self, path: Self::Path) -> Option> { - self.ctxt.drop_flag(path).map(Operand::Copy) + fn get_drop_flag(&mut self, path: Self::Path) -> Option> { + self.ctxt.drop_flag(path) } } @@ -374,10 +374,14 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { let resume_block = self.patch.resume_block(); match terminator.kind { - TerminatorKind::Drop { ref location, target, unwind } => { + TerminatorKind::Drop { ref location, ref flag, target, unwind } => { let init_data = self.initialization_data_at(loc); match self.move_data().rev_lookup.find(location.as_ref()) { LookupResult::Exact(path) => { + debug_assert!( + flag.is_none(), + "should not have drop flags before elaboration", + ); elaborate_drop( &mut Elaborator { init_data: &init_data, @@ -494,6 +498,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent); self.patch.patch_terminator(bb, TerminatorKind::Drop { location: location.clone(), + flag: None, target, unwind: Some(unwind) }); diff --git a/src/librustc_mir/transform/generator.rs b/src/librustc_mir/transform/generator.rs index 524b6b087908c..a857f9068120b 100644 --- a/src/librustc_mir/transform/generator.rs +++ b/src/librustc_mir/transform/generator.rs @@ -894,6 +894,7 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, body: &mut source_info, kind: TerminatorKind::Drop { location, + flag: None, target, unwind } @@ -1109,6 +1110,7 @@ fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock { let drop_clean = BasicBlock::new(body.basic_blocks().len()); let 
term = TerminatorKind::Drop { location: Place::from(self_arg()), + flag: None, target: return_block, unwind: None, }; diff --git a/src/librustc_mir/transform/inline.rs b/src/librustc_mir/transform/inline.rs index 5a34e3f471f66..826d2dce3b128 100644 --- a/src/librustc_mir/transform/inline.rs +++ b/src/librustc_mir/transform/inline.rs @@ -303,7 +303,7 @@ impl Inliner<'tcx> { let term = blk.terminator(); let mut is_drop = false; match term.kind { - TerminatorKind::Drop { ref location, target, unwind } | + TerminatorKind::Drop { ref location, target, unwind, .. } | TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => { is_drop = true; work_list.push(target); diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs index 1038d45e65289..57d7908534e30 100644 --- a/src/librustc_mir/util/elaborate_drops.rs +++ b/src/librustc_mir/util/elaborate_drops.rs @@ -79,7 +79,7 @@ pub trait DropElaborator<'a, 'tcx>: fmt::Debug { fn param_env(&self) -> ty::ParamEnv<'tcx>; fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle; - fn get_drop_flag(&mut self, path: Self::Path) -> Option>; + fn get_drop_flag(&mut self, path: Self::Path) -> Option>; fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode); @@ -165,14 +165,18 @@ where DropStyle::Static => { self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop { location: self.place.clone(), + flag: None, target: self.succ, unwind: self.unwind.into_option(), }); } DropStyle::Conditional => { - let drop_bb = self.complete_drop(self.succ, self.unwind); - self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto { - target: drop_bb + let flag = self.elaborator.get_drop_flag(self.path).unwrap(); + self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop { + location: self.place.clone(), + flag: Some(flag), + target: self.succ, + unwind: self.unwind.into_option(), }); } DropStyle::Open => { @@ -453,7 +457,7 @@ where if let Unwind::To(unwind) = unwind { unwind_blocks.as_mut().unwrap().push( self.drop_block(unwind, Unwind::InCleanup) - ); + ); } } else { values.pop(); @@ -624,6 +628,7 @@ where self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop { location: tcx.mk_place_deref(ptr.clone()), + flag: None, target: loop_block, unwind: unwind.into_option() }); @@ -866,9 +871,7 @@ where fn elaborated_drop_block(&mut self) -> BasicBlock { debug!("elaborated_drop_block({:?})", self); - let unwind = self.unwind; // FIXME(#43234) - let succ = self.succ; - let blk = self.drop_block(succ, unwind); + let blk = self.drop_block(self.succ, self.unwind); self.elaborate_drop(blk); blk } @@ -920,6 +923,7 @@ where fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { let block = TerminatorKind::Drop { location: self.place.clone(), + flag: None, target, unwind: unwind.into_option() }; @@ -940,7 +944,7 @@ where DropStyle::Dead => on_unset, DropStyle::Static => on_set, DropStyle::Conditional | DropStyle::Open => { - let flag = self.elaborator.get_drop_flag(self.path).unwrap(); + let flag = Operand::Copy(self.elaborator.get_drop_flag(self.path).unwrap()); let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset); self.new_block(unwind, term) } From f09a177150a2e9a4cd7d467106f7f6fe1bba45a4 Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Sun, 24 Nov 2019 14:52:16 +0000 Subject: [PATCH 19/19] Temp: limit copies of drop shims in incremental CGUs --- src/librustc/ty/instance.rs | 8 ++++++-- 1 file changed, 6 
insertions(+), 2 deletions(-) diff --git a/src/librustc/ty/instance.rs b/src/librustc/ty/instance.rs index 777db38850fec..6ce1af8c7d6f6 100644 --- a/src/librustc/ty/instance.rs +++ b/src/librustc/ty/instance.rs @@ -166,10 +166,14 @@ impl<'tcx> InstanceDef<'tcx> { return true } if let ty::InstanceDef::DropGlue(..) = *self { - // Drop glue wants to be instantiated at every codegen + // Drop glue generally wants to be instantiated at every codegen // unit, but without an #[inline] hint. We should make this // available to normal end-users. - return true + // + // When compiling with incremental, we can generate a lot of + // codegen units. Including drop glue into all of them has a + // considerable compile time cost. + return tcx.sess.opts.incremental.is_none(); } tcx.codegen_fn_attrs(self.def_id()).requests_inline() }
{blk}
{blk}