diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 291dbf603612a..c3c95226aebf6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -442,7 +442,7 @@ jobs: - name: x86_64-msvc-cargo env: SCRIPT: python x.py test src/tools/cargotest src/tools/cargo - RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc" + RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --enable-lld" VCVARS_BAT: vcvars64.bat NO_DEBUG_ASSERTIONS: 1 NO_LLVM_ASSERTIONS: 1 diff --git a/RELEASES.md b/RELEASES.md index 757821abcd153..fc9628bb365b4 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -43,7 +43,7 @@ Libraries - [Unicode 13 is now supported.][69929] - [`String` now implements `From<&mut str>`.][69661] - [`IoSlice` now implements `Copy`.][69403] -- [`Vec` now implements `From<[T; N]>`.][68692] Where `N` is less than 32. +- [`Vec` now implements `From<[T; N]>`.][68692] Where `N` is at most 32. - [`proc_macro::LexError` now implements `fmt::Display` and `Error`.][68899] - [`from_le_bytes`, `to_le_bytes`, `from_be_bytes`, `to_be_bytes`, `from_ne_bytes`, and `to_ne_bytes` methods are now `const` for all diff --git a/src/ci/azure-pipelines/auto.yml b/src/ci/azure-pipelines/auto.yml index 46d3cf7a38ca2..f8fa7b727d179 100644 --- a/src/ci/azure-pipelines/auto.yml +++ b/src/ci/azure-pipelines/auto.yml @@ -148,7 +148,7 @@ jobs: INITIAL_RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc x86_64-msvc-cargo: SCRIPT: python x.py test src/tools/cargotest src/tools/cargo - INITIAL_RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc + INITIAL_RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-lld VCVARS_BAT: vcvars64.bat # FIXME(#59637) NO_DEBUG_ASSERTIONS: 1 diff --git a/src/ci/docker/wasm32/Dockerfile b/src/ci/docker/wasm32/Dockerfile index 91c492d03c179..8232539edda77 100644 --- a/src/ci/docker/wasm32/Dockerfile +++ b/src/ci/docker/wasm32/Dockerfile @@ -27,6 +27,9 @@ ENV PATH=$PATH:/emsdk-portable ENV PATH=$PATH:/emsdk-portable/upstream/emscripten/ ENV PATH=$PATH:/emsdk-portable/node/12.9.1_64bit/bin/ ENV BINARYEN_ROOT=/emsdk-portable/upstream/ +ENV EMSDK=/emsdk-portable +ENV EM_CONFIG=/emsdk-portable/.emscripten +ENV EM_CACHE=/emsdk-portable/upstream/emscripten/cache ENV TARGETS=wasm32-unknown-emscripten diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index 1c120f8163459..92fec593a5410 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -505,7 +505,7 @@ jobs: - name: x86_64-msvc-cargo env: SCRIPT: python x.py test src/tools/cargotest src/tools/cargo - RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc + RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-lld VCVARS_BAT: vcvars64.bat # FIXME(#59637) NO_DEBUG_ASSERTIONS: 1 diff --git a/src/librustc_mir/dataflow/impls/borrowed_locals.rs b/src/librustc_mir/dataflow/impls/borrowed_locals.rs index 9905265741534..6eeb95c01629c 100644 --- a/src/librustc_mir/dataflow/impls/borrowed_locals.rs +++ b/src/librustc_mir/dataflow/impls/borrowed_locals.rs @@ -99,9 +99,6 @@ impl GenKillAnalysis<'tcx> for MaybeBorrowedLocals where K: BorrowAnalysisKind<'tcx>, { - // The generator transform relies on the fact that this analysis does **not** use "before" - // effects. - fn statement_effect( &self, trans: &mut impl GenKill, diff --git a/src/librustc_mir/dataflow/impls/init_locals.rs b/src/librustc_mir/dataflow/impls/init_locals.rs deleted file mode 100644 index 726330b1f035e..0000000000000 --- a/src/librustc_mir/dataflow/impls/init_locals.rs +++ /dev/null @@ -1,118 +0,0 @@ -//! 
A less precise version of `MaybeInitializedPlaces` whose domain is entire locals. -//! -//! A local will be maybe initialized if *any* projections of that local might be initialized. - -use crate::dataflow::{self, BottomValue, GenKill}; - -use rustc_index::bit_set::BitSet; -use rustc_middle::mir::visit::{PlaceContext, Visitor}; -use rustc_middle::mir::{self, BasicBlock, Local, Location}; - -pub struct MaybeInitializedLocals; - -impl BottomValue for MaybeInitializedLocals { - /// bottom = uninit - const BOTTOM_VALUE: bool = false; -} - -impl dataflow::AnalysisDomain<'tcx> for MaybeInitializedLocals { - type Idx = Local; - - const NAME: &'static str = "maybe_init_locals"; - - fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize { - body.local_decls.len() - } - - fn initialize_start_block(&self, body: &mir::Body<'tcx>, entry_set: &mut BitSet) { - // Function arguments are initialized to begin with. - for arg in body.args_iter() { - entry_set.insert(arg); - } - } -} - -impl dataflow::GenKillAnalysis<'tcx> for MaybeInitializedLocals { - // The generator transform relies on the fact that this analysis does **not** use "before" - // effects. - - fn statement_effect( - &self, - trans: &mut impl GenKill, - statement: &mir::Statement<'tcx>, - loc: Location, - ) { - TransferFunction { trans }.visit_statement(statement, loc) - } - - fn terminator_effect( - &self, - trans: &mut impl GenKill, - terminator: &mir::Terminator<'tcx>, - loc: Location, - ) { - TransferFunction { trans }.visit_terminator(terminator, loc) - } - - fn call_return_effect( - &self, - trans: &mut impl GenKill, - _block: BasicBlock, - _func: &mir::Operand<'tcx>, - _args: &[mir::Operand<'tcx>], - return_place: mir::Place<'tcx>, - ) { - trans.gen(return_place.local) - } - - /// See `Analysis::apply_yield_resume_effect`. - fn yield_resume_effect( - &self, - trans: &mut impl GenKill, - _resume_block: BasicBlock, - resume_place: mir::Place<'tcx>, - ) { - trans.gen(resume_place.local) - } -} - -struct TransferFunction<'a, T> { - trans: &'a mut T, -} - -impl Visitor<'tcx> for TransferFunction<'a, T> -where - T: GenKill, -{ - fn visit_local(&mut self, &local: &Local, context: PlaceContext, _: Location) { - use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, NonUseContext}; - match context { - // These are handled specially in `call_return_effect` and `yield_resume_effect`. - PlaceContext::MutatingUse(MutatingUseContext::Call | MutatingUseContext::Yield) => {} - - // Otherwise, when a place is mutated, we must consider it possibly initialized. - PlaceContext::MutatingUse(_) => self.trans.gen(local), - - // If the local is moved out of, or if it gets marked `StorageDead`, consider it no - // longer initialized. - PlaceContext::NonUse(NonUseContext::StorageDead) - | PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => self.trans.kill(local), - - // All other uses do not affect this analysis. 
- PlaceContext::NonUse( - NonUseContext::StorageLive - | NonUseContext::AscribeUserTy - | NonUseContext::VarDebugInfo, - ) - | PlaceContext::NonMutatingUse( - NonMutatingUseContext::Inspect - | NonMutatingUseContext::Copy - | NonMutatingUseContext::SharedBorrow - | NonMutatingUseContext::ShallowBorrow - | NonMutatingUseContext::UniqueBorrow - | NonMutatingUseContext::AddressOf - | NonMutatingUseContext::Projection, - ) => {} - } - } -} diff --git a/src/librustc_mir/dataflow/impls/mod.rs b/src/librustc_mir/dataflow/impls/mod.rs index ed01d6b01ea43..e199a174efbc3 100644 --- a/src/librustc_mir/dataflow/impls/mod.rs +++ b/src/librustc_mir/dataflow/impls/mod.rs @@ -22,15 +22,13 @@ use crate::dataflow::drop_flag_effects; mod borrowed_locals; pub(super) mod borrows; -mod init_locals; mod liveness; mod storage_liveness; pub use self::borrowed_locals::{MaybeBorrowedLocals, MaybeMutBorrowedLocals}; pub use self::borrows::Borrows; -pub use self::init_locals::MaybeInitializedLocals; pub use self::liveness::MaybeLiveLocals; -pub use self::storage_liveness::MaybeStorageLive; +pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive}; /// `MaybeInitializedPlaces` tracks all places that might be /// initialized upon reaching a particular point in the control flow diff --git a/src/librustc_mir/dataflow/impls/storage_liveness.rs b/src/librustc_mir/dataflow/impls/storage_liveness.rs index 2a2be069b1ed8..bbc4942030ef7 100644 --- a/src/librustc_mir/dataflow/impls/storage_liveness.rs +++ b/src/librustc_mir/dataflow/impls/storage_liveness.rs @@ -1,9 +1,11 @@ pub use super::*; use crate::dataflow::BottomValue; -use crate::dataflow::{self, GenKill}; +use crate::dataflow::{self, GenKill, Results, ResultsRefCursor}; use crate::util::storage::AlwaysLiveLocals; +use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; +use std::cell::RefCell; #[derive(Clone)] pub struct MaybeStorageLive { @@ -76,3 +78,233 @@ impl BottomValue for MaybeStorageLive { /// bottom = dead const BOTTOM_VALUE: bool = false; } + +type BorrowedLocalsResults<'a, 'tcx> = ResultsRefCursor<'a, 'a, 'tcx, MaybeBorrowedLocals>; + +/// Dataflow analysis that determines whether each local requires storage at a +/// given location; i.e. whether its storage can go away without being observed. +pub struct MaybeRequiresStorage<'mir, 'tcx> { + body: &'mir Body<'tcx>, + borrowed_locals: RefCell>, +} + +impl<'mir, 'tcx> MaybeRequiresStorage<'mir, 'tcx> { + pub fn new( + body: &'mir Body<'tcx>, + borrowed_locals: &'mir Results<'tcx, MaybeBorrowedLocals>, + ) -> Self { + MaybeRequiresStorage { + body, + borrowed_locals: RefCell::new(ResultsRefCursor::new(&body, borrowed_locals)), + } + } +} + +impl<'mir, 'tcx> dataflow::AnalysisDomain<'tcx> for MaybeRequiresStorage<'mir, 'tcx> { + type Idx = Local; + + const NAME: &'static str = "requires_storage"; + + fn bits_per_block(&self, body: &mir::Body<'tcx>) -> usize { + body.local_decls.len() + } + + fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut BitSet) { + // The resume argument is live on function entry (we don't care about + // the `self` argument) + for arg in body.args_iter().skip(1) { + on_entry.insert(arg); + } + } +} + +impl<'mir, 'tcx> dataflow::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'mir, 'tcx> { + fn before_statement_effect( + &self, + trans: &mut impl GenKill, + stmt: &mir::Statement<'tcx>, + loc: Location, + ) { + // If a place is borrowed in a statement, it needs storage for that statement. 
+ self.borrowed_locals.borrow().analysis().statement_effect(trans, stmt, loc); + + match &stmt.kind { + StatementKind::StorageDead(l) => trans.kill(*l), + + // If a place is assigned to in a statement, it needs storage for that statement. + StatementKind::Assign(box (place, _)) + | StatementKind::SetDiscriminant { box place, .. } => { + trans.gen(place.local); + } + StatementKind::LlvmInlineAsm(asm) => { + for place in &*asm.outputs { + trans.gen(place.local); + } + } + + // Nothing to do for these. Match exhaustively so this fails to compile when new + // variants are added. + StatementKind::AscribeUserType(..) + | StatementKind::FakeRead(..) + | StatementKind::Nop + | StatementKind::Retag(..) + | StatementKind::StorageLive(..) => {} + } + } + + fn statement_effect( + &self, + trans: &mut impl GenKill, + _: &mir::Statement<'tcx>, + loc: Location, + ) { + // If we move from a place then only stops needing storage *after* + // that statement. + self.check_for_move(trans, loc); + } + + fn before_terminator_effect( + &self, + trans: &mut impl GenKill, + terminator: &mir::Terminator<'tcx>, + loc: Location, + ) { + // If a place is borrowed in a terminator, it needs storage for that terminator. + self.borrowed_locals.borrow().analysis().terminator_effect(trans, terminator, loc); + + match &terminator.kind { + TerminatorKind::Call { destination: Some((place, _)), .. } => { + trans.gen(place.local); + } + + // Note that we do *not* gen the `resume_arg` of `Yield` terminators. The reason for + // that is that a `yield` will return from the function, and `resume_arg` is written + // only when the generator is later resumed. Unlike `Call`, this doesn't require the + // place to have storage *before* the yield, only after. + TerminatorKind::Yield { .. } => {} + + TerminatorKind::InlineAsm { operands, .. } => { + for op in operands { + match op { + InlineAsmOperand::Out { place, .. } + | InlineAsmOperand::InOut { out_place: place, .. } => { + if let Some(place) = place { + trans.gen(place.local); + } + } + InlineAsmOperand::In { .. } + | InlineAsmOperand::Const { .. } + | InlineAsmOperand::SymFn { .. } + | InlineAsmOperand::SymStatic { .. } => {} + } + } + } + + // Nothing to do for these. Match exhaustively so this fails to compile when new + // variants are added. + TerminatorKind::Call { destination: None, .. } + | TerminatorKind::Abort + | TerminatorKind::Assert { .. } + | TerminatorKind::Drop { .. } + | TerminatorKind::DropAndReplace { .. } + | TerminatorKind::FalseEdges { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::GeneratorDrop + | TerminatorKind::Goto { .. } + | TerminatorKind::Resume + | TerminatorKind::Return + | TerminatorKind::SwitchInt { .. } + | TerminatorKind::Unreachable => {} + } + } + + fn terminator_effect( + &self, + trans: &mut impl GenKill, + terminator: &mir::Terminator<'tcx>, + loc: Location, + ) { + match &terminator.kind { + // For call terminators the destination requires storage for the call + // and after the call returns successfully, but not after a panic. + // Since `propagate_call_unwind` doesn't exist, we have to kill the + // destination here, and then gen it again in `call_return_effect`. + TerminatorKind::Call { destination: Some((place, _)), .. } => { + trans.kill(place.local); + } + + // Nothing to do for these. Match exhaustively so this fails to compile when new + // variants are added. + TerminatorKind::Call { destination: None, .. } + | TerminatorKind::Yield { .. } + | TerminatorKind::Abort + | TerminatorKind::Assert { .. 
} + | TerminatorKind::Drop { .. } + | TerminatorKind::DropAndReplace { .. } + | TerminatorKind::FalseEdges { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::GeneratorDrop + | TerminatorKind::Goto { .. } + | TerminatorKind::InlineAsm { .. } + | TerminatorKind::Resume + | TerminatorKind::Return + | TerminatorKind::SwitchInt { .. } + | TerminatorKind::Unreachable => {} + } + + self.check_for_move(trans, loc); + } + + fn call_return_effect( + &self, + trans: &mut impl GenKill, + _block: BasicBlock, + _func: &mir::Operand<'tcx>, + _args: &[mir::Operand<'tcx>], + return_place: mir::Place<'tcx>, + ) { + trans.gen(return_place.local); + } + + fn yield_resume_effect( + &self, + trans: &mut impl GenKill, + _resume_block: BasicBlock, + resume_place: mir::Place<'tcx>, + ) { + trans.gen(resume_place.local); + } +} + +impl<'mir, 'tcx> MaybeRequiresStorage<'mir, 'tcx> { + /// Kill locals that are fully moved and have not been borrowed. + fn check_for_move(&self, trans: &mut impl GenKill, loc: Location) { + let mut visitor = MoveVisitor { trans, borrowed_locals: &self.borrowed_locals }; + visitor.visit_location(&self.body, loc); + } +} + +impl<'mir, 'tcx> BottomValue for MaybeRequiresStorage<'mir, 'tcx> { + /// bottom = dead + const BOTTOM_VALUE: bool = false; +} + +struct MoveVisitor<'a, 'mir, 'tcx, T> { + borrowed_locals: &'a RefCell>, + trans: &'a mut T, +} + +impl<'a, 'mir, 'tcx, T> Visitor<'tcx> for MoveVisitor<'a, 'mir, 'tcx, T> +where + T: GenKill, +{ + fn visit_local(&mut self, local: &Local, context: PlaceContext, loc: Location) { + if PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) == context { + let mut borrowed_locals = self.borrowed_locals.borrow_mut(); + borrowed_locals.seek_before_primary_effect(loc); + if !borrowed_locals.contains(*local) { + self.trans.kill(*local); + } + } + } +} diff --git a/src/librustc_mir/dataflow/move_paths/builder.rs b/src/librustc_mir/dataflow/move_paths/builder.rs index da02211d002a8..4749d74684547 100644 --- a/src/librustc_mir/dataflow/move_paths/builder.rs +++ b/src/librustc_mir/dataflow/move_paths/builder.rs @@ -362,18 +362,17 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { fn gather_terminator(&mut self, term: &Terminator<'tcx>) { match term.kind { TerminatorKind::Goto { target: _ } - | TerminatorKind::FalseEdges { .. } - | TerminatorKind::FalseUnwind { .. } - // In some sense returning moves the return place into the current - // call's destination, however, since there are no statements after - // this that could possibly access the return place, this doesn't - // need recording. - | TerminatorKind::Return | TerminatorKind::Resume | TerminatorKind::Abort | TerminatorKind::GeneratorDrop + | TerminatorKind::FalseEdges { .. } + | TerminatorKind::FalseUnwind { .. } | TerminatorKind::Unreachable => {} + TerminatorKind::Return => { + self.gather_move(Place::return_place()); + } + TerminatorKind::Assert { ref cond, .. } => { self.gather_operand(cond); } @@ -417,7 +416,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> { ref operands, options: _, line_spans: _, - destination: _ + destination: _, } => { for op in operands { match *op { diff --git a/src/librustc_mir/transform/generator.rs b/src/librustc_mir/transform/generator.rs index 461b13c4f6382..dbf1ad3bd6f93 100644 --- a/src/librustc_mir/transform/generator.rs +++ b/src/librustc_mir/transform/generator.rs @@ -50,7 +50,7 @@ //! Otherwise it drops all the values in scope at the last suspension point. 
use crate::dataflow::impls::{ - MaybeBorrowedLocals, MaybeInitializedLocals, MaybeLiveLocals, MaybeStorageLive, + MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive, }; use crate::dataflow::{self, Analysis}; use crate::transform::no_landing_pads::no_landing_pads; @@ -444,80 +444,86 @@ fn locals_live_across_suspend_points( movable: bool, ) -> LivenessInfo { let def_id = source.def_id(); + let body_ref: &Body<'_> = &body; // Calculate when MIR locals have live storage. This gives us an upper bound of their // lifetimes. let mut storage_live = MaybeStorageLive::new(always_live_locals.clone()) - .into_engine(tcx, body, def_id) + .into_engine(tcx, body_ref, def_id) .iterate_to_fixpoint() - .into_results_cursor(body); - - let mut init = MaybeInitializedLocals - .into_engine(tcx, body, def_id) - .iterate_to_fixpoint() - .into_results_cursor(body); - - let mut live = MaybeLiveLocals - .into_engine(tcx, body, def_id) - .iterate_to_fixpoint() - .into_results_cursor(body); - - let mut borrowed = MaybeBorrowedLocals::all_borrows() - .into_engine(tcx, body, def_id) + .into_results_cursor(body_ref); + + // Calculate the MIR locals which have been previously + // borrowed (even if they are still active). + let borrowed_locals_results = + MaybeBorrowedLocals::all_borrows().into_engine(tcx, body_ref, def_id).iterate_to_fixpoint(); + + let mut borrowed_locals_cursor = + dataflow::ResultsCursor::new(body_ref, &borrowed_locals_results); + + // Calculate the MIR locals that we actually need to keep storage around + // for. + let requires_storage_results = MaybeRequiresStorage::new(body, &borrowed_locals_results) + .into_engine(tcx, body_ref, def_id) + .iterate_to_fixpoint(); + let mut requires_storage_cursor = + dataflow::ResultsCursor::new(body_ref, &requires_storage_results); + + // Calculate the liveness of MIR locals ignoring borrows. + let mut liveness = MaybeLiveLocals + .into_engine(tcx, body_ref, def_id) .iterate_to_fixpoint() - .into_results_cursor(body); - - // Liveness across yield points is determined by the following boolean equation, where `live`, - // `init` and `borrowed` come from dataflow and `movable` is a property of the generator. - // Movable generators do not allow borrows to live across yield points, so they don't need to - // store a local simply because it is borrowed. - // - // live_across_yield := (live & init) | (!movable & borrowed) - // - let mut locals_live_across_yield_point = |block| { - live.seek_to_block_end(block); - let mut live_locals = live.get().clone(); - - init.seek_to_block_end(block); - live_locals.intersect(init.get()); - - if !movable { - borrowed.seek_to_block_end(block); - live_locals.union(borrowed.get()); - } - - live_locals - }; + .into_results_cursor(body_ref); let mut storage_liveness_map = IndexVec::from_elem(None, body.basic_blocks()); let mut live_locals_at_suspension_points = Vec::new(); let mut live_locals_at_any_suspension_point = BitSet::new_empty(body.local_decls.len()); for (block, data) in body.basic_blocks().iter_enumerated() { - if !matches!(data.terminator().kind, TerminatorKind::Yield { .. }) { - continue; - } + if let TerminatorKind::Yield { .. } = data.terminator().kind { + let loc = Location { block, statement_index: data.statements.len() }; + + liveness.seek_to_block_end(block); + let mut live_locals = liveness.get().clone(); + + if !movable { + // The `liveness` variable contains the liveness of MIR locals ignoring borrows. 
+ // This is correct for movable generators since borrows cannot live across + // suspension points. However for immovable generators we need to account for + // borrows, so we conseratively assume that all borrowed locals are live until + // we find a StorageDead statement referencing the locals. + // To do this we just union our `liveness` result with `borrowed_locals`, which + // contains all the locals which has been borrowed before this suspension point. + // If a borrow is converted to a raw reference, we must also assume that it lives + // forever. Note that the final liveness is still bounded by the storage liveness + // of the local, which happens using the `intersect` operation below. + borrowed_locals_cursor.seek_before_primary_effect(loc); + live_locals.union(borrowed_locals_cursor.get()); + } - // Store the storage liveness for later use so we can restore the state - // after a suspension point - storage_live.seek_to_block_end(block); - storage_liveness_map[block] = Some(storage_live.get().clone()); + // Store the storage liveness for later use so we can restore the state + // after a suspension point + storage_live.seek_before_primary_effect(loc); + storage_liveness_map[block] = Some(storage_live.get().clone()); - let mut live_locals = locals_live_across_yield_point(block); + // Locals live are live at this point only if they are used across + // suspension points (the `liveness` variable) + // and their storage is required (the `storage_required` variable) + requires_storage_cursor.seek_before_primary_effect(loc); + live_locals.intersect(requires_storage_cursor.get()); - // The combination of `MaybeInitializedLocals` and `MaybeBorrowedLocals` should be strictly - // more precise than `MaybeStorageLive` because they handle `StorageDead` themselves. This - // assumes that the MIR forbids locals from being initialized/borrowed before reaching - // `StorageLive`. - debug_assert!(storage_live.get().superset(&live_locals)); + // The generator argument is ignored. + live_locals.remove(SELF_ARG); - // Ignore the generator's `self` argument since it is handled seperately. - live_locals.remove(SELF_ARG); - debug!("block = {:?}, live_locals = {:?}", block, live_locals); - live_locals_at_any_suspension_point.union(&live_locals); - live_locals_at_suspension_points.push(live_locals); - } + debug!("loc = {:?}, live_locals = {:?}", loc, live_locals); + // Add the locals live at this suspension point to the set of locals which live across + // any suspension points + live_locals_at_any_suspension_point.union(&live_locals); + + live_locals_at_suspension_points.push(live_locals); + } + } debug!("live_locals_anywhere = {:?}", live_locals_at_any_suspension_point); // Renumber our liveness_map bitsets to include only the locals we are @@ -528,11 +534,10 @@ fn locals_live_across_suspend_points( .collect(); let storage_conflicts = compute_storage_conflicts( - body, + body_ref, &live_locals_at_any_suspension_point, always_live_locals.clone(), - init, - borrowed, + requires_storage_results, ); LivenessInfo { @@ -564,37 +569,6 @@ fn renumber_bitset( out } -/// Record conflicts between locals at the current dataflow cursor positions. -/// -/// You need to seek the cursors before calling this function. -fn record_conflicts_at_curr_loc( - local_conflicts: &mut BitMatrix, - init: &dataflow::ResultsCursor<'mir, 'tcx, MaybeInitializedLocals>, - borrowed: &dataflow::ResultsCursor<'mir, 'tcx, MaybeBorrowedLocals>, -) { - // A local requires storage if it is initialized or borrowed. 
For now, a local - // becomes uninitialized if it is moved from, but is still considered "borrowed". - // - // requires_storage := init | borrowed - // - // Just like when determining what locals are live at yield points, there is no need - // to look at storage liveness here, since `init | borrowed` is strictly more precise. - // - // FIXME: This function is called in a loop, so it might be better to pass in a temporary - // bitset rather than cloning here. - let mut requires_storage = init.get().clone(); - requires_storage.union(borrowed.get()); - - for local in requires_storage.iter() { - local_conflicts.union_row_with(&requires_storage, local); - } - - // `>1` because the `self` argument always requires storage. - if requires_storage.count() > 1 { - trace!("requires_storage={:?}", requires_storage); - } -} - /// For every saved local, looks for which locals are StorageLive at the same /// time. Generates a bitset for every local of all the other locals that may be /// StorageLive simultaneously with that local. This is used in the layout @@ -603,45 +577,30 @@ fn compute_storage_conflicts( body: &'mir Body<'tcx>, stored_locals: &BitSet, always_live_locals: storage::AlwaysLiveLocals, - mut init: dataflow::ResultsCursor<'mir, 'tcx, MaybeInitializedLocals>, - mut borrowed: dataflow::ResultsCursor<'mir, 'tcx, MaybeBorrowedLocals>, + requires_storage: dataflow::Results<'tcx, MaybeRequiresStorage<'mir, 'tcx>>, ) -> BitMatrix { - debug!("compute_storage_conflicts({:?})", body.span); assert_eq!(body.local_decls.len(), stored_locals.domain_size()); - // Locals that are always live conflict with all other locals. - // - // FIXME: Why do we need to handle locals without `Storage{Live,Dead}` specially here? - // Shouldn't it be enough to know whether they are initialized? - let always_live_locals = always_live_locals.into_inner(); - let mut local_conflicts = BitMatrix::from_row_n(&always_live_locals, body.local_decls.len()); - - // Visit every reachable statement and terminator. The exact order does not matter. When two - // locals are live at the same point in time, add an entry in the conflict matrix. - for (block, data) in traversal::preorder(body) { - // Ignore unreachable blocks. - if data.terminator().kind == TerminatorKind::Unreachable { - continue; - } + debug!("compute_storage_conflicts({:?})", body.span); + debug!("always_live = {:?}", always_live_locals); - // Observe the dataflow state *before* all possible locations (statement or terminator) in - // each basic block... - for statement_index in 0..=data.statements.len() { - let loc = Location { block, statement_index }; - trace!("record conflicts at {:?}", loc); - init.seek_before_primary_effect(loc); - borrowed.seek_before_primary_effect(loc); - record_conflicts_at_curr_loc(&mut local_conflicts, &init, &borrowed); - } + // Locals that are always live or ones that need to be stored across + // suspension points are not eligible for overlap. + let mut ineligible_locals = always_live_locals.into_inner(); + ineligible_locals.intersect(stored_locals); - // ...and then observe the state *after* the terminator effect is applied. As long as - // neither `init` nor `borrowed` has a "before" effect, we will observe all possible - // dataflow states here or in the loop above. - trace!("record conflicts at end of {:?}", block); - init.seek_to_block_end(block); - borrowed.seek_to_block_end(block); - record_conflicts_at_curr_loc(&mut local_conflicts, &init, &borrowed); - } + // Compute the storage conflicts for all eligible locals. 
+ let mut visitor = StorageConflictVisitor { + body, + stored_locals: &stored_locals, + local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()), + }; + + // Visit only reachable basic blocks. The exact order is not important. + let reachable_blocks = traversal::preorder(body).map(|(bb, _)| bb); + requires_storage.visit_with(body, reachable_blocks, &mut visitor); + + let local_conflicts = visitor.local_conflicts; // Compress the matrix using only stored locals (Local -> GeneratorSavedLocal). // @@ -653,7 +612,7 @@ fn compute_storage_conflicts( let mut storage_conflicts = BitMatrix::new(stored_locals.count(), stored_locals.count()); for (idx_a, local_a) in stored_locals.iter().enumerate() { let saved_local_a = GeneratorSavedLocal::new(idx_a); - if always_live_locals.contains(local_a) { + if ineligible_locals.contains(local_a) { // Conflicts with everything. storage_conflicts.insert_all_into_row(saved_local_a); } else { @@ -669,6 +628,56 @@ fn compute_storage_conflicts( storage_conflicts } +struct StorageConflictVisitor<'mir, 'tcx, 's> { + body: &'mir Body<'tcx>, + stored_locals: &'s BitSet, + // FIXME(tmandry): Consider using sparse bitsets here once we have good + // benchmarks for generators. + local_conflicts: BitMatrix, +} + +impl dataflow::ResultsVisitor<'mir, 'tcx> for StorageConflictVisitor<'mir, 'tcx, '_> { + type FlowState = BitSet; + + fn visit_statement_before_primary_effect( + &mut self, + state: &Self::FlowState, + _statement: &'mir Statement<'tcx>, + loc: Location, + ) { + self.apply_state(state, loc); + } + + fn visit_terminator_before_primary_effect( + &mut self, + state: &Self::FlowState, + _terminator: &'mir Terminator<'tcx>, + loc: Location, + ) { + self.apply_state(state, loc); + } +} + +impl<'body, 'tcx, 's> StorageConflictVisitor<'body, 'tcx, 's> { + fn apply_state(&mut self, flow_state: &BitSet, loc: Location) { + // Ignore unreachable blocks. + if self.body.basic_blocks()[loc.block].terminator().kind == TerminatorKind::Unreachable { + return; + } + + let mut eligible_storage_live = flow_state.clone(); + eligible_storage_live.intersect(&self.stored_locals); + + for local in eligible_storage_live.iter() { + self.local_conflicts.union_row_with(&eligible_storage_live, local); + } + + if eligible_storage_live.count() > 1 { + trace!("at {:?}, eligible_storage_live={:?}", loc, eligible_storage_live); + } + } +} + fn compute_layout<'tcx>( tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs index ed999c6871bb6..8f27247bfb4ce 100644 --- a/src/librustc_mir/util/elaborate_drops.rs +++ b/src/librustc_mir/util/elaborate_drops.rs @@ -233,6 +233,8 @@ where .patch_terminator(bb, TerminatorKind::Goto { target: self.succ }); } DropStyle::Static => { + let loc = self.terminator_loc(bb); + self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep); self.elaborator.patch().patch_terminator( bb, TerminatorKind::Drop { @@ -243,7 +245,9 @@ where ); } DropStyle::Conditional => { - let drop_bb = self.complete_drop(self.succ, self.unwind); + let unwind = self.unwind; // FIXME(#43234) + let succ = self.succ; + let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind); self.elaborator .patch() .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb }); @@ -315,7 +319,7 @@ where // our own drop flag. 
path: self.path, } - .complete_drop(succ, unwind) + .complete_drop(None, succ, unwind) } } @@ -344,7 +348,13 @@ where // Clear the "master" drop flag at the end. This is needed // because the "master" drop protects the ADT's discriminant, // which is invalidated after the ADT is dropped. - (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind) + let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234) + ( + self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind), + unwind.map(|unwind| { + self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup) + }), + ) } /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders @@ -878,7 +888,11 @@ where self.open_drop_for_adt(def, substs) } } - ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind), + ty::Dynamic(..) => { + let unwind = self.unwind; // FIXME(#43234) + let succ = self.succ; + self.complete_drop(Some(DropFlagMode::Deep), succ, unwind) + } ty::Array(ety, size) => { let size = size.try_eval_usize(self.tcx(), self.elaborator.param_env()); self.open_drop_for_array(ety, size) @@ -889,10 +903,20 @@ where } } - fn complete_drop(&mut self, succ: BasicBlock, unwind: Unwind) -> BasicBlock { - debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind); + fn complete_drop( + &mut self, + drop_mode: Option, + succ: BasicBlock, + unwind: Unwind, + ) -> BasicBlock { + debug!("complete_drop({:?},{:?})", self, drop_mode); let drop_block = self.drop_block(succ, unwind); + let drop_block = if let Some(mode) = drop_mode { + self.drop_flag_reset_block(mode, drop_block, unwind) + } else { + drop_block + }; self.drop_flag_test_block(drop_block, succ, unwind) } @@ -907,11 +931,6 @@ where ) -> BasicBlock { debug!("drop_flag_reset_block({:?},{:?})", self, mode); - if unwind.is_cleanup() { - // The drop flag isn't read again on the unwind path, so don't - // bother setting it. - return succ; - } let block = self.new_block(unwind, TerminatorKind::Goto { target: succ }); let block_start = Location { block, statement_index: 0 }; self.elaborator.clear_drop_flag(block_start, self.path, mode); @@ -1028,6 +1047,11 @@ where self.elaborator.patch().new_temp(ty, self.source_info.span) } + fn terminator_loc(&mut self, bb: BasicBlock) -> Location { + let body = self.elaborator.body(); + self.elaborator.patch().terminator_loc(body, bb) + } + fn constant_usize(&self, val: u16) -> Operand<'tcx> { Operand::Constant(box Constant { span: self.source_info.span, diff --git a/src/librustc_mir/util/graphviz.rs b/src/librustc_mir/util/graphviz.rs index fb862b926d782..aed29a076a426 100644 --- a/src/librustc_mir/util/graphviz.rs +++ b/src/librustc_mir/util/graphviz.rs @@ -97,17 +97,12 @@ where write!(w, r#""#)?; // Basic block number at the top. 
- let (blk, color) = if data.is_cleanup { - (format!("{} (cleanup)", block.index()), "lightblue") - } else { - (format!("{}", block.index()), "gray") - }; write!( w, - r#""#, + r#""#, + attrs = r#"bgcolor="gray" align="center""#, colspan = num_cols, - blk = blk, - color = color + blk = block.index() )?; init(w)?; diff --git a/src/librustc_mir_build/build/block.rs b/src/librustc_mir_build/build/block.rs index 4e4f0dc74cb7c..2be4136ad42a0 100644 --- a/src/librustc_mir_build/build/block.rs +++ b/src/librustc_mir_build/build/block.rs @@ -28,16 +28,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.in_opt_scope(opt_destruction_scope.map(|de| (de, source_info)), move |this| { this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| { if targeted_by_break { - this.in_breakable_scope(None, destination, span, |this| { - Some(this.ast_block_stmts( - destination, - block, - span, - stmts, - expr, - safety_mode, - )) - }) + // This is a `break`-able block + let exit_block = this.cfg.start_new_block(); + let block_exit = + this.in_breakable_scope(None, exit_block, destination, |this| { + this.ast_block_stmts(destination, block, span, stmts, expr, safety_mode) + }); + this.cfg.goto(unpack!(block_exit), source_info, exit_block); + exit_block.unit() } else { this.ast_block_stmts(destination, block, span, stmts, expr, safety_mode) } diff --git a/src/librustc_mir_build/build/expr/into.rs b/src/librustc_mir_build/build/expr/into.rs index d5d1ddc5b7d7b..e402b2d15961a 100644 --- a/src/librustc_mir_build/build/expr/into.rs +++ b/src/librustc_mir_build/build/expr/into.rs @@ -135,19 +135,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // body, even when the exact code in the body cannot unwind let loop_block = this.cfg.start_new_block(); + let exit_block = this.cfg.start_new_block(); // Start the loop. this.cfg.goto(block, source_info, loop_block); - this.in_breakable_scope(Some(loop_block), destination, expr_span, move |this| { + this.in_breakable_scope(Some(loop_block), exit_block, destination, move |this| { // conduct the test, if necessary let body_block = this.cfg.start_new_block(); + let diverge_cleanup = this.diverge_cleanup(); this.cfg.terminate( loop_block, source_info, - TerminatorKind::FalseUnwind { real_target: body_block, unwind: None }, + TerminatorKind::FalseUnwind { + real_target: body_block, + unwind: Some(diverge_cleanup), + }, ); - this.diverge_from(loop_block); // The “return” value of the loop body must always be an unit. We therefore // introduce a unit temporary as the destination for the loop body. @@ -155,10 +159,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Execute the body, branching back to the test. let body_block_end = unpack!(this.into(tmp, body_block, body)); this.cfg.goto(body_block_end, source_info, loop_block); - - // Loops are only exited by `break` expressions. - None - }) + }); + exit_block.unit() } ExprKind::Call { ty, fun, args, from_hir_call } => { let intrinsic = match ty.kind { @@ -200,6 +202,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .collect(); let success = this.cfg.start_new_block(); + let cleanup = this.diverge_cleanup(); this.record_operands_moved(&args); @@ -209,7 +212,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { TerminatorKind::Call { func: fun, args, - cleanup: None, + cleanup: Some(cleanup), // FIXME(varkor): replace this with an uninhabitedness-based check. // This requires getting access to the current module to call // `tcx.is_ty_uninhabited_from`, which is currently tricky to do. 
@@ -221,7 +224,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { from_hir_call, }, ); - this.diverge_from(block); success.unit() } } @@ -425,12 +427,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let scope = this.local_scope(); let value = unpack!(block = this.as_operand(block, scope, value)); let resume = this.cfg.start_new_block(); + let cleanup = this.generator_drop_cleanup(); this.cfg.terminate( block, source_info, - TerminatorKind::Yield { value, resume, resume_arg: destination, drop: None }, + TerminatorKind::Yield { value, resume, resume_arg: destination, drop: cleanup }, ); - this.generator_drop_cleanup(block); resume.unit() } diff --git a/src/librustc_mir_build/build/matches/mod.rs b/src/librustc_mir_build/build/matches/mod.rs index 147c09d8f3af6..3b448b0cf27cb 100644 --- a/src/librustc_mir_build/build/matches/mod.rs +++ b/src/librustc_mir_build/build/matches/mod.rs @@ -225,6 +225,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { outer_source_info: SourceInfo, fake_borrow_temps: Vec<(Place<'tcx>, Local)>, ) -> BlockAnd<()> { + let match_scope = self.scopes.topmost(); + let arm_end_blocks: Vec<_> = arm_candidates .into_iter() .map(|(arm, candidate)| { @@ -245,7 +247,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let arm_block = this.bind_pattern( outer_source_info, candidate, - arm.guard.as_ref(), + arm.guard.as_ref().map(|g| (g, match_scope)), &fake_borrow_temps, scrutinee_span, Some(arm.scope), @@ -282,7 +284,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { &mut self, outer_source_info: SourceInfo, candidate: Candidate<'_, 'tcx>, - guard: Option<&Guard<'tcx>>, + guard: Option<(&Guard<'tcx>, region::Scope)>, fake_borrow_temps: &Vec<(Place<'tcx>, Local)>, scrutinee_span: Span, arm_scope: Option, @@ -1588,7 +1590,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { &mut self, candidate: Candidate<'pat, 'tcx>, parent_bindings: &[(Vec>, Vec>)], - guard: Option<&Guard<'tcx>>, + guard: Option<(&Guard<'tcx>, region::Scope)>, fake_borrows: &Vec<(Place<'tcx>, Local)>, scrutinee_span: Span, schedule_drops: bool, @@ -1700,7 +1702,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // the reference that we create for the arm. // * So we eagerly create the reference for the arm and then take a // reference to that. 
- if let Some(guard) = guard { + if let Some((guard, region_scope)) = guard { let tcx = self.hir.tcx(); let bindings = parent_bindings .iter() @@ -1744,7 +1746,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { unreachable }); let outside_scope = self.cfg.start_new_block(); - self.exit_top_scope(otherwise_post_guard_block, outside_scope, source_info); + self.exit_scope( + source_info.span, + region_scope, + otherwise_post_guard_block, + outside_scope, + ); self.false_edges( outside_scope, otherwise_block, diff --git a/src/librustc_mir_build/build/matches/test.rs b/src/librustc_mir_build/build/matches/test.rs index 1eab5848e0974..74398ca8a40fa 100644 --- a/src/librustc_mir_build/build/matches/test.rs +++ b/src/librustc_mir_build/build/matches/test.rs @@ -423,6 +423,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let bool_ty = self.hir.bool_ty(); let eq_result = self.temp(bool_ty, source_info.span); let eq_block = self.cfg.start_new_block(); + let cleanup = self.diverge_cleanup(); self.cfg.terminate( block, source_info, @@ -440,11 +441,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }), args: vec![val, expect], destination: Some((eq_result, eq_block)), - cleanup: None, + cleanup: Some(cleanup), from_hir_call: false, }, ); - self.diverge_from(block); if let [success_block, fail_block] = *make_target_blocks(self) { // check the result diff --git a/src/librustc_mir_build/build/mod.rs b/src/librustc_mir_build/build/mod.rs index 3d821aa55a1f8..2efe93d057b9b 100644 --- a/src/librustc_mir_build/build/mod.rs +++ b/src/librustc_mir_build/build/mod.rs @@ -327,6 +327,11 @@ struct Builder<'a, 'tcx> { var_debug_info: Vec>, + /// Cached block with the `RESUME` terminator; this is created + /// when first set of cleanups are built. + cached_resume_block: Option, + /// Cached block with the `RETURN` terminator. + cached_return_block: Option, /// Cached block with the `UNREACHABLE` terminator. 
cached_unreachable_block: Option, } @@ -585,34 +590,50 @@ where region::Scope { id: body.value.hir_id.local_id, data: region::ScopeData::CallSite }; let arg_scope = region::Scope { id: body.value.hir_id.local_id, data: region::ScopeData::Arguments }; + let mut block = START_BLOCK; let source_info = builder.source_info(span); let call_site_s = (call_site_scope, source_info); - unpack!(builder.in_scope(call_site_s, LintLevel::Inherited, |builder| { - let arg_scope_s = (arg_scope, source_info); - // Attribute epilogue to function's closing brace - let fn_end = span.shrink_to_hi(); - let return_block = - unpack!(builder.in_breakable_scope(None, Place::return_place(), fn_end, |builder| { - Some(builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| { - builder.args_and_body( - START_BLOCK, - fn_def_id.to_def_id(), - &arguments, - arg_scope, - &body.value, - ) - })) - })); - let source_info = builder.source_info(fn_end); - builder.cfg.terminate(return_block, source_info, TerminatorKind::Return); - let should_abort = should_abort_on_panic(tcx, fn_def_id, abi); - builder.build_drop_trees(should_abort); - // Attribute any unreachable codepaths to the function's closing brace - if let Some(unreachable_block) = builder.cached_unreachable_block { - builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); - } - return_block.unit() - })); + unpack!( + block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| { + if should_abort_on_panic(tcx, fn_def_id, abi) { + builder.schedule_abort(); + } + + let arg_scope_s = (arg_scope, source_info); + // `return_block` is called when we evaluate a `return` expression, so + // we just use `START_BLOCK` here. + unpack!( + block = builder.in_breakable_scope( + None, + START_BLOCK, + Place::return_place(), + |builder| { + builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| { + builder.args_and_body( + block, + fn_def_id.to_def_id(), + &arguments, + arg_scope, + &body.value, + ) + }) + }, + ) + ); + // Attribute epilogue to function's closing brace + let fn_end = span.shrink_to_hi(); + let source_info = builder.source_info(fn_end); + let return_block = builder.return_block(); + builder.cfg.goto(block, source_info, return_block); + builder.cfg.terminate(return_block, source_info, TerminatorKind::Return); + // Attribute any unreachable codepaths to the function's closing brace + if let Some(unreachable_block) = builder.cached_unreachable_block { + builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); + } + return_block.unit() + }) + ); + assert_eq!(block, builder.return_block()); let spread_arg = if abi == Abi::RustCall { // RustCall pseudo-ABI untuples the last argument. @@ -646,7 +667,8 @@ fn construct_const<'a, 'tcx>( let source_info = builder.source_info(span); builder.cfg.terminate(block, source_info, TerminatorKind::Return); - builder.build_drop_trees(false); + // Constants can't `return` so a return block should not be created. + assert_eq!(builder.cached_return_block, None); // Constants may be match expressions in which case an unreachable block may // be created, so terminate it properly. 
@@ -713,7 +735,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fn_span: span, arg_count, generator_kind, - scopes: scope::Scopes::new(), + scopes: Default::default(), block_context: BlockContext::new(), source_scopes: IndexVec::new(), source_scope: OUTERMOST_SOURCE_SCOPE, @@ -726,6 +748,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { var_indices: Default::default(), unit_temp: None, var_debug_info: vec![], + cached_resume_block: None, + cached_return_block: None, cached_unreachable_block: None, }; @@ -957,6 +981,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } } + + fn return_block(&mut self) -> BasicBlock { + match self.cached_return_block { + Some(rb) => rb, + None => { + let rb = self.cfg.start_new_block(); + self.cached_return_block = Some(rb); + rb + } + } + } } /////////////////////////////////////////////////////////////////////////// diff --git a/src/librustc_mir_build/build/scope.rs b/src/librustc_mir_build/build/scope.rs index 868fb69abe80c..4daf567d7d451 100644 --- a/src/librustc_mir_build/build/scope.rs +++ b/src/librustc_mir_build/build/scope.rs @@ -6,31 +6,30 @@ contents, and then pop it off. Every scope is named by a ### SEME Regions -When pushing a new [Scope], we record the current point in the graph (a +When pushing a new scope, we record the current point in the graph (a basic block); this marks the entry to the scope. We then generate more stuff in the control-flow graph. Whenever the scope is exited, either via a `break` or `return` or just by fallthrough, that marks an exit from the scope. Each lexical scope thus corresponds to a single-entry, multiple-exit (SEME) region in the control-flow graph. -For now, we record the `region::Scope` to each SEME region for later reference -(see caveat in next paragraph). This is because destruction scopes are tied to -them. This may change in the future so that MIR lowering determines its own -destruction scopes. +For now, we keep a mapping from each `region::Scope` to its +corresponding SEME region for later reference (see caveat in next +paragraph). This is because region scopes are tied to +them. Eventually, when we shift to non-lexical lifetimes, there should +be no need to remember this mapping. ### Not so SEME Regions In the course of building matches, it sometimes happens that certain code (namely guards) gets executed multiple times. This means that the scope lexical scope may in fact correspond to multiple, disjoint SEME regions. So in fact our -mapping is from one scope to a vector of SEME regions. Since the SEME regions -are disjoint, the mapping is still one-to-one for the set of SEME regions that -we're currently in. +mapping is from one scope to a vector of SEME regions. -Also in matches, the scopes assigned to arms are not always even SEME regions! -Each arm has a single region with one entry for each pattern. We manually +Also in matches, the scopes assigned to arms are not even SEME regions! Each +arm has a single region with one entry for each pattern. We manually manipulate the scheduled drops in this scope to avoid dropping things multiple -times. +times, although drop elaboration would clean this up for value drops. ### Drops @@ -61,48 +60,38 @@ that for now); any later drops would also drop `y`. There are numerous "normal" ways to early exit a scope: `break`, `continue`, `return` (panics are handled separately). Whenever an -early exit occurs, the method `break_scope` is called. It is given the +early exit occurs, the method `exit_scope` is called. 
It is given the current point in execution where the early exit occurs, as well as the scope you want to branch to (note that all early exits from to some -other enclosing scope). `break_scope` will record the set of drops currently -scheduled in a [DropTree]. Later, before `in_breakable_scope` exits, the drops -will be added to the CFG. +other enclosing scope). `exit_scope` will record this exit point and +also add all drops. -Panics are handled in a similar fashion, except that the drops are added to the -MIR once the rest of the function has finished being lowered. If a terminator -can panic, call `diverge_from(block)` with the block containing the terminator -`block`. +Panics are handled in a similar fashion, except that a panic always +returns out to the `DIVERGE_BLOCK`. To trigger a panic, simply call +`panic(p)` with the current point `p`. Or else you can call +`diverge_cleanup`, which will produce a block that you can branch to +which does the appropriate cleanup and then diverges. `panic(p)` +simply calls `diverge_cleanup()` and adds an edge from `p` to the +result. -### Breakable scopes +### Loop scopes In addition to the normal scope stack, we track a loop scope stack -that contains only loops and breakable blocks. It tracks where a `break`, -`continue` or `return` should go to. +that contains only loops. It tracks where a `break` and `continue` +should go to. */ use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG}; use crate::hair::{Expr, ExprRef, LintLevel}; -use rustc_data_structures::fx::FxHashMap; -use rustc_hir as hir; -use rustc_index::vec::IndexVec; use rustc_middle::middle::region; use rustc_middle::mir::*; +use rustc_data_structures::fx::FxHashMap; +use rustc_hir as hir; +use rustc_hir::GeneratorKind; use rustc_span::{Span, DUMMY_SP}; - -#[derive(Debug)] -pub struct Scopes<'tcx> { - scopes: Vec, - /// The current set of breakable scopes. See module comment for more details. - breakable_scopes: Vec>, - - /// Drops that need to be done on unwind paths. See the comment on - /// [DropTree] for more details. - unwind_drops: DropTree, - - /// Drops that need to be done on paths to the `GeneratorDrop` terminator. - generator_drops: DropTree, -} +use std::collections::hash_map::Entry; +use std::mem; #[derive(Debug)] struct Scope { @@ -123,45 +112,73 @@ struct Scope { moved_locals: Vec, - /// The drop index that will drop everything in and below this scope on an - /// unwind path. - cached_unwind_block: Option, + /// The cache for drop chain on “normal” exit into a particular BasicBlock. + cached_exits: FxHashMap<(BasicBlock, region::Scope), BasicBlock>, + + /// The cache for drop chain on "generator drop" exit. + cached_generator_drop: Option, - /// The drop index that will drop everything in and below this scope on a - /// generator drop path. - cached_generator_drop_block: Option, + /// The cache for drop chain on "unwind" exit. + cached_unwind: CachedBlock, } -#[derive(Clone, Copy, Debug)] +#[derive(Debug, Default)] +crate struct Scopes<'tcx> { + scopes: Vec, + /// The current set of breakable scopes. See module comment for more details. + breakable_scopes: Vec>, +} + +#[derive(Debug)] struct DropData { - /// The `Span` where drop obligation was incurred (typically where place was - /// declared) - source_info: SourceInfo, + /// span where drop obligation was incurred (typically where place was declared) + span: Span, /// local to drop local: Local, /// Whether this is a value Drop or a StorageDead. kind: DropKind, + + /// The cached blocks for unwinds. 
+ cached_block: CachedBlock, +} + +#[derive(Debug, Default, Clone, Copy)] +struct CachedBlock { + /// The cached block for the cleanups-on-diverge path. This block + /// contains code to run the current drop and all the preceding + /// drops (i.e., those having lower index in Drop’s Scope drop + /// array) + unwind: Option, + + /// The cached block for unwinds during cleanups-on-generator-drop path + /// + /// This is split from the standard unwind path here to prevent drop + /// elaboration from creating drop flags that would have to be captured + /// by the generator. I'm not sure how important this optimization is, + /// but it is here. + generator_drop: Option, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, PartialEq, Eq)] pub(crate) enum DropKind { Value, Storage, } -#[derive(Debug)] +#[derive(Clone, Debug)] struct BreakableScope<'tcx> { /// Region scope of the loop region_scope: region::Scope, + /// Where the body of the loop begins. `None` if block + continue_block: Option, + /// Block to branch into when the loop or block terminates (either by being + /// `break`-en out from, or by having its condition to become false) + break_block: BasicBlock, /// The destination of the loop/block expression itself (i.e., where to put - /// the result of a `break` or `return` expression) + /// the result of a `break` expression) break_destination: Place<'tcx>, - /// Drops that happen on the `break`/`return` path. - break_drops: DropTree, - /// Drops that happen on the `continue` path. - continue_drops: Option, } /// The target of an expression that breaks out of a scope @@ -172,33 +189,61 @@ crate enum BreakableTarget { Return, } -rustc_index::newtype_index! { - struct DropIdx { .. } -} +impl CachedBlock { + fn invalidate(&mut self) { + *self = CachedBlock::default(); + } -const ROOT_NODE: DropIdx = DropIdx::from_u32(0); + fn get(&self, generator_drop: bool) -> Option { + if generator_drop { self.generator_drop } else { self.unwind } + } -/// A tree of drops that we have deferred lowering. It's used for: -/// -/// * Drops on unwind paths -/// * Drops on generator drop paths (when a suspended generator is dropped) -/// * Drops on return and loop exit paths -/// -/// Once no more nodes could be added to the tree, we lower it to MIR in one go -/// in `build_drop_tree`. -#[derive(Debug)] -struct DropTree { - /// Drops in the tree. - drops: IndexVec, - /// Map for finding the inverse of the `next_drop` relation: - /// - /// `previous_drops[(drops[i].1, drops[i].0.local, drops[i].0.kind] == i` - previous_drops: FxHashMap<(DropIdx, Local, DropKind), DropIdx>, - /// Edges into the `DropTree` that need to be added once it's lowered. - entry_points: Vec<(DropIdx, BasicBlock)>, + fn ref_mut(&mut self, generator_drop: bool) -> &mut Option { + if generator_drop { &mut self.generator_drop } else { &mut self.unwind } + } } impl Scope { + /// Invalidates all the cached blocks in the scope. + /// + /// Should always be run for all inner scopes when a drop is pushed into some scope enclosing a + /// larger extent of code. + /// + /// `storage_only` controls whether to invalidate only drop paths that run `StorageDead`. + /// `this_scope_only` controls whether to invalidate only drop paths that refer to the current + /// top-of-scope (as opposed to dependent scopes). + fn invalidate_cache( + &mut self, + storage_only: bool, + generator_kind: Option, + this_scope_only: bool, + ) { + // FIXME: maybe do shared caching of `cached_exits` etc. to handle functions + // with lots of `try!`? 
+ + // cached exits drop storage and refer to the top-of-scope + self.cached_exits.clear(); + + // the current generator drop and unwind refer to top-of-scope + self.cached_generator_drop = None; + + let ignore_unwinds = storage_only && generator_kind.is_none(); + if !ignore_unwinds { + self.cached_unwind.invalidate(); + } + + if !ignore_unwinds && !this_scope_only { + for drop_data in &mut self.drops { + drop_data.cached_block.invalidate(); + } + } + } + + /// Given a span and this scope's source scope, make a SourceInfo. + fn source_info(&self, span: Span) -> SourceInfo { + SourceInfo { span, scope: self.source_scope } + } + /// Whether there's anything to do for the cleanup path, that is, /// when unwinding through this scope. This includes destructors, /// but not StorageDead statements, which don't get emitted at all @@ -216,189 +261,11 @@ impl Scope { DropKind::Storage => false, }) } - - fn invalidate_cache(&mut self) { - self.cached_unwind_block = None; - self.cached_generator_drop_block = None; - } -} - -/// A trait that determined how [DropTree::build_mir] creates its blocks and -/// links to any entry nodes. -trait DropTreeBuilder<'tcx> { - /// Create a new block for the tree. This should call either - /// `cfg.start_new_block()` or `cfg.start_new_cleanup_block()`. - fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock; - - /// Links a block outside the drop tree, `from`, to the block `to` inside - /// the drop tree. - fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock); -} - -impl DropTree { - fn new() -> Self { - // The root node of the tree doesn't represent a drop, but instead - // represents the block in the tree that should be jumped to once all - // of the required drops have been performed. - let fake_source_info = SourceInfo::outermost(DUMMY_SP); - let fake_data = - DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage }; - let drop_idx = DropIdx::MAX; - let drops = IndexVec::from_elem_n((fake_data, drop_idx), 1); - Self { drops, entry_points: Vec::new(), previous_drops: FxHashMap::default() } - } - - fn add_drop(&mut self, drop: DropData, next: DropIdx) -> DropIdx { - let drops = &mut self.drops; - *self - .previous_drops - .entry((next, drop.local, drop.kind)) - .or_insert_with(|| drops.push((drop, next))) - } - - fn add_entry(&mut self, from: BasicBlock, to: DropIdx) { - debug_assert!(to < self.drops.next_index()); - self.entry_points.push((to, from)); - } - - /// Builds the MIR for a given drop tree. - /// - /// `blocks` should have the same length as `self.drops`, and may have its - /// first value set to some already existing block. - fn build_mir<'tcx, T: DropTreeBuilder<'tcx>>( - &mut self, - cfg: &mut CFG<'tcx>, - blocks: &mut IndexVec>, - ) { - debug!("DropTree::build_mir(drops = {:#?})", self); - assert_eq!(blocks.len(), self.drops.len()); - - self.assign_blocks::(cfg, blocks); - self.link_blocks(cfg, blocks) - } - - /// Assign blocks for all of the drops in the drop tree that need them. - fn assign_blocks<'tcx, T: DropTreeBuilder<'tcx>>( - &mut self, - cfg: &mut CFG<'tcx>, - blocks: &mut IndexVec>, - ) { - // StorageDead statements can share blocks with each other and also with - // a Drop terminator. We iterate through the drops to find which drops - // need their own block. - #[derive(Clone, Copy)] - enum Block { - // This drop is unreachable - None, - // This drop is only reachable through the `StorageDead` with the - // specified index. 
- Shares(DropIdx), - // This drop has more than one way of being reached, or it is - // branched to from outside the tree, or its predecessor is a - // `Value` drop. - Own, - } - - let mut needs_block = IndexVec::from_elem(Block::None, &self.drops); - if blocks[ROOT_NODE].is_some() { - // In some cases (such as drops for `continue`) the root node - // already has a block. In this case, make sure that we don't - // override it. - needs_block[ROOT_NODE] = Block::Own; - } - - // Sort so that we only need to check the last value. - let entry_points = &mut self.entry_points; - entry_points.sort(); - - for (drop_idx, drop_data) in self.drops.iter_enumerated().rev() { - if entry_points.last().map_or(false, |entry_point| entry_point.0 == drop_idx) { - let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg)); - needs_block[drop_idx] = Block::Own; - while entry_points.last().map_or(false, |entry_point| entry_point.0 == drop_idx) { - let entry_block = entry_points.pop().unwrap().1; - T::add_entry(cfg, entry_block, block); - } - } - match needs_block[drop_idx] { - Block::None => continue, - Block::Own => { - blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg)); - } - Block::Shares(pred) => { - blocks[drop_idx] = blocks[pred]; - } - } - if let DropKind::Value = drop_data.0.kind { - needs_block[drop_data.1] = Block::Own; - } else { - if drop_idx != ROOT_NODE { - match &mut needs_block[drop_data.1] { - pred @ Block::None => *pred = Block::Shares(drop_idx), - pred @ Block::Shares(_) => *pred = Block::Own, - Block::Own => (), - } - } - } - } - - debug!("assign_blocks: blocks = {:#?}", blocks); - assert!(entry_points.is_empty()); - } - - fn link_blocks<'tcx>( - &self, - cfg: &mut CFG<'tcx>, - blocks: &IndexVec>, - ) { - for (drop_idx, drop_data) in self.drops.iter_enumerated().rev() { - let block = if let Some(block) = blocks[drop_idx] { - block - } else { - continue; - }; - match drop_data.0.kind { - DropKind::Value => { - let terminator = TerminatorKind::Drop { - target: blocks[drop_data.1].unwrap(), - // The caller will handle this if needed. - unwind: None, - location: drop_data.0.local.into(), - }; - cfg.terminate(block, drop_data.0.source_info, terminator); - } - // Root nodes don't correspond to a drop. - DropKind::Storage if drop_idx == ROOT_NODE => {} - DropKind::Storage => { - let stmt = Statement { - source_info: drop_data.0.source_info, - kind: StatementKind::StorageDead(drop_data.0.local), - }; - cfg.push(block, stmt); - let target = blocks[drop_data.1].unwrap(); - if target != block { - // Diagnostics don't use this `Span` but debuginfo - // might. Since we don't want breakpoints to be placed - // here, especially when this is on an unwind path, we - // use `DUMMY_SP`. 
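The DropTree removed above interns its nodes: add_drop keys previous_drops on the successor node plus the drop's local and kind, so two exit paths that need the same chain of drops end up sharing one node. Below is a compilable toy version of that interning step; it keys only on (next, local), uses usize indices and string locals in place of the MIR types, and all names are invented for illustration.

use std::collections::HashMap;

/// A chain of deferred drops: `drops[i] = (local, next)` means "drop `local`,
/// then continue with node `next`". Node 0 is the root ("nothing left to do").
struct DropChain {
    drops: Vec<(&'static str, usize)>,
    previous: HashMap<(usize, &'static str), usize>,
}

impl DropChain {
    fn new() -> Self {
        DropChain { drops: vec![("<root>", 0)], previous: HashMap::new() }
    }

    /// Add a drop of `local` continuing with `next`, reusing the existing node
    /// if the same (next, local) pair was added before.
    fn add_drop(&mut self, local: &'static str, next: usize) -> usize {
        let drops = &mut self.drops;
        *self.previous.entry((next, local)).or_insert_with(|| {
            drops.push((local, next));
            drops.len() - 1
        })
    }
}

fn main() {
    let mut chain = DropChain::new();
    let x = chain.add_drop("x", 0);
    let y = chain.add_drop("y", x);
    // A second exit path needing the same drops reuses the same nodes.
    assert_eq!(chain.add_drop("x", 0), x);
    assert_eq!(chain.add_drop("y", x), y);
    assert_eq!(chain.drops.len(), 3); // root + x + y
}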
- let source_info = SourceInfo { span: DUMMY_SP, ..drop_data.0.source_info }; - let terminator = TerminatorKind::Goto { target }; - cfg.terminate(block, source_info, terminator); - } - } - } - } - } } impl<'tcx> Scopes<'tcx> { - pub(crate) fn new() -> Self { - Self { - scopes: Vec::new(), - breakable_scopes: Vec::new(), - unwind_drops: DropTree::new(), - generator_drops: DropTree::new(), - } + fn len(&self) -> usize { + self.scopes.len() } fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo), vis_scope: SourceScope) { @@ -409,29 +276,94 @@ impl<'tcx> Scopes<'tcx> { region_scope_span: region_scope.1.span, drops: vec![], moved_locals: vec![], - cached_unwind_block: None, - cached_generator_drop_block: None, + cached_generator_drop: None, + cached_exits: Default::default(), + cached_unwind: CachedBlock::default(), }); } - fn pop_scope(&mut self, region_scope: (region::Scope, SourceInfo)) -> Scope { + fn pop_scope( + &mut self, + region_scope: (region::Scope, SourceInfo), + ) -> (Scope, Option) { let scope = self.scopes.pop().unwrap(); assert_eq!(scope.region_scope, region_scope.0); - scope + let unwind_to = + self.scopes.last().and_then(|next_scope| next_scope.cached_unwind.get(false)); + (scope, unwind_to) + } + + fn may_panic(&self, scope_count: usize) -> bool { + let len = self.len(); + self.scopes[(len - scope_count)..].iter().any(|s| s.needs_cleanup()) + } + + /// Finds the breakable scope for a given label. This is used for + /// resolving `return`, `break` and `continue`. + fn find_breakable_scope( + &self, + span: Span, + target: BreakableTarget, + ) -> (BasicBlock, region::Scope, Option>) { + let get_scope = |scope: region::Scope| { + // find the loop-scope by its `region::Scope`. + self.breakable_scopes + .iter() + .rfind(|breakable_scope| breakable_scope.region_scope == scope) + .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found")) + }; + match target { + BreakableTarget::Return => { + let scope = &self.breakable_scopes[0]; + if scope.break_destination != Place::return_place() { + span_bug!(span, "`return` in item with no return scope"); + } + (scope.break_block, scope.region_scope, Some(scope.break_destination)) + } + BreakableTarget::Break(scope) => { + let scope = get_scope(scope); + (scope.break_block, scope.region_scope, Some(scope.break_destination)) + } + BreakableTarget::Continue(scope) => { + let scope = get_scope(scope); + let continue_block = scope + .continue_block + .unwrap_or_else(|| span_bug!(span, "missing `continue` block")); + (continue_block, scope.region_scope, None) + } + } } - fn scope_index(&self, region_scope: region::Scope, span: Span) -> usize { - self.scopes + fn num_scopes_above(&self, region_scope: region::Scope, span: Span) -> usize { + let scope_count = self + .scopes .iter() - .rposition(|scope| scope.region_scope == region_scope) - .unwrap_or_else(|| span_bug!(span, "region_scope {:?} does not enclose", region_scope)) + .rev() + .position(|scope| scope.region_scope == region_scope) + .unwrap_or_else(|| span_bug!(span, "region_scope {:?} does not enclose", region_scope)); + let len = self.len(); + assert!(scope_count < len, "should not use `exit_scope` to pop ALL scopes"); + scope_count + } + + fn iter_mut(&mut self) -> impl DoubleEndedIterator + '_ { + self.scopes.iter_mut().rev() + } + + fn top_scopes(&mut self, count: usize) -> impl DoubleEndedIterator + '_ { + let len = self.len(); + self.scopes[len - count..].iter_mut() } /// Returns the topmost active scope, which is known to be alive until /// the next scope 
expression. - fn topmost(&self) -> region::Scope { + pub(super) fn topmost(&self) -> region::Scope { self.scopes.last().expect("topmost_scope: no scopes present").region_scope } + + fn source_info(&self, index: usize, span: Span) -> SourceInfo { + self.scopes[self.len() - index].source_info(span) + } } impl<'a, 'tcx> Builder<'a, 'tcx> { @@ -439,50 +371,28 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // ========================== // Start a breakable scope, which tracks where `continue`, `break` and // `return` should branch to. - crate fn in_breakable_scope( + crate fn in_breakable_scope( &mut self, loop_block: Option, + break_block: BasicBlock, break_destination: Place<'tcx>, - span: Span, f: F, - ) -> BlockAnd<()> + ) -> R where - F: FnOnce(&mut Builder<'a, 'tcx>) -> Option>, + F: FnOnce(&mut Builder<'a, 'tcx>) -> R, { let region_scope = self.scopes.topmost(); let scope = BreakableScope { region_scope, + continue_block: loop_block, + break_block, break_destination, - break_drops: DropTree::new(), - continue_drops: loop_block.map(|_| DropTree::new()), }; self.scopes.breakable_scopes.push(scope); - let normal_exit_block = f(self); + let res = f(self); let breakable_scope = self.scopes.breakable_scopes.pop().unwrap(); assert!(breakable_scope.region_scope == region_scope); - let break_block = self.build_exit_tree(breakable_scope.break_drops, None); - breakable_scope.continue_drops.map(|drops| { - self.build_exit_tree(drops, loop_block); - }); - match (normal_exit_block, break_block) { - (Some(block), None) | (None, Some(block)) => block, - (None, None) => self.cfg.start_new_block().unit(), - (Some(normal_block), Some(exit_block)) => { - let target = self.cfg.start_new_block(); - let source_info = self.source_info(span); - self.cfg.terminate( - unpack!(normal_block), - source_info, - TerminatorKind::Goto { target }, - ); - self.cfg.terminate( - unpack!(exit_block), - source_info, - TerminatorKind::Goto { target }, - ); - target.unit() - } - } + res } crate fn in_opt_scope( @@ -566,51 +476,46 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { mut block: BasicBlock, ) -> BlockAnd<()> { debug!("pop_scope({:?}, {:?})", region_scope, block); + // If we are emitting a `drop` statement, we need to have the cached + // diverge cleanup pads ready in case that drop panics. + if self.scopes.may_panic(1) { + self.diverge_cleanup(); + } + let (scope, unwind_to) = self.scopes.pop_scope(region_scope); + let unwind_to = unwind_to.unwrap_or_else(|| self.resume_block()); - block = self.leave_top_scope(block); - - self.scopes.pop_scope(region_scope); + unpack!( + block = build_scope_drops( + &mut self.cfg, + self.generator_kind, + &scope, + block, + unwind_to, + self.arg_count, + false, // not generator + false, // not unwind path + ) + ); block.unit() } - /// Sets up the drops for breaking from `block` to `target`. crate fn break_scope( &mut self, mut block: BasicBlock, value: Option>, - target: BreakableTarget, + scope: BreakableTarget, source_info: SourceInfo, ) -> BlockAnd<()> { - let span = source_info.span; - - let get_scope_index = |scope: region::Scope| { - // find the loop-scope by its `region::Scope`. 
- self.scopes - .breakable_scopes - .iter() - .rposition(|breakable_scope| breakable_scope.region_scope == scope) - .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found")) - }; - let (break_index, destination) = match target { - BreakableTarget::Return => { - let scope = &self.scopes.breakable_scopes[0]; - if scope.break_destination != Place::return_place() { - span_bug!(span, "`return` in item with no return scope"); - } - (0, Some(scope.break_destination)) - } - BreakableTarget::Break(scope) => { - let break_index = get_scope_index(scope); - let scope = &self.scopes.breakable_scopes[break_index]; - (break_index, Some(scope.break_destination)) - } - BreakableTarget::Continue(scope) => { - let break_index = get_scope_index(scope); - (break_index, None) - } - }; - + let (mut target_block, region_scope, destination) = + self.scopes.find_breakable_scope(source_info.span, scope); + if let BreakableTarget::Return = scope { + // We call this now, rather than when we start lowering the + // function so that the return block doesn't precede the entire + // rest of the CFG. Some passes and LLVM prefer blocks to be in + // approximately CFG order. + target_block = self.return_block(); + } if let Some(destination) = destination { if let Some(value) = value { debug!("stmt_expr Break val block_context.push(SubExpr)"); @@ -623,57 +528,131 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } else { assert!(value.is_none(), "`return` and `break` should have a destination"); } - - let region_scope = self.scopes.breakable_scopes[break_index].region_scope; - let scope_index = self.scopes.scope_index(region_scope, span); - let drops = if destination.is_some() { - &mut self.scopes.breakable_scopes[break_index].break_drops - } else { - self.scopes.breakable_scopes[break_index].continue_drops.as_mut().unwrap() - }; - let mut drop_idx = ROOT_NODE; - for scope in &self.scopes.scopes[scope_index + 1..] { - for drop in &scope.drops { - drop_idx = drops.add_drop(*drop, drop_idx); - } - } - drops.add_entry(block, drop_idx); - - // `build_drop_tree` doesn't have access to our source_info, so we - // create a dummy terminator now. `TerminatorKind::Resume` is used - // because MIR type checking will panic if it hasn't been overwritten. - self.cfg.terminate(block, source_info, TerminatorKind::Resume); - + self.exit_scope(source_info.span, region_scope, block, target_block); self.cfg.start_new_block().unit() } - crate fn exit_top_scope( + /// Branch out of `block` to `target`, exiting all scopes up to + /// and including `region_scope`. This will insert whatever drops are + /// needed. See module comment for details. + crate fn exit_scope( &mut self, + span: Span, + region_scope: region::Scope, mut block: BasicBlock, target: BasicBlock, - source_info: SourceInfo, ) { - block = self.leave_top_scope(block); - self.cfg.terminate(block, source_info, TerminatorKind::Goto { target }); - } + debug!( + "exit_scope(region_scope={:?}, block={:?}, target={:?})", + region_scope, block, target + ); + let scope_count = self.scopes.num_scopes_above(region_scope, span); - fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock { // If we are emitting a `drop` statement, we need to have the cached // diverge cleanup pads ready in case that drop panics. 
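Both the removed get_scope_index logic and the restored find_breakable_scope above resolve a break, continue, or return by scanning the stack of breakable scopes from the innermost outwards, with return always routed through the outermost entry and continue only legal where a loop registered a continue block. A minimal runnable model of that lookup over a plain Vec, with integer ids standing in for region::Scope and BasicBlock and plain panics in place of span_bug!:

struct BreakableScope {
    region_scope: u32,
    break_block: usize,
    continue_block: Option<usize>,
}

enum Target {
    Return,
    Break(u32),
    Continue(u32),
}

/// Resolve a `break`/`continue`/`return` to the block it should jump to,
/// searching the breakable-scope stack from the innermost (last) entry.
fn find_breakable(scopes: &[BreakableScope], target: Target) -> usize {
    let get = |scope: u32| {
        scopes
            .iter()
            .rfind(|s| s.region_scope == scope)
            .expect("no enclosing breakable scope found")
    };
    match target {
        // `return` always exits through the outermost breakable scope.
        Target::Return => scopes[0].break_block,
        Target::Break(scope) => get(scope).break_block,
        Target::Continue(scope) => {
            get(scope).continue_block.expect("missing `continue` block")
        }
    }
}

fn main() {
    let scopes = vec![
        BreakableScope { region_scope: 0, break_block: 1, continue_block: None },
        BreakableScope { region_scope: 5, break_block: 10, continue_block: Some(9) },
    ];
    assert_eq!(find_breakable(&scopes, Target::Break(5)), 10);
    assert_eq!(find_breakable(&scopes, Target::Continue(5)), 9);
    assert_eq!(find_breakable(&scopes, Target::Return), 1);
}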
- let needs_cleanup = self.scopes.scopes.last().map_or(false, |scope| scope.needs_cleanup()); - let is_generator = self.generator_kind.is_some(); - let unwind_to = if needs_cleanup { self.diverge_cleanup() } else { DropIdx::MAX }; - - let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes"); - unpack!(build_scope_drops( - &mut self.cfg, - &mut self.scopes.unwind_drops, - scope, - block, - unwind_to, - is_generator && needs_cleanup, - self.arg_count, - )) + let may_panic = self.scopes.may_panic(scope_count); + if may_panic { + self.diverge_cleanup(); + } + + let mut scopes = self.scopes.top_scopes(scope_count + 1).rev(); + let mut scope = scopes.next().unwrap(); + for next_scope in scopes { + if scope.drops.is_empty() { + scope = next_scope; + continue; + } + let source_info = scope.source_info(span); + block = match scope.cached_exits.entry((target, region_scope)) { + Entry::Occupied(e) => { + self.cfg.goto(block, source_info, *e.get()); + return; + } + Entry::Vacant(v) => { + let b = self.cfg.start_new_block(); + self.cfg.goto(block, source_info, b); + v.insert(b); + b + } + }; + + let unwind_to = next_scope.cached_unwind.get(false).unwrap_or_else(|| { + debug_assert!(!may_panic, "cached block not present?"); + START_BLOCK + }); + + unpack!( + block = build_scope_drops( + &mut self.cfg, + self.generator_kind, + scope, + block, + unwind_to, + self.arg_count, + false, // not generator + false, // not unwind path + ) + ); + + scope = next_scope; + } + + self.cfg.goto(block, self.scopes.source_info(scope_count, span), target); + } + + /// Creates a path that performs all required cleanup for dropping a generator. + /// + /// This path terminates in GeneratorDrop. Returns the start of the path. + /// None indicates there’s no cleanup to do at this point. + crate fn generator_drop_cleanup(&mut self) -> Option { + // Fill in the cache for unwinds + self.diverge_cleanup_gen(true); + + let src_info = self.scopes.source_info(self.scopes.len(), self.fn_span); + let resume_block = self.resume_block(); + let mut scopes = self.scopes.iter_mut().peekable(); + let mut block = self.cfg.start_new_block(); + let result = block; + + while let Some(scope) = scopes.next() { + block = if let Some(b) = scope.cached_generator_drop { + self.cfg.goto(block, src_info, b); + return Some(result); + } else { + let b = self.cfg.start_new_block(); + scope.cached_generator_drop = Some(b); + self.cfg.goto(block, src_info, b); + b + }; + + let unwind_to = scopes + .peek() + .as_ref() + .map(|scope| { + scope + .cached_unwind + .get(true) + .unwrap_or_else(|| span_bug!(src_info.span, "cached block not present?")) + }) + .unwrap_or(resume_block); + + unpack!( + block = build_scope_drops( + &mut self.cfg, + self.generator_kind, + scope, + block, + unwind_to, + self.arg_count, + true, // is generator + true, // is cached path + ) + ); + } + + self.cfg.terminate(block, src_info, TerminatorKind::GeneratorDrop); + + Some(result) } /// Creates a new source scope, nested in the current one. 
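In the restored exit_scope, each scope keeps a cached_exits map so that two breaks leaving through the same scope towards the same target reuse one block of exit drops: the Entry::Occupied arm jumps to the cached block and stops, while the Entry::Vacant arm allocates a fresh block and records it. A stripped-down sketch of that get-or-build pattern, with usize block ids and an invented exit_block_for helper:

use std::collections::hash_map::{Entry, HashMap};

/// Per-scope cache of already-built exit blocks, keyed by the exit target.
struct ScopeExits {
    cached_exits: HashMap<usize, usize>,
    next_block: usize,
}

impl ScopeExits {
    /// Return the block that runs this scope's exit drops before jumping to
    /// `target`, building it on first use and reusing it afterwards.
    /// The second value reports whether the block came from the cache.
    fn exit_block_for(&mut self, target: usize) -> (usize, bool) {
        match self.cached_exits.entry(target) {
            Entry::Occupied(e) => (*e.get(), true),
            Entry::Vacant(v) => {
                let b = self.next_block; // stands in for cfg.start_new_block()
                self.next_block += 1;
                v.insert(b);
                (b, false)
            }
        }
    }
}

fn main() {
    let mut scope = ScopeExits { cached_exits: HashMap::new(), next_block: 100 };
    let (first, was_cached) = scope.exit_block_for(7);
    assert!(!was_cached);
    // A second `break` towards the same target reuses the drops built above.
    let (second, was_cached) = scope.exit_block_for(7);
    assert!(was_cached);
    assert_eq!(first, second);
}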
@@ -749,6 +728,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } + // Schedule an abort block - this is used for some ABIs that cannot unwind + crate fn schedule_abort(&mut self) -> BasicBlock { + let source_info = self.scopes.source_info(self.scopes.len(), self.fn_span); + let abortblk = self.cfg.start_new_cleanup_block(); + self.cfg.terminate(abortblk, source_info, TerminatorKind::Abort); + self.cached_resume_block = Some(abortblk); + abortblk + } + // Scheduling drops // ================ crate fn schedule_drop_storage_and_value( @@ -761,10 +749,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.schedule_drop(span, region_scope, local, DropKind::Value); } - /// Indicates that `place` should be dropped on exit from `region_scope`. + /// Indicates that `place` should be dropped on exit from + /// `region_scope`. /// - /// When called with `DropKind::Storage`, `place` shouldn't be the return - /// place, or a function parameter. + /// When called with `DropKind::Storage`, `place` should be a local + /// with an index higher than the current `self.arg_count`. crate fn schedule_drop( &mut self, span: Span, @@ -792,74 +781,70 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } }; - // When building drops, we try to cache chains of drops to reduce the - // number of `DropTree::add_drop` calls. This, however, means that - // whenever we add a drop into a scope which already had some entries - // in the drop tree built (and thus, cached) for it, we must invalidate - // all caches which might branch into the scope which had a drop just - // added to it. This is necessary, because otherwise some other code - // might use the cache to branch into already built chain of drops, - // essentially ignoring the newly added drop. - // - // For example consider there’s two scopes with a drop in each. These - // are built and thus the caches are filled: - // - // +--------------------------------------------------------+ - // | +---------------------------------+ | - // | | +--------+ +-------------+ | +---------------+ | - // | | | return | <-+ | drop(outer) | <-+ | drop(middle) | | - // | | +--------+ +-------------+ | +---------------+ | - // | +------------|outer_scope cache|--+ | - // +------------------------------|middle_scope cache|------+ - // - // Now, a new, inner-most scope is added along with a new drop into - // both inner-most and outer-most scopes: - // - // +------------------------------------------------------------+ - // | +----------------------------------+ | - // | | +--------+ +-------------+ | +---------------+ | +-------------+ - // | | | return | <+ | drop(new) | <-+ | drop(middle) | <--+| drop(inner) | - // | | +--------+ | | drop(outer) | | +---------------+ | +-------------+ - // | | +-+ +-------------+ | | - // | +---|invalid outer_scope cache|----+ | - // +----=----------------|invalid middle_scope cache|-----------+ - // - // If, when adding `drop(new)` we do not invalidate the cached blocks for both - // outer_scope and middle_scope, then, when building drops for the inner (right-most) - // scope, the old, cached blocks, without `drop(new)` will get used, producing the - // wrong results. - // - // Note that this code iterates scopes from the inner-most to the outer-most, - // invalidating caches of each scope visited. This way bare minimum of the - // caches gets invalidated. i.e., if a new drop is added into the middle scope, the - // cache of outer scope stays intact. 
- // - // Since we only cache drops for the unwind path and the generator drop - // path, we only need to invalidate the cache for drops that happen on - // the unwind or generator drop paths. This means that for - // non-generators we don't need to invalidate caches for `DropKind::Storage`. - let invalidate_caches = needs_drop || self.generator_kind.is_some(); - for scope in self.scopes.scopes.iter_mut().rev() { - if invalidate_caches { - scope.invalidate_cache(); - } - - if scope.region_scope == region_scope { + for scope in self.scopes.iter_mut() { + let this_scope = scope.region_scope == region_scope; + // When building drops, we try to cache chains of drops in such a way so these drops + // could be reused by the drops which would branch into the cached (already built) + // blocks. This, however, means that whenever we add a drop into a scope which already + // had some blocks built (and thus, cached) for it, we must invalidate all caches which + // might branch into the scope which had a drop just added to it. This is necessary, + // because otherwise some other code might use the cache to branch into already built + // chain of drops, essentially ignoring the newly added drop. + // + // For example consider there’s two scopes with a drop in each. These are built and + // thus the caches are filled: + // + // +--------------------------------------------------------+ + // | +---------------------------------+ | + // | | +--------+ +-------------+ | +---------------+ | + // | | | return | <-+ | drop(outer) | <-+ | drop(middle) | | + // | | +--------+ +-------------+ | +---------------+ | + // | +------------|outer_scope cache|--+ | + // +------------------------------|middle_scope cache|------+ + // + // Now, a new, inner-most scope is added along with a new drop into both inner-most and + // outer-most scopes: + // + // +------------------------------------------------------------+ + // | +----------------------------------+ | + // | | +--------+ +-------------+ | +---------------+ | +-------------+ + // | | | return | <+ | drop(new) | <-+ | drop(middle) | <--+| drop(inner) | + // | | +--------+ | | drop(outer) | | +---------------+ | +-------------+ + // | | +-+ +-------------+ | | + // | +---|invalid outer_scope cache|----+ | + // +----=----------------|invalid middle_scope cache|-----------+ + // + // If, when adding `drop(new)` we do not invalidate the cached blocks for both + // outer_scope and middle_scope, then, when building drops for the inner (right-most) + // scope, the old, cached blocks, without `drop(new)` will get used, producing the + // wrong results. + // + // The cache and its invalidation for unwind branch is somewhat special. The cache is + // per-drop, rather than per scope, which has a several different implications. Adding + // a new drop into a scope will not invalidate cached blocks of the prior drops in the + // scope. That is true, because none of the already existing drops will have an edge + // into a block with the newly added drop. + // + // Note that this code iterates scopes from the inner-most to the outer-most, + // invalidating caches of each scope visited. This way bare minimum of the + // caches gets invalidated. i.e., if a new drop is added into the middle scope, the + // cache of outer scope stays intact. + scope.invalidate_cache(!needs_drop, self.generator_kind, this_scope); + if this_scope { let region_scope_span = region_scope.span(self.hir.tcx(), &self.hir.region_scope_tree); // Attribute scope exit drops to scope's closing brace. 
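The restored comment block above is the crux of the caching scheme: a cached chain of drops becomes wrong the moment a new drop is scheduled into a scope that the chain passes through, so every affected cache must be invalidated eagerly. The toy scope below shows the failure mode being guarded against; if schedule_drop did not clear the memoized exit chain, the second exit_chain call would still return ["outer"] and the new drop would be silently skipped. The Vec<&str> "chain" and all names are purely illustrative.

/// A scope that memoizes the list of drops its exit path performs.
struct MemoScope {
    drops: Vec<&'static str>,
    cached_exit: Option<Vec<&'static str>>,
}

impl MemoScope {
    fn new() -> Self {
        MemoScope { drops: Vec::new(), cached_exit: None }
    }

    /// Schedule a drop. Without clearing the cache below, an exit chain built
    /// earlier would keep being reused and silently skip this drop.
    fn schedule_drop(&mut self, local: &'static str) {
        self.drops.push(local);
        self.cached_exit = None; // invalidate the stale chain
    }

    /// Build the exit chain (innermost drop first), reusing the cache if present.
    fn exit_chain(&mut self) -> Vec<&'static str> {
        if self.cached_exit.is_none() {
            let chain: Vec<&'static str> = self.drops.iter().rev().copied().collect();
            self.cached_exit = Some(chain);
        }
        self.cached_exit.clone().unwrap()
    }
}

fn main() {
    let mut scope = MemoScope::new();
    scope.schedule_drop("outer");
    assert_eq!(scope.exit_chain(), vec!["outer"]); // chain is now cached
    scope.schedule_drop("new");
    // Scheduling invalidated the cache, so the new drop is not skipped.
    assert_eq!(scope.exit_chain(), vec!["new", "outer"]);
}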
let scope_end = self.hir.tcx().sess.source_map().end_point(region_scope_span); scope.drops.push(DropData { - source_info: SourceInfo { span: scope_end, scope: scope.source_scope }, + span: scope_end, local, kind: drop_kind, + cached_block: CachedBlock::default(), }); - return; } } - span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, local); } @@ -907,10 +892,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } Some(local_scope) => self - .scopes .scopes .iter_mut() - .rfind(|scope| scope.region_scope == local_scope) + .find(|scope| scope.region_scope == local_scope) .unwrap_or_else(|| bug!("scope {:?} not found in scope list!", local_scope)), }; @@ -960,16 +944,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { // Manually drop the condition on both branches. let top_scope = self.scopes.scopes.last_mut().unwrap(); let top_drop_data = top_scope.drops.pop().unwrap(); - if self.generator_kind.is_some() { - top_scope.invalidate_cache(); - } match top_drop_data.kind { DropKind::Value { .. } => { bug!("Drop scheduled on top of condition variable") } DropKind::Storage => { - let source_info = top_drop_data.source_info; + let source_info = top_scope.source_info(top_drop_data.span); let local = top_drop_data.local; assert_eq!(local, cond_temp, "Drop scheduled on top of condition"); self.cfg.push( @@ -982,6 +963,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ); } } + + top_scope.invalidate_cache(true, self.generator_kind, true); } else { bug!("Expected as_local_operand to produce a temporary"); } @@ -991,86 +974,62 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { (true_block, false_block) } - /// Returns the [DropIdx] for the innermost drop if the function unwound at - /// this point. The `DropIdx` will be created if it doesn't already exist. - fn diverge_cleanup(&mut self) -> DropIdx { - let is_generator = self.generator_kind.is_some(); - let (uncached_scope, mut cached_drop) = self - .scopes - .scopes - .iter() - .enumerate() - .rev() - .find_map(|(scope_idx, scope)| { - scope.cached_unwind_block.map(|cached_block| (scope_idx + 1, cached_block)) - }) - .unwrap_or((0, ROOT_NODE)); - - for scope in &mut self.scopes.scopes[uncached_scope..] { - for drop in &scope.drops { - if is_generator || drop.kind == DropKind::Value { - cached_drop = self.scopes.unwind_drops.add_drop(*drop, cached_drop); - } - } - scope.cached_unwind_block = Some(cached_drop); - } - - cached_drop - } - - /// Prepares to create a path that performs all required cleanup for a - /// terminator that can unwind at the given basic block. + /// Creates a path that performs all required cleanup for unwinding. /// - /// This path terminates in Resume. The path isn't created until after all - /// of the non-unwind paths in this item have been lowered. - crate fn diverge_from(&mut self, start: BasicBlock) { - debug_assert!( - matches!( - self.cfg.block_data(start).terminator().kind, - TerminatorKind::Assert { .. } - | TerminatorKind::Call {..} - | TerminatorKind::DropAndReplace { .. } - | TerminatorKind::FalseUnwind { .. } - ), - "diverge_from called on block with terminator that cannot unwind." - ); + /// This path terminates in Resume. Returns the start of the path. + /// See module comment for more details. 
+ crate fn diverge_cleanup(&mut self) -> BasicBlock { + self.diverge_cleanup_gen(false) + } - let next_drop = self.diverge_cleanup(); - self.scopes.unwind_drops.add_entry(start, next_drop); + fn resume_block(&mut self) -> BasicBlock { + if let Some(target) = self.cached_resume_block { + target + } else { + let resumeblk = self.cfg.start_new_cleanup_block(); + self.cfg.terminate( + resumeblk, + SourceInfo::outermost(self.fn_span), + TerminatorKind::Resume, + ); + self.cached_resume_block = Some(resumeblk); + resumeblk + } } - /// Sets up a path that performs all required cleanup for dropping a - /// generator, starting from the given block that ends in - /// [TerminatorKind::Yield]. - /// - /// This path terminates in GeneratorDrop. - crate fn generator_drop_cleanup(&mut self, yield_block: BasicBlock) { - debug_assert!( - matches!( - self.cfg.block_data(yield_block).terminator().kind, - TerminatorKind::Yield { .. } - ), - "generator_drop_cleanup called on block with non-yield terminator." - ); - let (uncached_scope, mut cached_drop) = self - .scopes - .scopes - .iter() - .enumerate() - .rev() - .find_map(|(scope_idx, scope)| { - scope.cached_generator_drop_block.map(|cached_block| (scope_idx + 1, cached_block)) - }) - .unwrap_or((0, ROOT_NODE)); - - for scope in &mut self.scopes.scopes[uncached_scope..] { - for drop in &scope.drops { - cached_drop = self.scopes.generator_drops.add_drop(*drop, cached_drop); - } - scope.cached_generator_drop_block = Some(cached_drop); + fn diverge_cleanup_gen(&mut self, generator_drop: bool) -> BasicBlock { + // Build up the drops in **reverse** order. The end result will + // look like: + // + // scopes[n] -> scopes[n-1] -> ... -> scopes[0] + // + // However, we build this in **reverse order**. That is, we + // process scopes[0], then scopes[1], etc, pointing each one at + // the result generates from the one before. Along the way, we + // store caches. If everything is cached, we'll just walk right + // to left reading the cached results but never created anything. + + // Find the last cached block + debug!("diverge_cleanup_gen(self.scopes = {:?})", self.scopes); + let cached_cleanup = self.scopes.iter_mut().enumerate().find_map(|(idx, ref scope)| { + let cached_block = scope.cached_unwind.get(generator_drop)?; + Some((cached_block, idx)) + }); + let (mut target, first_uncached) = + cached_cleanup.unwrap_or_else(|| (self.resume_block(), self.scopes.len())); + + for scope in self.scopes.top_scopes(first_uncached) { + target = build_diverge_scope( + &mut self.cfg, + scope.region_scope_span, + scope, + target, + generator_drop, + self.generator_kind, + ); } - self.scopes.generator_drops.add_entry(yield_block, cached_drop); + target } /// Utility function for *non*-scope code to build their own drops @@ -1083,18 +1042,21 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ) -> BlockAnd<()> { let source_info = self.source_info(span); let next_target = self.cfg.start_new_block(); - + let diverge_target = self.diverge_cleanup(); self.cfg.terminate( block, source_info, - TerminatorKind::DropAndReplace { location, value, target: next_target, unwind: None }, + TerminatorKind::DropAndReplace { + location, + value, + target: next_target, + unwind: Some(diverge_target), + }, ); - self.diverge_from(block); - next_target.unit() } - /// Creates an `Assert` terminator and return the success block. + /// Creates an Assert terminator and return the success block. /// If the boolean condition operand is not the expected value, /// a runtime panic will be caused with the given message. 
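The reinstated diverge_cleanup_gen above rebuilds the unwind path incrementally: it finds the innermost scope whose unwind block is already cached, starts from that block (or from the shared resume block), and only the scopes inside it get new cleanup blocks, each of which is cached for the next request. A compilable toy version of that search-then-extend loop follows; block creation is faked with a counter and every name is invented rather than taken from the compiler.

/// One scope on the stack: the locals it drops during unwinding and the
/// cached entry block of its unwind path, if that path was already built.
struct UnwindScope {
    drops: Vec<&'static str>,
    cached_unwind: Option<usize>,
}

/// Build (or reuse) the unwind path for a stack of scopes (outermost first).
/// Returns the block a failing statement should unwind to.
fn diverge_cleanup(
    scopes: &mut [UnwindScope],
    resume_block: usize,
    new_block: &mut impl FnMut() -> usize,
) -> usize {
    // Find the innermost scope whose unwind path is already cached.
    let first_uncached = scopes
        .iter()
        .rposition(|s| s.cached_unwind.is_some())
        .map_or(0, |idx| idx + 1);
    let mut target = if first_uncached == 0 {
        resume_block
    } else {
        scopes[first_uncached - 1].cached_unwind.unwrap()
    };
    // Extend the path with the scopes that are not cached yet, caching as we go.
    for scope in &mut scopes[first_uncached..] {
        for _local in &scope.drops {
            // Stands in for "new cleanup block that drops `_local`, then
            // continues with the previous `target`".
            target = new_block();
        }
        scope.cached_unwind = Some(target);
    }
    target
}

fn main() {
    let mut next = 10;
    let mut new_block = || {
        next += 1;
        next
    };
    let mut scopes = vec![
        UnwindScope { drops: vec!["a"], cached_unwind: None },
        UnwindScope { drops: vec!["b", "c"], cached_unwind: None },
    ];
    let first = diverge_cleanup(&mut scopes, 0, &mut new_block);
    // The second request finds everything cached and creates no new blocks.
    let second = diverge_cleanup(&mut scopes, 0, &mut new_block);
    assert_eq!(first, second);
    assert_eq!(next, 13); // only three cleanup blocks were ever created
}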
crate fn assert( @@ -1106,41 +1068,51 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { span: Span, ) -> BasicBlock { let source_info = self.source_info(span); + let success_block = self.cfg.start_new_block(); + let cleanup = self.diverge_cleanup(); self.cfg.terminate( block, source_info, - TerminatorKind::Assert { cond, expected, msg, target: success_block, cleanup: None }, + TerminatorKind::Assert { + cond, + expected, + msg, + target: success_block, + cleanup: Some(cleanup), + }, ); - self.diverge_from(block); success_block } + // `match` arm scopes + // ================== /// Unschedules any drops in the top scope. /// /// This is only needed for `match` arm scopes, because they have one /// entrance per pattern, but only one exit. - crate fn clear_top_scope(&mut self, region_scope: region::Scope) { + pub(crate) fn clear_top_scope(&mut self, region_scope: region::Scope) { let top_scope = self.scopes.scopes.last_mut().unwrap(); assert_eq!(top_scope.region_scope, region_scope); top_scope.drops.clear(); - top_scope.invalidate_cache(); + top_scope.invalidate_cache(false, self.generator_kind, true); } } -/// Builds drops for `pop_scope` and `leave_top_scope`. +/// Builds drops for pop_scope and exit_scope. fn build_scope_drops<'tcx>( cfg: &mut CFG<'tcx>, - unwind_drops: &mut DropTree, + generator_kind: Option, scope: &Scope, mut block: BasicBlock, - mut unwind_to: DropIdx, - storage_dead_on_unwind: bool, + last_unwind_to: BasicBlock, arg_count: usize, + generator_drop: bool, + is_cached_path: bool, ) -> BlockAnd<()> { debug!("build_scope_drops({:?} -> {:?})", block, scope); @@ -1163,43 +1135,37 @@ fn build_scope_drops<'tcx>( // drops for the unwind path should have already been generated by // `diverge_cleanup_gen`. - for drop_data in scope.drops.iter().rev() { - let source_info = drop_data.source_info; + for drop_idx in (0..scope.drops.len()).rev() { + let drop_data = &scope.drops[drop_idx]; + let source_info = scope.source_info(drop_data.span); let local = drop_data.local; match drop_data.kind { DropKind::Value => { - // `unwind_to` should drop the value that we're about to - // schedule. If dropping this value panics, then we continue - // with the *next* value on the unwind path. - debug_assert_eq!(unwind_drops.drops[unwind_to].0.local, drop_data.local); - debug_assert_eq!(unwind_drops.drops[unwind_to].0.kind, drop_data.kind); - unwind_to = unwind_drops.drops[unwind_to].1; - // If the operand has been moved, and we are not on an unwind // path, then don't generate the drop. (We only take this into // account for non-unwind paths so as not to disturb the // caching mechanism.) - if scope.moved_locals.iter().any(|&o| o == local) { + if !is_cached_path && scope.moved_locals.iter().any(|&o| o == local) { continue; } - unwind_drops.add_entry(block, unwind_to); + let unwind_to = get_unwind_to(scope, generator_kind, drop_idx, generator_drop) + .unwrap_or(last_unwind_to); let next = cfg.start_new_block(); cfg.terminate( block, source_info, - TerminatorKind::Drop { location: local.into(), target: next, unwind: None }, + TerminatorKind::Drop { + location: local.into(), + target: next, + unwind: Some(unwind_to), + }, ); block = next; } DropKind::Storage => { - if storage_dead_on_unwind { - debug_assert_eq!(unwind_drops.drops[unwind_to].0.local, drop_data.local); - debug_assert_eq!(unwind_drops.drops[unwind_to].0.kind, drop_data.kind); - unwind_to = unwind_drops.drops[unwind_to].1; - } // Only temps and vars need their storage dead. 
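The loop above in build_scope_drops walks a scope's scheduled drops in reverse declaration order, emitting a Drop terminator for full values, skipping values that were moved out (but only on the normal path, so the cached unwind path stays reusable), and emitting a plain StorageDead statement for storage-only entries. A small executable model of that walk, producing strings instead of MIR; the on_cached_path flag plays the role of is_cached_path:

enum Kind {
    Value,
    Storage,
}

struct DropEntry {
    local: &'static str,
    kind: Kind,
}

/// Emit the drops of one scope in reverse declaration order as a list of
/// pseudo-instructions. `moved` lists locals whose value was moved out, which
/// suppresses the drop only on the normal (non-cached) path.
fn build_scope_drops(drops: &[DropEntry], moved: &[&str], on_cached_path: bool) -> Vec<String> {
    let mut out = Vec::new();
    for d in drops.iter().rev() {
        match d.kind {
            Kind::Value => {
                if !on_cached_path && moved.contains(&d.local) {
                    continue; // value already moved away: nothing to drop
                }
                out.push(format!("drop({})", d.local)); // terminator, starts a new block
            }
            Kind::Storage => {
                out.push(format!("StorageDead({})", d.local)); // plain statement
            }
        }
    }
    out
}

fn main() {
    let drops = [
        DropEntry { local: "a", kind: Kind::Value },
        DropEntry { local: "b", kind: Kind::Storage },
        DropEntry { local: "c", kind: Kind::Value },
    ];
    // `c` was moved out, so the normal exit path does not drop it...
    assert_eq!(
        build_scope_drops(&drops, &["c"], false),
        vec!["StorageDead(b)", "drop(a)"]
    );
    // ...but the cached unwind path ignores the move set, so it stays reusable.
    assert_eq!(
        build_scope_drops(&drops, &["c"], true),
        vec!["drop(c)", "StorageDead(b)", "drop(a)"]
    );
}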
assert!(local.index() > arg_count); cfg.push(block, Statement { source_info, kind: StatementKind::StorageDead(local) }); @@ -1209,189 +1175,139 @@ fn build_scope_drops<'tcx>( block.unit() } -impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { - /// Build a drop tree for a breakable scope. - /// - /// If `continue_block` is `Some`, then the tree is for `continue` inside a - /// loop. Otherwise this is for `break` or `return`. - fn build_exit_tree( - &mut self, - mut drops: DropTree, - continue_block: Option, - ) -> Option> { - let mut blocks = IndexVec::from_elem(None, &drops.drops); - blocks[ROOT_NODE] = continue_block; - - drops.build_mir::(&mut self.cfg, &mut blocks); - - // Link the exit drop tree to unwind drop tree. - if drops.drops.iter().any(|(drop, _)| drop.kind == DropKind::Value) { - let unwind_target = self.diverge_cleanup(); - let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1); - for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) { - match drop_data.0.kind { - DropKind::Storage => { - if self.generator_kind.is_some() { - let unwind_drop = self - .scopes - .unwind_drops - .add_drop(drop_data.0, unwind_indices[drop_data.1]); - unwind_indices.push(unwind_drop); - } else { - unwind_indices.push(unwind_indices[drop_data.1]); - } - } - DropKind::Value => { - let unwind_drop = self - .scopes - .unwind_drops - .add_drop(drop_data.0, unwind_indices[drop_data.1]); - self.scopes - .unwind_drops - .add_entry(blocks[drop_idx].unwrap(), unwind_indices[drop_data.1]); - unwind_indices.push(unwind_drop); - } - } +fn get_unwind_to( + scope: &Scope, + generator_kind: Option, + unwind_from: usize, + generator_drop: bool, +) -> Option { + for drop_idx in (0..unwind_from).rev() { + let drop_data = &scope.drops[drop_idx]; + match (generator_kind, &drop_data.kind) { + (Some(_), DropKind::Storage) => { + return Some(drop_data.cached_block.get(generator_drop).unwrap_or_else(|| { + span_bug!(drop_data.span, "cached block not present for {:?}", drop_data) + })); } - } - blocks[ROOT_NODE].map(BasicBlock::unit) - } - - /// Build the unwind and generator drop trees. - crate fn build_drop_trees(&mut self, should_abort: bool) { - if self.generator_kind.is_some() { - self.build_generator_drop_trees(should_abort); - } else { - Self::build_unwind_tree( - &mut self.cfg, - &mut self.scopes.unwind_drops, - self.fn_span, - should_abort, - &mut None, - ); - } - } - - fn build_generator_drop_trees(&mut self, should_abort: bool) { - // Build the drop tree for dropping the generator while it's suspended. - let drops = &mut self.scopes.generator_drops; - let cfg = &mut self.cfg; - let fn_span = self.fn_span; - let mut blocks = IndexVec::from_elem(None, &drops.drops); - drops.build_mir::(cfg, &mut blocks); - if let Some(root_block) = blocks[ROOT_NODE] { - cfg.terminate( - root_block, - SourceInfo::outermost(fn_span), - TerminatorKind::GeneratorDrop, - ); - } - - // Build the drop tree for unwinding in the normal control flow paths. - let resume_block = &mut None; - let unwind_drops = &mut self.scopes.unwind_drops; - Self::build_unwind_tree(cfg, unwind_drops, fn_span, should_abort, resume_block); - - // Build the drop tree for unwinding when dropping a suspended - // generator. - // - // This is a different tree to the standard unwind paths here to - // prevent drop elaboration from creating drop flags that would have - // to be captured by the generator. I'm not sure how important this - // optimization is, but it is here. 
- for (drop_idx, drop_data) in drops.drops.iter_enumerated() { - if let DropKind::Value = drop_data.0.kind { - debug_assert!(drop_data.1 < drops.drops.next_index()); - drops.entry_points.push((drop_data.1, blocks[drop_idx].unwrap())); + (None, DropKind::Value) => { + return Some(drop_data.cached_block.get(generator_drop).unwrap_or_else(|| { + span_bug!(drop_data.span, "cached block not present for {:?}", drop_data) + })); } - } - Self::build_unwind_tree(cfg, drops, fn_span, should_abort, resume_block); - } - - fn build_unwind_tree( - cfg: &mut CFG<'tcx>, - drops: &mut DropTree, - fn_span: Span, - should_abort: bool, - resume_block: &mut Option, - ) { - let mut blocks = IndexVec::from_elem(None, &drops.drops); - blocks[ROOT_NODE] = *resume_block; - drops.build_mir::(cfg, &mut blocks); - if let (None, Some(resume)) = (*resume_block, blocks[ROOT_NODE]) { - // `TerminatorKind::Abort` is used for `#[unwind(aborts)]` - // functions. - let terminator = - if should_abort { TerminatorKind::Abort } else { TerminatorKind::Resume }; - - cfg.terminate(resume, SourceInfo::outermost(fn_span), terminator); - - *resume_block = blocks[ROOT_NODE]; + _ => (), } } + None } -// DropTreeBuilder implementations. - -struct ExitScopes; - -impl<'tcx> DropTreeBuilder<'tcx> for ExitScopes { - fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock { - cfg.start_new_block() - } - fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) { - cfg.block_data_mut(from).terminator_mut().kind = TerminatorKind::Goto { target: to }; +fn build_diverge_scope<'tcx>( + cfg: &mut CFG<'tcx>, + span: Span, + scope: &mut Scope, + mut target: BasicBlock, + generator_drop: bool, + generator_kind: Option, +) -> BasicBlock { + // Build up the drops in **reverse** order. The end result will + // look like: + // + // [drops[n]] -...-> [drops[0]] -> [target] + // + // The code in this function reads from right to left. At each + // point, we check for cached blocks representing the + // remainder. If everything is cached, we'll just walk right to + // left reading the cached results but never create anything. + + let source_scope = scope.source_scope; + let source_info = |span| SourceInfo { span, scope: source_scope }; + + // We keep track of StorageDead statements to prepend to our current block + // and store them here, in reverse order. + let mut storage_deads = vec![]; + + let mut target_built_by_us = false; + + // Build up the drops. Here we iterate the vector in + // *forward* order, so that we generate drops[0] first (right to + // left in diagram above). + debug!("build_diverge_scope({:?})", scope.drops); + for (j, drop_data) in scope.drops.iter_mut().enumerate() { + debug!("build_diverge_scope drop_data[{}]: {:?}", j, drop_data); + // Only full value drops are emitted in the diverging path, + // not StorageDead, except in the case of generators. + // + // Note: This may not actually be what we desire (are we + // "freeing" stack storage as we unwind, or merely observing a + // frozen stack)? In particular, the intent may have been to + // match the behavior of clang, but on inspection eddyb says + // this is not what clang does. + match drop_data.kind { + DropKind::Storage if generator_kind.is_some() => { + storage_deads.push(Statement { + source_info: source_info(drop_data.span), + kind: StatementKind::StorageDead(drop_data.local), + }); + if !target_built_by_us { + // We cannot add statements to an existing block, so we create a new + // block for our StorageDead statements. 
+ let block = cfg.start_new_cleanup_block(); + let source_info = SourceInfo { span: DUMMY_SP, scope: source_scope }; + cfg.goto(block, source_info, target); + target = block; + target_built_by_us = true; + } + *drop_data.cached_block.ref_mut(generator_drop) = Some(target); + } + DropKind::Storage => {} + DropKind::Value => { + let cached_block = drop_data.cached_block.ref_mut(generator_drop); + target = if let Some(cached_block) = *cached_block { + storage_deads.clear(); + target_built_by_us = false; + cached_block + } else { + push_storage_deads(cfg, target, &mut storage_deads); + let block = cfg.start_new_cleanup_block(); + cfg.terminate( + block, + source_info(drop_data.span), + TerminatorKind::Drop { + location: drop_data.local.into(), + target, + unwind: None, + }, + ); + *cached_block = Some(block); + target_built_by_us = true; + block + }; + } + }; } -} + push_storage_deads(cfg, target, &mut storage_deads); + *scope.cached_unwind.ref_mut(generator_drop) = Some(target); -struct GeneratorDrop; + assert!(storage_deads.is_empty()); + debug!("build_diverge_scope({:?}, {:?}) = {:?}", scope, span, target); -impl<'tcx> DropTreeBuilder<'tcx> for GeneratorDrop { - fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock { - cfg.start_new_block() - } - fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) { - let term = cfg.block_data_mut(from).terminator_mut(); - if let TerminatorKind::Yield { ref mut drop, .. } = term.kind { - *drop = Some(to); - } else { - span_bug!( - term.source_info.span, - "cannot enter generator drop tree from {:?}", - term.kind - ) - } - } + target } -struct Unwind; - -impl<'tcx> DropTreeBuilder<'tcx> for Unwind { - fn make_block(cfg: &mut CFG<'tcx>) -> BasicBlock { - cfg.start_new_cleanup_block() - } - fn add_entry(cfg: &mut CFG<'tcx>, from: BasicBlock, to: BasicBlock) { - let term = &mut cfg.block_data_mut(from).terminator_mut(); - match &mut term.kind { - TerminatorKind::Drop { unwind, .. } - | TerminatorKind::DropAndReplace { unwind, .. } - | TerminatorKind::FalseUnwind { unwind, .. } - | TerminatorKind::Call { cleanup: unwind, .. } - | TerminatorKind::Assert { cleanup: unwind, .. } => { - *unwind = Some(to); - } - TerminatorKind::Goto { .. } - | TerminatorKind::SwitchInt { .. } - | TerminatorKind::Resume - | TerminatorKind::Abort - | TerminatorKind::Return - | TerminatorKind::Unreachable - | TerminatorKind::Yield { .. } - | TerminatorKind::GeneratorDrop - | TerminatorKind::FalseEdges { .. } - | TerminatorKind::InlineAsm { .. } => { - span_bug!(term.source_info.span, "cannot unwind from {:?}", term.kind) - } - } +fn push_storage_deads<'tcx>( + cfg: &mut CFG<'tcx>, + target: BasicBlock, + storage_deads: &mut Vec>, +) { + if storage_deads.is_empty() { + return; } + let statements = &mut cfg.block_data_mut(target).statements; + storage_deads.reverse(); + debug!( + "push_storage_deads({:?}), storage_deads={:?}, statements={:?}", + target, storage_deads, statements + ); + storage_deads.append(statements); + mem::swap(statements, storage_deads); + assert!(storage_deads.is_empty()); } diff --git a/src/librustc_trait_selection/traits/structural_match.rs b/src/librustc_trait_selection/traits/structural_match.rs index b877049fcf667..87ff667b6a09a 100644 --- a/src/librustc_trait_selection/traits/structural_match.rs +++ b/src/librustc_trait_selection/traits/structural_match.rs @@ -251,7 +251,10 @@ impl<'a, 'tcx> TypeVisitor<'tcx> for Search<'a, 'tcx> { // fields of ADT. 
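Returning to push_storage_deads from the scope-building change above: it prepends the StorageDead statements collected while walking drops back-to-front by reversing the pending list, appending the block's existing statements onto it, and swapping the two vectors. A self-contained sketch of that reverse/append/swap dance with Strings in place of MIR statements:

use std::mem;

/// Prepend `pending` (collected in reverse source order) to the front of
/// `statements`, leaving `pending` empty: the same reverse/append/swap dance
/// as `push_storage_deads` above.
fn prepend_statements(statements: &mut Vec<String>, pending: &mut Vec<String>) {
    if pending.is_empty() {
        return;
    }
    pending.reverse(); // restore source order
    pending.append(statements); // pending = [new..., old...]; `statements` is now empty
    mem::swap(statements, pending); // move the combined list back into place
    assert!(pending.is_empty());
}

fn main() {
    let mut block = vec!["drop(x)".to_string()];
    // StorageDead statements pushed while walking the drops back-to-front.
    let mut pending = vec!["StorageDead(b)".to_string(), "StorageDead(a)".to_string()];
    prepend_statements(&mut block, &mut pending);
    assert_eq!(block, vec!["StorageDead(a)", "StorageDead(b)", "drop(x)"]);
    assert!(pending.is_empty());
}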
let tcx = self.tcx(); for field_ty in adt_def.all_fields().map(|field| field.ty(tcx, substs)) { - if field_ty.visit_with(self) { + let ty = self.tcx().normalize_erasing_regions(ty::ParamEnv::empty(), field_ty); + debug!("structural-match ADT: field_ty={:?}, ty={:?}", field_ty, ty); + + if ty.visit_with(self) { // found an ADT without structural-match; halt visiting! assert!(self.found.is_some()); return true; diff --git a/src/libstd/net/addr.rs b/src/libstd/net/addr.rs index b780340884e1f..b8fa1a7f744d3 100644 --- a/src/libstd/net/addr.rs +++ b/src/libstd/net/addr.rs @@ -694,42 +694,6 @@ impl PartialEq for SocketAddrV6 { && self.inner.sin6_scope_id == other.inner.sin6_scope_id } } -#[stable(feature = "socketaddr_ordering", since = "1.45.0")] -impl PartialEq for SocketAddr { - fn eq(&self, other: &SocketAddrV4) -> bool { - match self { - SocketAddr::V4(v4) => v4 == other, - SocketAddr::V6(_) => false, - } - } -} -#[stable(feature = "socketaddr_ordering", since = "1.45.0")] -impl PartialEq for SocketAddr { - fn eq(&self, other: &SocketAddrV6) -> bool { - match self { - SocketAddr::V4(_) => false, - SocketAddr::V6(v6) => v6 == other, - } - } -} -#[stable(feature = "socketaddr_ordering", since = "1.45.0")] -impl PartialEq for SocketAddrV4 { - fn eq(&self, other: &SocketAddr) -> bool { - match other { - SocketAddr::V4(v4) => self == v4, - SocketAddr::V6(_) => false, - } - } -} -#[stable(feature = "socketaddr_ordering", since = "1.45.0")] -impl PartialEq for SocketAddrV6 { - fn eq(&self, other: &SocketAddr) -> bool { - match other { - SocketAddr::V4(_) => false, - SocketAddr::V6(v6) => self == v6, - } - } -} #[stable(feature = "rust1", since = "1.0.0")] impl Eq for SocketAddrV4 {} #[stable(feature = "rust1", since = "1.0.0")] @@ -1242,12 +1206,8 @@ mod tests { // equality assert_eq!(v4_1, v4_1); assert_eq!(v6_1, v6_1); - assert_eq!(v4_1, SocketAddr::V4(v4_1)); - assert_eq!(v6_1, SocketAddr::V6(v6_1)); assert_eq!(SocketAddr::V4(v4_1), SocketAddr::V4(v4_1)); assert_eq!(SocketAddr::V6(v6_1), SocketAddr::V6(v6_1)); - assert!(v4_1 != SocketAddr::V6(v6_1)); - assert!(v6_1 != SocketAddr::V4(v4_1)); assert!(v4_1 != v4_2); assert!(v6_1 != v6_2); @@ -1268,5 +1228,10 @@ mod tests { assert!(v6_1 < v6_3); assert!(v4_3 > v4_1); assert!(v6_3 > v6_1); + + // compare with an inferred right-hand side + assert_eq!(v4_1, "224.120.45.1:23456".parse().unwrap()); + assert_eq!(v6_1, "[2001:db8:f00::1002]:23456".parse().unwrap()); + assert_eq!(SocketAddr::V4(v4_1), "224.120.45.1:23456".parse().unwrap()); } } diff --git a/src/stage0.txt b/src/stage0.txt index 0778fbab7ea2c..13bac2e268fe1 100644 --- a/src/stage0.txt +++ b/src/stage0.txt @@ -12,7 +12,7 @@ # source tarball for a stable release you'll likely see `1.x.0` for rustc and # `0.(x+1).0` for Cargo where they were released on `date`. -date: 2020-06-01 +date: 2020-06-04 rustc: 1.44.0 cargo: 0.45.0 @@ -40,4 +40,4 @@ cargo: 0.45.0 # looking at a beta source tarball and it's uncommented we'll shortly comment it # out. -dev: 1 +#dev: 1 diff --git a/src/test/codegen/drop.rs b/src/test/codegen/drop.rs index 99a791464ab89..0c7f3bb2020a9 100644 --- a/src/test/codegen/drop.rs +++ b/src/test/codegen/drop.rs @@ -23,13 +23,13 @@ pub fn droppy() { // FIXME(eddyb) the `void @` forces a match on the instruction, instead of the // comment, that's `; call core::intrinsics::drop_in_place::` // for the `v0` mangling, should switch to matching on that once `legacy` is gone. 
-// CHECK-NOT: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName -// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName -// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName // CHECK-NOT: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName +// CHECK-NOT: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName +// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName +// CHECK: invoke void @{{.*}}drop_in_place{{.*}}SomeUniqueName // CHECK: call void @{{.*}}drop_in_place{{.*}}SomeUniqueName // CHECK-NOT: {{(call|invoke) void @.*}}drop_in_place{{.*}}SomeUniqueName // The next line checks for the } that ends the function definition diff --git a/src/test/mir-opt/basic_assignment/rustc.main.SimplifyCfg-initial.after.mir b/src/test/mir-opt/basic_assignment/rustc.main.SimplifyCfg-initial.after.mir index c0a292332711b..de423cd907afe 100644 --- a/src/test/mir-opt/basic_assignment/rustc.main.SimplifyCfg-initial.after.mir +++ b/src/test/mir-opt/basic_assignment/rustc.main.SimplifyCfg-initial.after.mir @@ -47,14 +47,30 @@ fn main() -> () { StorageLive(_5); // scope 3 at $DIR/basic_assignment.rs:19:9: 19:15 StorageLive(_6); // scope 4 at $DIR/basic_assignment.rs:23:14: 23:20 _6 = move _4; // scope 4 at $DIR/basic_assignment.rs:23:14: 23:20 - replace(_5 <- move _6) -> [return: bb1, unwind: bb5]; // scope 4 at $DIR/basic_assignment.rs:23:5: 23:11 + replace(_5 <- move _6) -> [return: bb2, unwind: bb5]; // scope 4 at $DIR/basic_assignment.rs:23:5: 23:11 } - bb1: { - drop(_6) -> [return: bb2, unwind: bb6]; // scope 4 at $DIR/basic_assignment.rs:23:19: 23:20 + bb1 (cleanup): { + resume; // scope 0 at $DIR/basic_assignment.rs:10:1: 24:2 } bb2: { + drop(_6) -> [return: bb6, unwind: bb4]; // scope 4 at $DIR/basic_assignment.rs:23:19: 23:20 + } + + bb3 (cleanup): { + drop(_4) -> bb1; // scope 2 at $DIR/basic_assignment.rs:24:1: 24:2 + } + + bb4 (cleanup): { + drop(_5) -> bb3; // scope 3 at $DIR/basic_assignment.rs:24:1: 24:2 + } + + bb5 (cleanup): { + drop(_6) -> bb4; // scope 4 at $DIR/basic_assignment.rs:23:19: 23:20 + } + + bb6: { StorageDead(_6); // scope 4 at $DIR/basic_assignment.rs:23:19: 23:20 _0 = const (); // scope 0 at $DIR/basic_assignment.rs:10:11: 24:2 // ty::Const @@ -63,34 +79,18 @@ fn main() -> () { // mir::Constant // + span: $DIR/basic_assignment.rs:10:11: 24:2 // + literal: Const { ty: (), val: Value(Scalar()) } - drop(_5) -> [return: bb3, unwind: bb7]; // scope 3 at $DIR/basic_assignment.rs:24:1: 24:2 + drop(_5) -> [return: bb7, unwind: bb3]; // scope 3 at $DIR/basic_assignment.rs:24:1: 24:2 } - bb3: { + bb7: { StorageDead(_5); // scope 3 at $DIR/basic_assignment.rs:24:1: 24:2 - drop(_4) -> [return: bb4, unwind: bb8]; // scope 2 at $DIR/basic_assignment.rs:24:1: 24:2 + drop(_4) -> [return: bb8, unwind: bb1]; // scope 2 at $DIR/basic_assignment.rs:24:1: 24:2 } - bb4: { + bb8: { StorageDead(_4); // scope 2 at $DIR/basic_assignment.rs:24:1: 24:2 StorageDead(_2); // scope 1 at $DIR/basic_assignment.rs:24:1: 24:2 StorageDead(_1); // scope 0 at $DIR/basic_assignment.rs:24:1: 24:2 return; // scope 0 at $DIR/basic_assignment.rs:24:2: 24:2 } - - bb5 (cleanup): { - drop(_6) -> bb6; // scope 4 at $DIR/basic_assignment.rs:23:19: 23:20 - } - - bb6 (cleanup): { - drop(_5) -> bb7; // scope 3 at $DIR/basic_assignment.rs:24:1: 24:2 - } - - bb7 (cleanup): { - drop(_4) -> bb8; // scope 2 at 
$DIR/basic_assignment.rs:24:1: 24:2 - } - - bb8 (cleanup): { - resume; // scope 0 at $DIR/basic_assignment.rs:10:1: 24:2 - } } diff --git a/src/test/mir-opt/box_expr/rustc.main.ElaborateDrops.before.mir b/src/test/mir-opt/box_expr/rustc.main.ElaborateDrops.before.mir index 9d5b9be363d3f..259501c7de951 100644 --- a/src/test/mir-opt/box_expr/rustc.main.ElaborateDrops.before.mir +++ b/src/test/mir-opt/box_expr/rustc.main.ElaborateDrops.before.mir @@ -14,7 +14,7 @@ fn main() -> () { StorageLive(_1); // scope 0 at $DIR/box_expr.rs:7:9: 7:10 StorageLive(_2); // scope 0 at $DIR/box_expr.rs:7:13: 7:25 _2 = Box(S); // scope 0 at $DIR/box_expr.rs:7:13: 7:25 - (*_2) = const S::new() -> [return: bb1, unwind: bb7]; // scope 0 at $DIR/box_expr.rs:7:17: 7:25 + (*_2) = const S::new() -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/box_expr.rs:7:17: 7:25 // ty::Const // + ty: fn() -> S {S::new} // + val: Value(Scalar()) @@ -23,17 +23,25 @@ fn main() -> () { // + literal: Const { ty: fn() -> S {S::new}, val: Value(Scalar()) } } - bb1: { - _1 = move _2; // scope 0 at $DIR/box_expr.rs:7:13: 7:25 - drop(_2) -> bb2; // scope 0 at $DIR/box_expr.rs:7:24: 7:25 + bb1 (cleanup): { + resume; // scope 0 at $DIR/box_expr.rs:6:1: 9:2 } bb2: { + _1 = move _2; // scope 0 at $DIR/box_expr.rs:7:13: 7:25 + drop(_2) -> bb4; // scope 0 at $DIR/box_expr.rs:7:24: 7:25 + } + + bb3 (cleanup): { + drop(_2) -> bb1; // scope 0 at $DIR/box_expr.rs:7:24: 7:25 + } + + bb4: { StorageDead(_2); // scope 0 at $DIR/box_expr.rs:7:24: 7:25 StorageLive(_3); // scope 1 at $DIR/box_expr.rs:8:5: 8:12 StorageLive(_4); // scope 1 at $DIR/box_expr.rs:8:10: 8:11 _4 = move _1; // scope 1 at $DIR/box_expr.rs:8:10: 8:11 - _3 = const std::mem::drop::>(move _4) -> [return: bb3, unwind: bb5]; // scope 1 at $DIR/box_expr.rs:8:5: 8:12 + _3 = const std::mem::drop::>(move _4) -> [return: bb5, unwind: bb7]; // scope 1 at $DIR/box_expr.rs:8:5: 8:12 // ty::Const // + ty: fn(std::boxed::Box) {std::mem::drop::>} // + val: Value(Scalar()) @@ -42,7 +50,7 @@ fn main() -> () { // + literal: Const { ty: fn(std::boxed::Box) {std::mem::drop::>}, val: Value(Scalar()) } } - bb3: { + bb5: { StorageDead(_4); // scope 1 at $DIR/box_expr.rs:8:11: 8:12 StorageDead(_3); // scope 1 at $DIR/box_expr.rs:8:12: 8:13 _0 = const (); // scope 0 at $DIR/box_expr.rs:6:11: 9:2 @@ -52,27 +60,19 @@ fn main() -> () { // mir::Constant // + span: $DIR/box_expr.rs:6:11: 9:2 // + literal: Const { ty: (), val: Value(Scalar()) } - drop(_1) -> bb4; // scope 0 at $DIR/box_expr.rs:9:1: 9:2 - } - - bb4: { - StorageDead(_1); // scope 0 at $DIR/box_expr.rs:9:1: 9:2 - return; // scope 0 at $DIR/box_expr.rs:9:2: 9:2 - } - - bb5 (cleanup): { - drop(_4) -> bb6; // scope 1 at $DIR/box_expr.rs:8:11: 8:12 + drop(_1) -> bb8; // scope 0 at $DIR/box_expr.rs:9:1: 9:2 } bb6 (cleanup): { - drop(_1) -> bb8; // scope 0 at $DIR/box_expr.rs:9:1: 9:2 + drop(_1) -> bb1; // scope 0 at $DIR/box_expr.rs:9:1: 9:2 } bb7 (cleanup): { - drop(_2) -> bb8; // scope 0 at $DIR/box_expr.rs:7:24: 7:25 + drop(_4) -> bb6; // scope 1 at $DIR/box_expr.rs:8:11: 8:12 } - bb8 (cleanup): { - resume; // scope 0 at $DIR/box_expr.rs:6:1: 9:2 + bb8: { + StorageDead(_1); // scope 0 at $DIR/box_expr.rs:9:1: 9:2 + return; // scope 0 at $DIR/box_expr.rs:9:2: 9:2 } } diff --git a/src/test/mir-opt/const-promotion-extern-static/rustc.BAR.PromoteTemps.diff b/src/test/mir-opt/const-promotion-extern-static/rustc.BAR.PromoteTemps.diff index 8eb8d4c667b1c..5c192979a8696 100644 --- a/src/test/mir-opt/const-promotion-extern-static/rustc.BAR.PromoteTemps.diff +++ 
b/src/test/mir-opt/const-promotion-extern-static/rustc.BAR.PromoteTemps.diff @@ -33,7 +33,7 @@ + // + literal: Const { ty: &[&i32; 1], val: Unevaluated(DefId(0:6 ~ const_promotion_extern_static[317d]::BAR[0]), [], Some(promoted[0])) } + _2 = &(*_6); // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:35 _1 = move _2 as &[&i32] (Pointer(Unsize)); // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:35 - _0 = const core::slice::::as_ptr(move _1) -> [return: bb1, unwind: bb2]; // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:44 + _0 = const core::slice::::as_ptr(move _1) -> [return: bb2, unwind: bb1]; // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:44 // ty::Const // + ty: for<'r> fn(&'r [&i32]) -> *const &i32 {core::slice::::as_ptr} // + val: Value(Scalar()) @@ -42,15 +42,15 @@ // + literal: Const { ty: for<'r> fn(&'r [&i32]) -> *const &i32 {core::slice::::as_ptr}, val: Value(Scalar()) } } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/const-promotion-extern-static.rs:9:1: 9:45 + } + + bb2: { - StorageDead(_5); // scope 0 at $DIR/const-promotion-extern-static.rs:9:43: 9:44 - StorageDead(_3); // scope 0 at $DIR/const-promotion-extern-static.rs:9:43: 9:44 return; // scope 0 at $DIR/const-promotion-extern-static.rs:9:1: 9:45 } - - bb2 (cleanup): { - resume; // scope 0 at $DIR/const-promotion-extern-static.rs:9:1: 9:45 - } - } - - alloc0 (static: Y, size: 4, align: 4) { diff --git a/src/test/mir-opt/const-promotion-extern-static/rustc.FOO.PromoteTemps.diff b/src/test/mir-opt/const-promotion-extern-static/rustc.FOO.PromoteTemps.diff index 781aa3c5500c0..649cea6493e45 100644 --- a/src/test/mir-opt/const-promotion-extern-static/rustc.FOO.PromoteTemps.diff +++ b/src/test/mir-opt/const-promotion-extern-static/rustc.FOO.PromoteTemps.diff @@ -35,7 +35,7 @@ + // + literal: Const { ty: &[&i32; 1], val: Unevaluated(DefId(0:7 ~ const_promotion_extern_static[317d]::FOO[0]), [], Some(promoted[0])) } + _2 = &(*_6); // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:46 _1 = move _2 as &[&i32] (Pointer(Unsize)); // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:46 - _0 = const core::slice::::as_ptr(move _1) -> [return: bb1, unwind: bb2]; // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:55 + _0 = const core::slice::::as_ptr(move _1) -> [return: bb2, unwind: bb1]; // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:55 // ty::Const // + ty: for<'r> fn(&'r [&i32]) -> *const &i32 {core::slice::::as_ptr} // + val: Value(Scalar()) @@ -44,15 +44,15 @@ // + literal: Const { ty: for<'r> fn(&'r [&i32]) -> *const &i32 {core::slice::::as_ptr}, val: Value(Scalar()) } } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/const-promotion-extern-static.rs:13:1: 13:56 + } + + bb2: { - StorageDead(_5); // scope 0 at $DIR/const-promotion-extern-static.rs:13:54: 13:55 - StorageDead(_3); // scope 0 at $DIR/const-promotion-extern-static.rs:13:54: 13:55 return; // scope 0 at $DIR/const-promotion-extern-static.rs:13:1: 13:56 } - - bb2 (cleanup): { - resume; // scope 0 at $DIR/const-promotion-extern-static.rs:13:1: 13:56 - } } - - alloc2 (extern static: X) diff --git a/src/test/mir-opt/const_prop/boxes/rustc.main.ConstProp.diff b/src/test/mir-opt/const_prop/boxes/rustc.main.ConstProp.diff index c9b082ea6e30e..16f937f3e7b5e 100644 --- a/src/test/mir-opt/const_prop/boxes/rustc.main.ConstProp.diff +++ b/src/test/mir-opt/const_prop/boxes/rustc.main.ConstProp.diff @@ -35,10 +35,14 @@ // + span: $DIR/boxes.rs:12:25: 12:26 // + literal: Const { 
ty: i32, val: Value(Scalar(0x00000000)) } StorageDead(_2); // scope 0 at $DIR/boxes.rs:12:25: 12:26 - drop(_3) -> [return: bb1, unwind: bb2]; // scope 0 at $DIR/boxes.rs:12:26: 12:27 + drop(_3) -> [return: bb2, unwind: bb1]; // scope 0 at $DIR/boxes.rs:12:26: 12:27 } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/boxes.rs:11:1: 13:2 + } + + bb2: { StorageDead(_3); // scope 0 at $DIR/boxes.rs:12:26: 12:27 _0 = const (); // scope 0 at $DIR/boxes.rs:11:11: 13:2 // ty::Const @@ -50,9 +54,5 @@ StorageDead(_1); // scope 0 at $DIR/boxes.rs:13:1: 13:2 return; // scope 0 at $DIR/boxes.rs:13:2: 13:2 } - - bb2 (cleanup): { - resume; // scope 0 at $DIR/boxes.rs:11:1: 13:2 - } } diff --git a/src/test/mir-opt/generator-drop-cleanup/rustc.main-{{closure}}.generator_drop.0.mir b/src/test/mir-opt/generator-drop-cleanup/rustc.main-{{closure}}.generator_drop.0.mir index 6a32a42d85a82..3e7083ff62ecd 100644 --- a/src/test/mir-opt/generator-drop-cleanup/rustc.main-{{closure}}.generator_drop.0.mir +++ b/src/test/mir-opt/generator-drop-cleanup/rustc.main-{{closure}}.generator_drop.0.mir @@ -21,31 +21,31 @@ fn main::{{closure}}#0(_1: *mut [generator@$DIR/generator-drop-cleanup.rs:10:15: bb0: { _9 = discriminant((*_1)); // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 - switchInt(move _9) -> [0u32: bb7, 3u32: bb10, otherwise: bb11]; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 + switchInt(move _9) -> [0u32: bb7, 3u32: bb11, otherwise: bb12]; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 } - bb1: { - StorageDead(_5); // scope 1 at $DIR/generator-drop-cleanup.rs:12:13: 12:14 - StorageDead(_4); // scope 1 at $DIR/generator-drop-cleanup.rs:12:14: 12:15 - drop((((*_1) as variant#3).0: std::string::String)) -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 + bb1 (cleanup): { + resume; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 } - bb2: { + bb2 (cleanup): { nop; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 goto -> bb8; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 } bb3: { - return; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 + StorageDead(_5); // scope 1 at $DIR/generator-drop-cleanup.rs:12:13: 12:14 + StorageDead(_4); // scope 1 at $DIR/generator-drop-cleanup.rs:12:14: 12:15 + drop((((*_1) as variant#3).0: std::string::String)) -> [return: bb4, unwind: bb2]; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 } - bb4 (cleanup): { - resume; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 + bb4: { + nop; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 + goto -> bb9; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 } - bb5 (cleanup): { - nop; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 - goto -> bb4; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 + bb5: { + return; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 } bb6: { @@ -53,24 +53,28 @@ fn main::{{closure}}#0(_1: *mut [generator@$DIR/generator-drop-cleanup.rs:10:15: } bb7: { - goto -> bb9; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 + goto -> bb10; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 } - bb8: { - goto -> bb3; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 + bb8 (cleanup): { + goto -> bb1; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 } bb9: { - goto -> bb6; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 + goto -> bb5; // scope 0 at $DIR/generator-drop-cleanup.rs:13:5: 13:6 } bb10: { + goto -> bb6; // scope 0 at 
$DIR/generator-drop-cleanup.rs:10:15: 13:6 + } + + bb11: { StorageLive(_4); // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 StorageLive(_5); // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 - goto -> bb1; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 + goto -> bb3; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 } - bb11: { + bb12: { return; // scope 0 at $DIR/generator-drop-cleanup.rs:10:15: 13:6 } } diff --git a/src/test/mir-opt/generator-storage-dead-unwind/rustc.main-{{closure}}.StateTransform.before.mir b/src/test/mir-opt/generator-storage-dead-unwind/rustc.main-{{closure}}.StateTransform.before.mir index e9e977a611b19..06645860d842d 100644 --- a/src/test/mir-opt/generator-storage-dead-unwind/rustc.main-{{closure}}.StateTransform.before.mir +++ b/src/test/mir-opt/generator-storage-dead-unwind/rustc.main-{{closure}}.StateTransform.before.mir @@ -39,16 +39,20 @@ yields () StorageLive(_5); // scope 2 at $DIR/generator-storage-dead-unwind.rs:25:9: 25:14 StorageLive(_6); // scope 2 at $DIR/generator-storage-dead-unwind.rs:25:9: 25:14 _6 = (); // scope 2 at $DIR/generator-storage-dead-unwind.rs:25:9: 25:14 - _5 = yield(move _6) -> [resume: bb1, drop: bb5]; // scope 2 at $DIR/generator-storage-dead-unwind.rs:25:9: 25:14 + _5 = yield(move _6) -> [resume: bb2, drop: bb4]; // scope 2 at $DIR/generator-storage-dead-unwind.rs:25:9: 25:14 } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/generator-storage-dead-unwind.rs:22:16: 28:6 + } + + bb2: { StorageDead(_6); // scope 2 at $DIR/generator-storage-dead-unwind.rs:25:13: 25:14 StorageDead(_5); // scope 2 at $DIR/generator-storage-dead-unwind.rs:25:14: 25:15 StorageLive(_7); // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:9: 26:16 StorageLive(_8); // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:14: 26:15 _8 = move _3; // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:14: 26:15 - _7 = const take::(move _8) -> [return: bb2, unwind: bb9]; // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:9: 26:16 + _7 = const take::(move _8) -> [return: bb7, unwind: bb9]; // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:9: 26:16 // ty::Const // + ty: fn(Foo) {take::} // + val: Value(Scalar()) @@ -57,80 +61,76 @@ yields () // + literal: Const { ty: fn(Foo) {take::}, val: Value(Scalar()) } } - bb2: { - StorageDead(_8); // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:15: 26:16 - StorageDead(_7); // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:16: 26:17 - StorageLive(_9); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:9: 27:16 - StorageLive(_10); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:14: 27:15 - _10 = move _4; // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:14: 27:15 - _9 = const take::(move _10) -> [return: bb3, unwind: bb8]; // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:9: 27:16 - // ty::Const - // + ty: fn(Bar) {take::} - // + val: Value(Scalar()) - // mir::Constant - // + span: $DIR/generator-storage-dead-unwind.rs:27:9: 27:13 - // + literal: Const { ty: fn(Bar) {take::}, val: Value(Scalar()) } - } - - bb3: { - StorageDead(_10); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:15: 27:16 - StorageDead(_9); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:16: 27:17 - _0 = const (); // scope 0 at $DIR/generator-storage-dead-unwind.rs:22:19: 28:6 - // ty::Const - // + ty: () - // + val: Value(Scalar()) - // mir::Constant - // + span: $DIR/generator-storage-dead-unwind.rs:22:19: 28:6 - // + literal: Const { ty: (), val: 
Value(Scalar()) } - StorageDead(_4); // scope 1 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 + bb3 (cleanup): { StorageDead(_3); // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 - drop(_1) -> [return: bb4, unwind: bb11]; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 + drop(_1) -> bb1; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 } bb4: { - return; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:6: 28:6 - } - - bb5: { StorageDead(_6); // scope 2 at $DIR/generator-storage-dead-unwind.rs:25:13: 25:14 StorageDead(_5); // scope 2 at $DIR/generator-storage-dead-unwind.rs:25:14: 25:15 StorageDead(_4); // scope 1 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 - drop(_3) -> [return: bb6, unwind: bb12]; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 + drop(_3) -> [return: bb5, unwind: bb3]; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 } - bb6: { + bb5: { StorageDead(_3); // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 - drop(_1) -> [return: bb7, unwind: bb11]; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 + drop(_1) -> [return: bb6, unwind: bb1]; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 } - bb7: { + bb6: { generator_drop; // scope 0 at $DIR/generator-storage-dead-unwind.rs:22:16: 28:6 } + bb7: { + StorageDead(_8); // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:15: 26:16 + StorageDead(_7); // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:16: 26:17 + StorageLive(_9); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:9: 27:16 + StorageLive(_10); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:14: 27:15 + _10 = move _4; // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:14: 27:15 + _9 = const take::(move _10) -> [return: bb10, unwind: bb11]; // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:9: 27:16 + // ty::Const + // + ty: fn(Bar) {take::} + // + val: Value(Scalar()) + // mir::Constant + // + span: $DIR/generator-storage-dead-unwind.rs:27:9: 27:13 + // + literal: Const { ty: fn(Bar) {take::}, val: Value(Scalar()) } + } + bb8 (cleanup): { - StorageDead(_10); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:15: 27:16 - StorageDead(_9); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:16: 27:17 - goto -> bb10; // scope 2 at $DIR/generator-storage-dead-unwind.rs:1:1: 1:1 + StorageDead(_4); // scope 1 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 + StorageDead(_3); // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 + drop(_1) -> bb1; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 } bb9 (cleanup): { StorageDead(_8); // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:15: 26:16 StorageDead(_7); // scope 2 at $DIR/generator-storage-dead-unwind.rs:26:16: 26:17 - goto -> bb10; // scope 2 at $DIR/generator-storage-dead-unwind.rs:1:1: 1:1 + goto -> bb8; // scope 2 at $DIR/generator-storage-dead-unwind.rs:1:1: 1:1 } - bb10 (cleanup): { + bb10: { + StorageDead(_10); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:15: 27:16 + StorageDead(_9); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:16: 27:17 + _0 = const (); // scope 0 at $DIR/generator-storage-dead-unwind.rs:22:19: 28:6 + // ty::Const + // + ty: () + // + val: Value(Scalar()) + // mir::Constant + // + span: $DIR/generator-storage-dead-unwind.rs:22:19: 28:6 + // + literal: Const { ty: (), val: Value(Scalar()) } StorageDead(_4); // scope 1 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 StorageDead(_3); // 
scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 - drop(_1) -> bb11; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 + drop(_1) -> [return: bb12, unwind: bb1]; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 } bb11 (cleanup): { - resume; // scope 0 at $DIR/generator-storage-dead-unwind.rs:22:16: 28:6 + StorageDead(_10); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:15: 27:16 + StorageDead(_9); // scope 2 at $DIR/generator-storage-dead-unwind.rs:27:16: 27:17 + goto -> bb8; // scope 2 at $DIR/generator-storage-dead-unwind.rs:1:1: 1:1 } - bb12 (cleanup): { - StorageDead(_3); // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 - drop(_1) -> bb11; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:5: 28:6 + bb12: { + return; // scope 0 at $DIR/generator-storage-dead-unwind.rs:28:6: 28:6 } } diff --git a/src/test/mir-opt/graphviz/rustc.main.mir_map.0.dot b/src/test/mir-opt/graphviz/rustc.main.mir_map.0.dot index 1a66b53c69bc0..f5d8b84812a3e 100644 --- a/src/test/mir-opt/graphviz/rustc.main.mir_map.0.dot +++ b/src/test/mir-opt/graphviz/rustc.main.mir_map.0.dot @@ -3,5 +3,8 @@ digraph Mir_0_3 { node [fontname="monospace"]; edge [fontname="monospace"]; label=>; - bb0__0_3 [shape="none", label=<
<table border="0" cellborder="1" cellspacing="0"><tr><td bgcolor="gray" align="center" colspan="1">0</td></tr><tr><td align="left" balign="left">_0 = const ()<br/></td></tr><tr><td align="left">return</td></tr></table>>];
+ bb0__0_3 [shape="none", label=<<table border="0" cellborder="1" cellspacing="0"><tr><td bgcolor="gray" align="center" colspan="1">0</td></tr><tr><td align="left" balign="left">_0 = const ()<br/></td></tr><tr><td align="left">goto</td></tr></table>>];
+ bb1__0_3 [shape="none", label=<<table border="0" cellborder="1" cellspacing="0"><tr><td bgcolor="gray" align="center" colspan="1">1</td></tr><tr><td align="left">resume</td></tr></table>>];
+ bb2__0_3 [shape="none", label=<<table border="0" cellborder="1" cellspacing="0"><tr><td bgcolor="gray" align="center" colspan="1">2</td></tr><tr><td align="left">return</td></tr></table>
>]; + bb0__0_3 -> bb2__0_3 [label=""]; } diff --git a/src/test/mir-opt/inline/inline-into-box-place/32bit/rustc.main.Inline.diff b/src/test/mir-opt/inline/inline-into-box-place/32bit/rustc.main.Inline.diff index f31d5fae9ed37..50913de98b506 100644 --- a/src/test/mir-opt/inline/inline-into-box-place/32bit/rustc.main.Inline.diff +++ b/src/test/mir-opt/inline/inline-into-box-place/32bit/rustc.main.Inline.diff @@ -17,7 +17,7 @@ StorageLive(_1); // scope 0 at $DIR/inline-into-box-place.rs:8:9: 8:11 StorageLive(_2); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43 _2 = Box(std::vec::Vec); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43 -- (*_2) = const std::vec::Vec::::new() -> [return: bb1, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43 +- (*_2) = const std::vec::Vec::::new() -> [return: bb2, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43 + _4 = &mut (*_2); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43 + ((*_4).0: alloc::raw_vec::RawVec) = const alloc::raw_vec::RawVec:: { ptr: std::ptr::Unique:: { pointer: {0x4 as *const u32}, _marker: std::marker::PhantomData:: }, cap: 0usize, alloc: std::alloc::Global }; // scope 2 at $SRC_DIR/liballoc/vec.rs:LL:COL // ty::Const @@ -31,7 +31,11 @@ - // + literal: Const { ty: fn() -> std::vec::Vec {std::vec::Vec::::new}, val: Value(Scalar()) } - } - -- bb1: { +- bb1 (cleanup): { +- resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2 +- } +- +- bb2: { + // + span: $SRC_DIR/liballoc/vec.rs:LL:COL + // + user_ty: UserType(0) + // + literal: Const { ty: alloc::raw_vec::RawVec, val: Value(ByRef { alloc: Allocation { bytes: [4, 0, 0, 0, 0, 0, 0, 0], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [255], len: Size { raw: 8 } }, size: Size { raw: 8 }, align: Align { pow2: 2 }, mutability: Not, extra: () }, offset: Size { raw: 0 } }) } @@ -51,29 +55,28 @@ // mir::Constant // + span: $DIR/inline-into-box-place.rs:7:11: 9:2 // + literal: Const { ty: (), val: Value(Scalar()) } -- drop(_1) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 -+ drop(_1) -> [return: bb1, unwind: bb2]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 +- drop(_1) -> [return: bb3, unwind: bb1]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 ++ drop(_1) -> [return: bb2, unwind: bb1]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 } -- bb2: { -+ bb1: { - StorageDead(_1); // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 - return; // scope 0 at $DIR/inline-into-box-place.rs:9:2: 9:2 +- bb3: { +- StorageDead(_1); // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 +- return; // scope 0 at $DIR/inline-into-box-place.rs:9:2: 9:2 ++ bb1 (cleanup): { ++ resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2 } -- bb3 (cleanup): { -+ bb2 (cleanup): { - resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2 -- } -- - bb4 (cleanup): { -- _3 = const alloc::alloc::box_free::>(move (_2.0: std::ptr::Unique>)) -> bb3; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43 +- _3 = const alloc::alloc::box_free::>(move (_2.0: std::ptr::Unique>)) -> bb1; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43 - // ty::Const - // + ty: unsafe fn(std::ptr::Unique>) {alloc::alloc::box_free::>} - // + val: Value(Scalar()) - // mir::Constant - // + span: $DIR/inline-into-box-place.rs:8:42: 8:43 - // + literal: Const { ty: unsafe fn(std::ptr::Unique>) {alloc::alloc::box_free::>}, val: Value(Scalar()) } ++ bb2: { ++ StorageDead(_1); // scope 0 at 
$DIR/inline-into-box-place.rs:9:1: 9:2 ++ return; // scope 0 at $DIR/inline-into-box-place.rs:9:2: 9:2 } } diff --git a/src/test/mir-opt/inline/inline-into-box-place/64bit/rustc.main.Inline.diff b/src/test/mir-opt/inline/inline-into-box-place/64bit/rustc.main.Inline.diff index 324ec2d7c5bcd..7a1b6460c5bb3 100644 --- a/src/test/mir-opt/inline/inline-into-box-place/64bit/rustc.main.Inline.diff +++ b/src/test/mir-opt/inline/inline-into-box-place/64bit/rustc.main.Inline.diff @@ -17,7 +17,7 @@ StorageLive(_1); // scope 0 at $DIR/inline-into-box-place.rs:8:9: 8:11 StorageLive(_2); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43 _2 = Box(std::vec::Vec); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43 -- (*_2) = const std::vec::Vec::::new() -> [return: bb1, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43 +- (*_2) = const std::vec::Vec::::new() -> [return: bb2, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43 + _4 = &mut (*_2); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43 + ((*_4).0: alloc::raw_vec::RawVec) = const alloc::raw_vec::RawVec:: { ptr: std::ptr::Unique:: { pointer: {0x4 as *const u32}, _marker: std::marker::PhantomData:: }, cap: 0usize, alloc: std::alloc::Global }; // scope 2 at $SRC_DIR/liballoc/vec.rs:LL:COL // ty::Const @@ -31,7 +31,11 @@ - // + literal: Const { ty: fn() -> std::vec::Vec {std::vec::Vec::::new}, val: Value(Scalar()) } - } - -- bb1: { +- bb1 (cleanup): { +- resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2 +- } +- +- bb2: { + // + span: $SRC_DIR/liballoc/vec.rs:LL:COL + // + user_ty: UserType(0) + // + literal: Const { ty: alloc::raw_vec::RawVec, val: Value(ByRef { alloc: Allocation { bytes: [4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [65535], len: Size { raw: 16 } }, size: Size { raw: 16 }, align: Align { pow2: 3 }, mutability: Not, extra: () }, offset: Size { raw: 0 } }) } @@ -51,29 +55,28 @@ // mir::Constant // + span: $DIR/inline-into-box-place.rs:7:11: 9:2 // + literal: Const { ty: (), val: Value(Scalar()) } -- drop(_1) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 -+ drop(_1) -> [return: bb1, unwind: bb2]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 +- drop(_1) -> [return: bb3, unwind: bb1]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 ++ drop(_1) -> [return: bb2, unwind: bb1]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 } -- bb2: { -+ bb1: { - StorageDead(_1); // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 - return; // scope 0 at $DIR/inline-into-box-place.rs:9:2: 9:2 +- bb3: { +- StorageDead(_1); // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 +- return; // scope 0 at $DIR/inline-into-box-place.rs:9:2: 9:2 ++ bb1 (cleanup): { ++ resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2 } -- bb3 (cleanup): { -+ bb2 (cleanup): { - resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2 -- } -- - bb4 (cleanup): { -- _3 = const alloc::alloc::box_free::>(move (_2.0: std::ptr::Unique>)) -> bb3; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43 +- _3 = const alloc::alloc::box_free::>(move (_2.0: std::ptr::Unique>)) -> bb1; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43 - // ty::Const - // + ty: unsafe fn(std::ptr::Unique>) {alloc::alloc::box_free::>} - // + val: Value(Scalar()) - // mir::Constant - // + span: $DIR/inline-into-box-place.rs:8:42: 8:43 - // + literal: Const { ty: unsafe fn(std::ptr::Unique>) 
{alloc::alloc::box_free::>}, val: Value(Scalar()) } ++ bb2: { ++ StorageDead(_1); // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2 ++ return; // scope 0 at $DIR/inline-into-box-place.rs:9:2: 9:2 } } diff --git a/src/test/mir-opt/issue-38669/rustc.main.SimplifyCfg-initial.after.mir b/src/test/mir-opt/issue-38669/rustc.main.SimplifyCfg-initial.after.mir index 525254b15d496..fa7bd2563ae44 100644 --- a/src/test/mir-opt/issue-38669/rustc.main.SimplifyCfg-initial.after.mir +++ b/src/test/mir-opt/issue-38669/rustc.main.SimplifyCfg-initial.after.mir @@ -21,26 +21,30 @@ fn main() -> () { // + span: $DIR/issue-38669.rs:5:28: 5:33 // + literal: Const { ty: bool, val: Value(Scalar(0x00)) } FakeRead(ForLet, _1); // scope 0 at $DIR/issue-38669.rs:5:9: 5:25 - goto -> bb1; // scope 1 at $DIR/issue-38669.rs:6:5: 11:6 + goto -> bb2; // scope 1 at $DIR/issue-38669.rs:6:5: 11:6 } - bb1: { - falseUnwind -> [real: bb2, cleanup: bb6]; // scope 1 at $DIR/issue-38669.rs:6:5: 11:6 + bb1 (cleanup): { + resume; // scope 0 at $DIR/issue-38669.rs:4:1: 12:2 } bb2: { + falseUnwind -> [real: bb3, cleanup: bb1]; // scope 1 at $DIR/issue-38669.rs:6:5: 11:6 + } + + bb3: { StorageLive(_3); // scope 1 at $DIR/issue-38669.rs:7:9: 9:10 StorageLive(_4); // scope 1 at $DIR/issue-38669.rs:7:12: 7:24 _4 = _1; // scope 1 at $DIR/issue-38669.rs:7:12: 7:24 FakeRead(ForMatchedPlace, _4); // scope 1 at $DIR/issue-38669.rs:7:12: 7:24 - switchInt(_4) -> [false: bb4, otherwise: bb3]; // scope 1 at $DIR/issue-38669.rs:7:9: 9:10 + switchInt(_4) -> [false: bb5, otherwise: bb4]; // scope 1 at $DIR/issue-38669.rs:7:9: 9:10 } - bb3: { - falseEdges -> [real: bb5, imaginary: bb4]; // scope 1 at $DIR/issue-38669.rs:7:9: 9:10 + bb4: { + falseEdges -> [real: bb6, imaginary: bb5]; // scope 1 at $DIR/issue-38669.rs:7:9: 9:10 } - bb4: { + bb5: { _3 = const (); // scope 1 at $DIR/issue-38669.rs:7:9: 9:10 // ty::Const // + ty: () @@ -64,10 +68,10 @@ fn main() -> () { // mir::Constant // + span: $DIR/issue-38669.rs:6:10: 11:6 // + literal: Const { ty: (), val: Value(Scalar()) } - goto -> bb1; // scope 1 at $DIR/issue-38669.rs:6:5: 11:6 + goto -> bb2; // scope 1 at $DIR/issue-38669.rs:6:5: 11:6 } - bb5: { + bb6: { _0 = const (); // scope 1 at $DIR/issue-38669.rs:8:13: 8:18 // ty::Const // + ty: () @@ -80,8 +84,4 @@ fn main() -> () { StorageDead(_1); // scope 0 at $DIR/issue-38669.rs:12:1: 12:2 return; // scope 0 at $DIR/issue-38669.rs:12:2: 12:2 } - - bb6 (cleanup): { - resume; // scope 0 at $DIR/issue-38669.rs:4:1: 12:2 - } } diff --git a/src/test/mir-opt/issue-41110/rustc.main.ElaborateDrops.after.mir b/src/test/mir-opt/issue-41110/rustc.main.ElaborateDrops.after.mir index 3272ca8454e9c..77763f2d3a0d1 100644 --- a/src/test/mir-opt/issue-41110/rustc.main.ElaborateDrops.after.mir +++ b/src/test/mir-opt/issue-41110/rustc.main.ElaborateDrops.after.mir @@ -32,7 +32,7 @@ fn main() -> () { StorageLive(_3); // scope 0 at $DIR/issue-41110.rs:8:21: 8:27 StorageLive(_4); // scope 0 at $DIR/issue-41110.rs:8:21: 8:22 _4 = S; // scope 0 at $DIR/issue-41110.rs:8:21: 8:22 - _3 = const S::id(move _4) -> [return: bb1, unwind: bb4]; // scope 0 at $DIR/issue-41110.rs:8:21: 8:27 + _3 = const S::id(move _4) -> [return: bb2, unwind: bb4]; // scope 0 at $DIR/issue-41110.rs:8:21: 8:27 // ty::Const // + ty: fn(S) -> S {S::id} // + val: Value(Scalar()) @@ -41,7 +41,11 @@ fn main() -> () { // + literal: Const { ty: fn(S) -> S {S::id}, val: Value(Scalar()) } } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/issue-41110.rs:7:1: 9:2 + } + + bb2: { StorageDead(_4); // scope 
0 at $DIR/issue-41110.rs:8:26: 8:27 _5 = const false; // scope 0 at $DIR/issue-41110.rs:8:13: 8:28 // ty::Const @@ -50,7 +54,7 @@ fn main() -> () { // mir::Constant // + span: $DIR/issue-41110.rs:8:13: 8:28 // + literal: Const { ty: bool, val: Value(Scalar(0x00)) } - _1 = const S::other(move _2, move _3) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/issue-41110.rs:8:13: 8:28 + _1 = const S::other(move _2, move _3) -> [return: bb6, unwind: bb5]; // scope 0 at $DIR/issue-41110.rs:8:13: 8:28 // ty::Const // + ty: fn(S, S) {S::other} // + val: Value(Scalar()) @@ -59,7 +63,19 @@ fn main() -> () { // + literal: Const { ty: fn(S, S) {S::other}, val: Value(Scalar()) } } - bb2: { + bb3 (cleanup): { + goto -> bb9; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 + } + + bb4 (cleanup): { + goto -> bb3; // scope 0 at $DIR/issue-41110.rs:8:26: 8:27 + } + + bb5 (cleanup): { + goto -> bb3; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 + } + + bb6: { StorageDead(_3); // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 _5 = const false; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 // ty::Const @@ -80,27 +96,22 @@ fn main() -> () { return; // scope 0 at $DIR/issue-41110.rs:9:2: 9:2 } - bb3 (cleanup): { - goto -> bb5; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 - } - - bb4 (cleanup): { - goto -> bb5; // scope 0 at $DIR/issue-41110.rs:8:26: 8:27 - } - - bb5 (cleanup): { - goto -> bb8; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 - } - - bb6 (cleanup): { - resume; // scope 0 at $DIR/issue-41110.rs:7:1: 9:2 - } - bb7 (cleanup): { - drop(_2) -> bb6; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 + drop(_2) -> bb1; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 } bb8 (cleanup): { - switchInt(_5) -> [false: bb6, otherwise: bb7]; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 + _5 = const false; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 + // ty::Const + // + ty: bool + // + val: Value(Scalar(0x00)) + // mir::Constant + // + span: $DIR/issue-41110.rs:8:27: 8:28 + // + literal: Const { ty: bool, val: Value(Scalar(0x00)) } + goto -> bb7; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 + } + + bb9 (cleanup): { + switchInt(_5) -> [false: bb1, otherwise: bb8]; // scope 0 at $DIR/issue-41110.rs:8:27: 8:28 } } diff --git a/src/test/mir-opt/issue-41110/rustc.test.ElaborateDrops.after.mir b/src/test/mir-opt/issue-41110/rustc.test.ElaborateDrops.after.mir index 99da0398d9639..a99846bd15daf 100644 --- a/src/test/mir-opt/issue-41110/rustc.test.ElaborateDrops.after.mir +++ b/src/test/mir-opt/issue-41110/rustc.test.ElaborateDrops.after.mir @@ -37,7 +37,7 @@ fn test() -> () { StorageLive(_3); // scope 2 at $DIR/issue-41110.rs:17:5: 17:12 StorageLive(_4); // scope 2 at $DIR/issue-41110.rs:17:10: 17:11 _4 = move _2; // scope 2 at $DIR/issue-41110.rs:17:10: 17:11 - _3 = const std::mem::drop::(move _4) -> [return: bb1, unwind: bb7]; // scope 2 at $DIR/issue-41110.rs:17:5: 17:12 + _3 = const std::mem::drop::(move _4) -> [return: bb2, unwind: bb5]; // scope 2 at $DIR/issue-41110.rs:17:5: 17:12 // ty::Const // + ty: fn(S) {std::mem::drop::} // + val: Value(Scalar()) @@ -46,7 +46,11 @@ fn test() -> () { // + literal: Const { ty: fn(S) {std::mem::drop::}, val: Value(Scalar()) } } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/issue-41110.rs:14:1: 19:2 + } + + bb2: { StorageDead(_4); // scope 2 at $DIR/issue-41110.rs:17:11: 17:12 StorageDead(_3); // scope 2 at $DIR/issue-41110.rs:17:12: 17:13 StorageLive(_5); // scope 2 at $DIR/issue-41110.rs:18:9: 18:10 @@ -61,11 +65,27 @@ fn test() -> () { goto -> bb12; // scope 2 at 
$DIR/issue-41110.rs:18:5: 18:6 } - bb2: { - goto -> bb3; // scope 2 at $DIR/issue-41110.rs:18:9: 18:10 + bb3 (cleanup): { + goto -> bb15; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 + } + + bb4 (cleanup): { + goto -> bb3; // scope 1 at $DIR/issue-41110.rs:19:1: 19:2 + } + + bb5 (cleanup): { + goto -> bb4; // scope 2 at $DIR/issue-41110.rs:17:11: 17:12 + } + + bb6: { + goto -> bb8; // scope 2 at $DIR/issue-41110.rs:18:9: 18:10 + } + + bb7 (cleanup): { + goto -> bb4; // scope 2 at $DIR/issue-41110.rs:18:9: 18:10 } - bb3: { + bb8: { StorageDead(_5); // scope 2 at $DIR/issue-41110.rs:18:9: 18:10 _0 = const (); // scope 0 at $DIR/issue-41110.rs:14:15: 19:2 // ty::Const @@ -74,15 +94,15 @@ fn test() -> () { // mir::Constant // + span: $DIR/issue-41110.rs:14:15: 19:2 // + literal: Const { ty: (), val: Value(Scalar()) } - drop(_2) -> [return: bb4, unwind: bb9]; // scope 1 at $DIR/issue-41110.rs:19:1: 19:2 + drop(_2) -> [return: bb9, unwind: bb3]; // scope 1 at $DIR/issue-41110.rs:19:1: 19:2 } - bb4: { + bb9: { StorageDead(_2); // scope 1 at $DIR/issue-41110.rs:19:1: 19:2 - goto -> bb5; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 + goto -> bb10; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 } - bb5: { + bb10: { _6 = const false; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 // ty::Const // + ty: bool @@ -94,41 +114,32 @@ fn test() -> () { return; // scope 0 at $DIR/issue-41110.rs:19:2: 19:2 } - bb6 (cleanup): { - goto -> bb8; // scope 2 at $DIR/issue-41110.rs:18:9: 18:10 - } - - bb7 (cleanup): { - goto -> bb8; // scope 2 at $DIR/issue-41110.rs:17:11: 17:12 - } - - bb8 (cleanup): { - goto -> bb9; // scope 1 at $DIR/issue-41110.rs:19:1: 19:2 - } - - bb9 (cleanup): { - goto -> bb14; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 - } - - bb10 (cleanup): { - resume; // scope 0 at $DIR/issue-41110.rs:14:1: 19:2 - } - bb11 (cleanup): { _2 = move _5; // scope 2 at $DIR/issue-41110.rs:18:5: 18:6 - goto -> bb6; // scope 2 at $DIR/issue-41110.rs:18:5: 18:6 + goto -> bb7; // scope 2 at $DIR/issue-41110.rs:18:5: 18:6 } bb12: { _2 = move _5; // scope 2 at $DIR/issue-41110.rs:18:5: 18:6 - goto -> bb2; // scope 2 at $DIR/issue-41110.rs:18:5: 18:6 + goto -> bb6; // scope 2 at $DIR/issue-41110.rs:18:5: 18:6 } bb13 (cleanup): { - drop(_1) -> bb10; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 + drop(_1) -> bb1; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 } bb14 (cleanup): { - switchInt(_6) -> [false: bb10, otherwise: bb13]; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 + _6 = const false; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 + // ty::Const + // + ty: bool + // + val: Value(Scalar(0x00)) + // mir::Constant + // + span: $DIR/issue-41110.rs:19:1: 19:2 + // + literal: Const { ty: bool, val: Value(Scalar(0x00)) } + goto -> bb13; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 + } + + bb15 (cleanup): { + switchInt(_6) -> [false: bb1, otherwise: bb14]; // scope 0 at $DIR/issue-41110.rs:19:1: 19:2 } } diff --git a/src/test/mir-opt/issue-41697/32bit/rustc.{{impl}}-{{constant}}.SimplifyCfg-qualify-consts.after.mir b/src/test/mir-opt/issue-41697/32bit/rustc.{{impl}}-{{constant}}.SimplifyCfg-qualify-consts.after.mir index 0588ec9b4ceef..d263b2515f17a 100644 --- a/src/test/mir-opt/issue-41697/32bit/rustc.{{impl}}-{{constant}}.SimplifyCfg-qualify-consts.after.mir +++ b/src/test/mir-opt/issue-41697/32bit/rustc.{{impl}}-{{constant}}.SimplifyCfg-qualify-consts.after.mir @@ -18,15 +18,15 @@ // mir::Constant // + span: $DIR/issue-41697.rs:18:21: 18:22 // + literal: Const { ty: usize, val: Value(Scalar(0x00000001)) } - assert(!move 
(_1.1: bool), "attempt to add with overflow") -> [success: bb1, unwind: bb2]; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 + assert(!move (_1.1: bool), "attempt to add with overflow") -> [success: bb2, unwind: bb1]; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 } - bb1: { - _0 = move (_1.0: usize); // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 - return; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 + bb1 (cleanup): { + resume; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 } - bb2 (cleanup): { - resume; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 + bb2: { + _0 = move (_1.0: usize); // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 + return; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 } } diff --git a/src/test/mir-opt/issue-41697/64bit/rustc.{{impl}}-{{constant}}.SimplifyCfg-qualify-consts.after.mir b/src/test/mir-opt/issue-41697/64bit/rustc.{{impl}}-{{constant}}.SimplifyCfg-qualify-consts.after.mir index 4040403ffa059..6c00f49fb75b1 100644 --- a/src/test/mir-opt/issue-41697/64bit/rustc.{{impl}}-{{constant}}.SimplifyCfg-qualify-consts.after.mir +++ b/src/test/mir-opt/issue-41697/64bit/rustc.{{impl}}-{{constant}}.SimplifyCfg-qualify-consts.after.mir @@ -18,15 +18,15 @@ // mir::Constant // + span: $DIR/issue-41697.rs:18:21: 18:22 // + literal: Const { ty: usize, val: Value(Scalar(0x0000000000000001)) } - assert(!move (_1.1: bool), "attempt to add with overflow") -> [success: bb1, unwind: bb2]; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 + assert(!move (_1.1: bool), "attempt to add with overflow") -> [success: bb2, unwind: bb1]; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 } - bb1: { - _0 = move (_1.0: usize); // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 - return; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 + bb1 (cleanup): { + resume; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 } - bb2 (cleanup): { - resume; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 + bb2: { + _0 = move (_1.0: usize); // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 + return; // scope 0 at $DIR/issue-41697.rs:18:19: 18:22 } } diff --git a/src/test/mir-opt/issue-41888/rustc.main.ElaborateDrops.after.mir b/src/test/mir-opt/issue-41888/rustc.main.ElaborateDrops.after.mir index b64dd5c3776cf..ce940273c3ef5 100644 --- a/src/test/mir-opt/issue-41888/rustc.main.ElaborateDrops.after.mir +++ b/src/test/mir-opt/issue-41888/rustc.main.ElaborateDrops.after.mir @@ -44,7 +44,7 @@ fn main() -> () { // + literal: Const { ty: bool, val: Value(Scalar(0x00)) } StorageLive(_1); // scope 0 at $DIR/issue-41888.rs:7:9: 7:10 StorageLive(_2); // scope 1 at $DIR/issue-41888.rs:8:8: 8:14 - _2 = const cond() -> [return: bb1, unwind: bb11]; // scope 1 at $DIR/issue-41888.rs:8:8: 8:14 + _2 = const cond() -> [return: bb2, unwind: bb3]; // scope 1 at $DIR/issue-41888.rs:8:8: 8:14 // ty::Const // + ty: fn() -> bool {cond} // + val: Value(Scalar()) @@ -53,11 +53,19 @@ fn main() -> () { // + literal: Const { ty: fn() -> bool {cond}, val: Value(Scalar()) } } - bb1: { - switchInt(_2) -> [false: bb2, otherwise: bb3]; // scope 1 at $DIR/issue-41888.rs:8:5: 14:6 + bb1 (cleanup): { + resume; // scope 0 at $DIR/issue-41888.rs:6:1: 15:2 } bb2: { + switchInt(_2) -> [false: bb4, otherwise: bb5]; // scope 1 at $DIR/issue-41888.rs:8:5: 14:6 + } + + bb3 (cleanup): { + goto -> bb1; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + } + + bb4: { _0 = const (); // scope 1 at $DIR/issue-41888.rs:8:5: 14:6 // ty::Const // + ty: () @@ -65,10 +73,10 @@ fn main() -> () { // mir::Constant // + span: $DIR/issue-41888.rs:8:5: 14:6 // + literal: Const { ty: (), 
val: Value(Scalar()) } - goto -> bb8; // scope 1 at $DIR/issue-41888.rs:8:5: 14:6 + goto -> bb11; // scope 1 at $DIR/issue-41888.rs:8:5: 14:6 } - bb3: { + bb5: { StorageLive(_3); // scope 1 at $DIR/issue-41888.rs:9:13: 9:20 StorageLive(_4); // scope 1 at $DIR/issue-41888.rs:9:18: 9:19 _4 = K; // scope 1 at $DIR/issue-41888.rs:9:18: 9:19 @@ -77,17 +85,21 @@ fn main() -> () { goto -> bb14; // scope 1 at $DIR/issue-41888.rs:9:9: 9:10 } - bb4: { - goto -> bb5; // scope 1 at $DIR/issue-41888.rs:9:19: 9:20 + bb6: { + goto -> bb8; // scope 1 at $DIR/issue-41888.rs:9:19: 9:20 } - bb5: { + bb7 (cleanup): { + goto -> bb3; // scope 1 at $DIR/issue-41888.rs:9:19: 9:20 + } + + bb8: { StorageDead(_3); // scope 1 at $DIR/issue-41888.rs:9:19: 9:20 _5 = discriminant(_1); // scope 1 at $DIR/issue-41888.rs:10:16: 10:24 - switchInt(move _5) -> [0isize: bb7, otherwise: bb6]; // scope 1 at $DIR/issue-41888.rs:10:16: 10:24 + switchInt(move _5) -> [0isize: bb10, otherwise: bb9]; // scope 1 at $DIR/issue-41888.rs:10:16: 10:24 } - bb6: { + bb9: { _0 = const (); // scope 1 at $DIR/issue-41888.rs:10:9: 13:10 // ty::Const // + ty: () @@ -95,10 +107,10 @@ fn main() -> () { // mir::Constant // + span: $DIR/issue-41888.rs:10:9: 13:10 // + literal: Const { ty: (), val: Value(Scalar()) } - goto -> bb8; // scope 1 at $DIR/issue-41888.rs:10:9: 13:10 + goto -> bb11; // scope 1 at $DIR/issue-41888.rs:10:9: 13:10 } - bb7: { + bb10: { StorageLive(_6); // scope 1 at $DIR/issue-41888.rs:10:21: 10:23 _9 = const false; // scope 1 at $DIR/issue-41888.rs:10:21: 10:23 // ty::Const @@ -116,14 +128,14 @@ fn main() -> () { // + span: $DIR/issue-41888.rs:10:29: 13:10 // + literal: Const { ty: (), val: Value(Scalar()) } StorageDead(_6); // scope 1 at $DIR/issue-41888.rs:13:9: 13:10 - goto -> bb8; // scope 1 at $DIR/issue-41888.rs:10:9: 13:10 + goto -> bb11; // scope 1 at $DIR/issue-41888.rs:10:9: 13:10 } - bb8: { - goto -> bb20; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + bb11: { + goto -> bb21; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 } - bb9: { + bb12: { _7 = const false; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 // ty::Const // + ty: bool @@ -150,18 +162,6 @@ fn main() -> () { return; // scope 0 at $DIR/issue-41888.rs:15:2: 15:2 } - bb10 (cleanup): { - goto -> bb11; // scope 1 at $DIR/issue-41888.rs:9:19: 9:20 - } - - bb11 (cleanup): { - goto -> bb12; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 - } - - bb12 (cleanup): { - resume; // scope 0 at $DIR/issue-41888.rs:6:1: 15:2 - } - bb13 (cleanup): { _7 = const true; // scope 1 at $DIR/issue-41888.rs:9:9: 9:10 // ty::Const @@ -185,7 +185,7 @@ fn main() -> () { // + span: $DIR/issue-41888.rs:9:9: 9:10 // + literal: Const { ty: bool, val: Value(Scalar(0x01)) } _1 = move _3; // scope 1 at $DIR/issue-41888.rs:9:9: 9:10 - goto -> bb10; // scope 1 at $DIR/issue-41888.rs:9:9: 9:10 + goto -> bb7; // scope 1 at $DIR/issue-41888.rs:9:9: 9:10 } bb14: { @@ -211,7 +211,7 @@ fn main() -> () { // + span: $DIR/issue-41888.rs:9:9: 9:10 // + literal: Const { ty: bool, val: Value(Scalar(0x01)) } _1 = move _3; // scope 1 at $DIR/issue-41888.rs:9:9: 9:10 - goto -> bb4; // scope 1 at $DIR/issue-41888.rs:9:9: 9:10 + goto -> bb6; // scope 1 at $DIR/issue-41888.rs:9:9: 9:10 } bb15: { @@ -222,36 +222,47 @@ fn main() -> () { // mir::Constant // + span: $DIR/issue-41888.rs:15:1: 15:2 // + literal: Const { ty: bool, val: Value(Scalar(0x00)) } - goto -> bb9; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + goto -> bb12; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 } bb16 (cleanup): { - goto -> bb12; // scope 0 
at $DIR/issue-41888.rs:15:1: 15:2 + _7 = const false; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + // ty::Const + // + ty: bool + // + val: Value(Scalar(0x00)) + // mir::Constant + // + span: $DIR/issue-41888.rs:15:1: 15:2 + // + literal: Const { ty: bool, val: Value(Scalar(0x00)) } + goto -> bb1; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 } - bb17: { - drop(_1) -> [return: bb15, unwind: bb12]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + bb17 (cleanup): { + goto -> bb16; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 } - bb18 (cleanup): { - drop(_1) -> bb12; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + bb18: { + drop(_1) -> [return: bb15, unwind: bb16]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 } - bb19: { - _10 = discriminant(_1); // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 - switchInt(move _10) -> [0isize: bb15, otherwise: bb17]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + bb19 (cleanup): { + drop(_1) -> bb16; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 } bb20: { - switchInt(_7) -> [false: bb15, otherwise: bb19]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + _10 = discriminant(_1); // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + switchInt(move _10) -> [0isize: bb15, otherwise: bb18]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 } - bb21 (cleanup): { - _11 = discriminant(_1); // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 - switchInt(move _11) -> [0isize: bb16, otherwise: bb18]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + bb21: { + switchInt(_7) -> [false: bb15, otherwise: bb20]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 } bb22 (cleanup): { - switchInt(_7) -> [false: bb12, otherwise: bb21]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + _11 = discriminant(_1); // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + switchInt(move _11) -> [0isize: bb17, otherwise: bb19]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 + } + + bb23 (cleanup): { + switchInt(_7) -> [false: bb16, otherwise: bb22]; // scope 0 at $DIR/issue-41888.rs:15:1: 15:2 } } diff --git a/src/test/mir-opt/issue-49232/rustc.main.mir_map.0.mir b/src/test/mir-opt/issue-49232/rustc.main.mir_map.0.mir index 75c1ca5af92b0..abf33cce133dd 100644 --- a/src/test/mir-opt/issue-49232/rustc.main.mir_map.0.mir +++ b/src/test/mir-opt/issue-49232/rustc.main.mir_map.0.mir @@ -17,10 +17,14 @@ fn main() -> () { } bb1: { - falseUnwind -> [real: bb2, cleanup: bb11]; // scope 0 at $DIR/issue-49232.rs:6:5: 14:6 + falseUnwind -> [real: bb3, cleanup: bb4]; // scope 0 at $DIR/issue-49232.rs:6:5: 14:6 } bb2: { + goto -> bb14; // scope 0 at $DIR/issue-49232.rs:15:2: 15:2 + } + + bb3: { StorageLive(_2); // scope 0 at $DIR/issue-49232.rs:7:13: 7:19 StorageLive(_3); // scope 0 at $DIR/issue-49232.rs:8:19: 8:23 _3 = const true; // scope 0 at $DIR/issue-49232.rs:8:19: 8:23 @@ -31,14 +35,18 @@ fn main() -> () { // + span: $DIR/issue-49232.rs:8:19: 8:23 // + literal: Const { ty: bool, val: Value(Scalar(0x01)) } FakeRead(ForMatchedPlace, _3); // scope 0 at $DIR/issue-49232.rs:8:19: 8:23 - switchInt(_3) -> [false: bb3, otherwise: bb4]; // scope 0 at $DIR/issue-49232.rs:9:17: 9:22 + switchInt(_3) -> [false: bb5, otherwise: bb6]; // scope 0 at $DIR/issue-49232.rs:9:17: 9:22 } - bb3: { - falseEdges -> [real: bb5, imaginary: bb4]; // scope 0 at $DIR/issue-49232.rs:9:17: 9:22 + bb4 (cleanup): { + resume; // scope 0 at $DIR/issue-49232.rs:5:1: 15:2 + } + + bb5: { + falseEdges -> [real: bb7, imaginary: bb6]; // scope 0 at $DIR/issue-49232.rs:9:17: 9:22 } - bb4: { + bb6: { _0 = const (); // scope 0 at $DIR/issue-49232.rs:10:25: 10:30 // ty::Const // + ty: () @@ 
-46,10 +54,10 @@ fn main() -> () { // mir::Constant // + span: $DIR/issue-49232.rs:10:25: 10:30 // + literal: Const { ty: (), val: Value(Scalar()) } - goto -> bb10; // scope 0 at $DIR/issue-49232.rs:10:25: 10:30 + goto -> bb8; // scope 0 at $DIR/issue-49232.rs:10:25: 10:30 } - bb5: { + bb7: { _2 = const 4i32; // scope 0 at $DIR/issue-49232.rs:9:26: 9:27 // ty::Const // + ty: i32 @@ -57,10 +65,20 @@ fn main() -> () { // mir::Constant // + span: $DIR/issue-49232.rs:9:26: 9:27 // + literal: Const { ty: i32, val: Value(Scalar(0x00000004)) } - goto -> bb8; // scope 0 at $DIR/issue-49232.rs:8:13: 11:14 + goto -> bb12; // scope 0 at $DIR/issue-49232.rs:8:13: 11:14 } - bb6: { + bb8: { + StorageDead(_3); // scope 0 at $DIR/issue-49232.rs:12:10: 12:11 + goto -> bb9; // scope 0 at $DIR/issue-49232.rs:10:25: 10:30 + } + + bb9: { + StorageDead(_2); // scope 0 at $DIR/issue-49232.rs:14:5: 14:6 + goto -> bb2; // scope 0 at $DIR/issue-49232.rs:10:25: 10:30 + } + + bb10: { _4 = const (); // scope 0 at $DIR/issue-49232.rs:10:25: 10:30 // ty::Const // + ty: () @@ -71,17 +89,17 @@ fn main() -> () { unreachable; // scope 0 at $DIR/issue-49232.rs:10:25: 10:30 } - bb7: { - goto -> bb8; // scope 0 at $DIR/issue-49232.rs:8:13: 11:14 + bb11: { + goto -> bb12; // scope 0 at $DIR/issue-49232.rs:8:13: 11:14 } - bb8: { + bb12: { FakeRead(ForLet, _2); // scope 0 at $DIR/issue-49232.rs:7:13: 7:19 StorageDead(_3); // scope 0 at $DIR/issue-49232.rs:12:10: 12:11 StorageLive(_5); // scope 1 at $DIR/issue-49232.rs:13:9: 13:22 StorageLive(_6); // scope 1 at $DIR/issue-49232.rs:13:14: 13:21 _6 = &_2; // scope 1 at $DIR/issue-49232.rs:13:14: 13:21 - _5 = const std::mem::drop::<&i32>(move _6) -> [return: bb9, unwind: bb11]; // scope 1 at $DIR/issue-49232.rs:13:9: 13:22 + _5 = const std::mem::drop::<&i32>(move _6) -> [return: bb13, unwind: bb4]; // scope 1 at $DIR/issue-49232.rs:13:9: 13:22 // ty::Const // + ty: fn(&i32) {std::mem::drop::<&i32>} // + val: Value(Scalar()) @@ -90,7 +108,7 @@ fn main() -> () { // + literal: Const { ty: fn(&i32) {std::mem::drop::<&i32>}, val: Value(Scalar()) } } - bb9: { + bb13: { StorageDead(_6); // scope 1 at $DIR/issue-49232.rs:13:21: 13:22 StorageDead(_5); // scope 1 at $DIR/issue-49232.rs:13:22: 13:23 _1 = const (); // scope 0 at $DIR/issue-49232.rs:6:10: 14:6 @@ -104,13 +122,7 @@ fn main() -> () { goto -> bb1; // scope 0 at $DIR/issue-49232.rs:6:5: 14:6 } - bb10: { - StorageDead(_3); // scope 0 at $DIR/issue-49232.rs:12:10: 12:11 - StorageDead(_2); // scope 0 at $DIR/issue-49232.rs:14:5: 14:6 + bb14: { return; // scope 0 at $DIR/issue-49232.rs:15:2: 15:2 } - - bb11 (cleanup): { - resume; // scope 0 at $DIR/issue-49232.rs:5:1: 15:2 - } } diff --git a/src/test/mir-opt/issue-62289/rustc.test.ElaborateDrops.before.mir b/src/test/mir-opt/issue-62289/rustc.test.ElaborateDrops.before.mir index f59d157d2971a..0b8b03961f2a0 100644 --- a/src/test/mir-opt/issue-62289/rustc.test.ElaborateDrops.before.mir +++ b/src/test/mir-opt/issue-62289/rustc.test.ElaborateDrops.before.mir @@ -30,7 +30,7 @@ fn test() -> std::option::Option> { StorageLive(_3); // scope 0 at $DIR/issue-62289.rs:9:15: 9:20 StorageLive(_4); // scope 0 at $DIR/issue-62289.rs:9:15: 9:19 _4 = std::option::Option::::None; // scope 0 at $DIR/issue-62289.rs:9:15: 9:19 - _3 = const as std::ops::Try>::into_result(move _4) -> [return: bb1, unwind: bb12]; // scope 0 at $DIR/issue-62289.rs:9:15: 9:20 + _3 = const as std::ops::Try>::into_result(move _4) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/issue-62289.rs:9:15: 9:20 // ty::Const // + ty: 
fn(std::option::Option) -> std::result::Result< as std::ops::Try>::Ok, as std::ops::Try>::Error> { as std::ops::Try>::into_result} // + val: Value(Scalar()) @@ -39,32 +39,40 @@ fn test() -> std::option::Option> { // + literal: Const { ty: fn(std::option::Option) -> std::result::Result< as std::ops::Try>::Ok, as std::ops::Try>::Error> { as std::ops::Try>::into_result}, val: Value(Scalar()) } } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/issue-62289.rs:8:1: 10:2 + } + + bb2: { StorageDead(_4); // scope 0 at $DIR/issue-62289.rs:9:19: 9:20 _5 = discriminant(_3); // scope 0 at $DIR/issue-62289.rs:9:19: 9:20 - switchInt(move _5) -> [0isize: bb2, 1isize: bb4, otherwise: bb3]; // scope 0 at $DIR/issue-62289.rs:9:19: 9:20 + switchInt(move _5) -> [0isize: bb4, 1isize: bb6, otherwise: bb5]; // scope 0 at $DIR/issue-62289.rs:9:19: 9:20 } - bb2: { + bb3 (cleanup): { + drop(_2) -> bb1; // scope 0 at $DIR/issue-62289.rs:9:20: 9:21 + } + + bb4: { StorageLive(_10); // scope 0 at $DIR/issue-62289.rs:9:15: 9:20 _10 = ((_3 as Ok).0: u32); // scope 0 at $DIR/issue-62289.rs:9:15: 9:20 (*_2) = _10; // scope 4 at $DIR/issue-62289.rs:9:15: 9:20 StorageDead(_10); // scope 0 at $DIR/issue-62289.rs:9:19: 9:20 _1 = move _2; // scope 0 at $DIR/issue-62289.rs:9:10: 9:21 - drop(_2) -> [return: bb7, unwind: bb11]; // scope 0 at $DIR/issue-62289.rs:9:20: 9:21 + drop(_2) -> [return: bb12, unwind: bb11]; // scope 0 at $DIR/issue-62289.rs:9:20: 9:21 } - bb3: { + bb5: { unreachable; // scope 0 at $DIR/issue-62289.rs:9:15: 9:20 } - bb4: { + bb6: { StorageLive(_6); // scope 0 at $DIR/issue-62289.rs:9:19: 9:20 _6 = ((_3 as Err).0: std::option::NoneError); // scope 0 at $DIR/issue-62289.rs:9:19: 9:20 StorageLive(_8); // scope 2 at $DIR/issue-62289.rs:9:19: 9:20 StorageLive(_9); // scope 2 at $DIR/issue-62289.rs:9:19: 9:20 _9 = _6; // scope 2 at $DIR/issue-62289.rs:9:19: 9:20 - _8 = const >::from(move _9) -> [return: bb5, unwind: bb12]; // scope 2 at $DIR/issue-62289.rs:9:19: 9:20 + _8 = const >::from(move _9) -> [return: bb8, unwind: bb3]; // scope 2 at $DIR/issue-62289.rs:9:19: 9:20 // ty::Const // + ty: fn(std::option::NoneError) -> std::option::NoneError {>::from} // + val: Value(Scalar()) @@ -73,9 +81,13 @@ fn test() -> std::option::Option> { // + literal: Const { ty: fn(std::option::NoneError) -> std::option::NoneError {>::from}, val: Value(Scalar()) } } - bb5: { + bb7: { + return; // scope 0 at $DIR/issue-62289.rs:10:2: 10:2 + } + + bb8: { StorageDead(_9); // scope 2 at $DIR/issue-62289.rs:9:19: 9:20 - _0 = const > as std::ops::Try>::from_error(move _8) -> [return: bb6, unwind: bb12]; // scope 2 at $DIR/issue-62289.rs:9:19: 9:20 + _0 = const > as std::ops::Try>::from_error(move _8) -> [return: bb9, unwind: bb3]; // scope 2 at $DIR/issue-62289.rs:9:19: 9:20 // ty::Const // + ty: fn(> as std::ops::Try>::Error) -> std::option::Option> {> as std::ops::Try>::from_error} // + val: Value(Scalar()) @@ -84,44 +96,32 @@ fn test() -> std::option::Option> { // + literal: Const { ty: fn(> as std::ops::Try>::Error) -> std::option::Option> {> as std::ops::Try>::from_error}, val: Value(Scalar()) } } - bb6: { + bb9: { StorageDead(_8); // scope 2 at $DIR/issue-62289.rs:9:19: 9:20 StorageDead(_6); // scope 0 at $DIR/issue-62289.rs:9:19: 9:20 - drop(_2) -> bb9; // scope 0 at $DIR/issue-62289.rs:9:20: 9:21 - } - - bb7: { - StorageDead(_2); // scope 0 at $DIR/issue-62289.rs:9:20: 9:21 - _0 = std::option::Option::>::Some(move _1); // scope 0 at $DIR/issue-62289.rs:9:5: 9:22 - drop(_1) -> bb8; // scope 0 at $DIR/issue-62289.rs:9:21: 
9:22 + drop(_2) -> bb10; // scope 0 at $DIR/issue-62289.rs:9:20: 9:21 } - bb8: { - StorageDead(_1); // scope 0 at $DIR/issue-62289.rs:9:21: 9:22 - StorageDead(_3); // scope 0 at $DIR/issue-62289.rs:10:1: 10:2 - goto -> bb10; // scope 0 at $DIR/issue-62289.rs:10:2: 10:2 - } - - bb9: { + bb10: { StorageDead(_2); // scope 0 at $DIR/issue-62289.rs:9:20: 9:21 StorageDead(_1); // scope 0 at $DIR/issue-62289.rs:9:21: 9:22 StorageDead(_3); // scope 0 at $DIR/issue-62289.rs:10:1: 10:2 - goto -> bb10; // scope 0 at $DIR/issue-62289.rs:10:2: 10:2 - } - - bb10: { - return; // scope 0 at $DIR/issue-62289.rs:10:2: 10:2 + goto -> bb7; // scope 0 at $DIR/issue-62289.rs:9:19: 9:20 } bb11 (cleanup): { - drop(_1) -> bb13; // scope 0 at $DIR/issue-62289.rs:9:21: 9:22 + drop(_1) -> bb1; // scope 0 at $DIR/issue-62289.rs:9:21: 9:22 } - bb12 (cleanup): { - drop(_2) -> bb13; // scope 0 at $DIR/issue-62289.rs:9:20: 9:21 + bb12: { + StorageDead(_2); // scope 0 at $DIR/issue-62289.rs:9:20: 9:21 + _0 = std::option::Option::>::Some(move _1); // scope 0 at $DIR/issue-62289.rs:9:5: 9:22 + drop(_1) -> bb13; // scope 0 at $DIR/issue-62289.rs:9:21: 9:22 } - bb13 (cleanup): { - resume; // scope 0 at $DIR/issue-62289.rs:8:1: 10:2 + bb13: { + StorageDead(_1); // scope 0 at $DIR/issue-62289.rs:9:21: 9:22 + StorageDead(_3); // scope 0 at $DIR/issue-62289.rs:10:1: 10:2 + goto -> bb7; // scope 0 at $DIR/issue-62289.rs:10:2: 10:2 } } diff --git a/src/test/mir-opt/loop_test/rustc.main.SimplifyCfg-qualify-consts.after.mir b/src/test/mir-opt/loop_test/rustc.main.SimplifyCfg-qualify-consts.after.mir index 33c59bd3f1478..38e04a6399fc2 100644 --- a/src/test/mir-opt/loop_test/rustc.main.SimplifyCfg-qualify-consts.after.mir +++ b/src/test/mir-opt/loop_test/rustc.main.SimplifyCfg-qualify-consts.after.mir @@ -23,14 +23,18 @@ fn main() -> () { // + span: $DIR/loop_test.rs:10:8: 10:12 // + literal: Const { ty: bool, val: Value(Scalar(0x01)) } FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/loop_test.rs:10:8: 10:12 - switchInt(_2) -> [false: bb2, otherwise: bb1]; // scope 0 at $DIR/loop_test.rs:10:5: 12:6 + switchInt(_2) -> [false: bb3, otherwise: bb2]; // scope 0 at $DIR/loop_test.rs:10:5: 12:6 } - bb1: { - falseEdges -> [real: bb3, imaginary: bb2]; // scope 0 at $DIR/loop_test.rs:10:5: 12:6 + bb1 (cleanup): { + resume; // scope 0 at $DIR/loop_test.rs:6:1: 17:2 } bb2: { + falseEdges -> [real: bb4, imaginary: bb3]; // scope 0 at $DIR/loop_test.rs:10:5: 12:6 + } + + bb3: { _1 = const (); // scope 0 at $DIR/loop_test.rs:10:5: 12:6 // ty::Const // + ty: () @@ -41,10 +45,10 @@ fn main() -> () { StorageDead(_2); // scope 0 at $DIR/loop_test.rs:12:5: 12:6 StorageDead(_1); // scope 0 at $DIR/loop_test.rs:12:5: 12:6 StorageLive(_4); // scope 0 at $DIR/loop_test.rs:13:5: 16:6 - goto -> bb4; // scope 0 at $DIR/loop_test.rs:13:5: 16:6 + goto -> bb5; // scope 0 at $DIR/loop_test.rs:13:5: 16:6 } - bb3: { + bb4: { _0 = const (); // scope 0 at $DIR/loop_test.rs:11:9: 11:15 // ty::Const // + ty: () @@ -57,11 +61,11 @@ fn main() -> () { return; // scope 0 at $DIR/loop_test.rs:17:2: 17:2 } - bb4: { - falseUnwind -> [real: bb5, cleanup: bb6]; // scope 0 at $DIR/loop_test.rs:13:5: 16:6 + bb5: { + falseUnwind -> [real: bb6, cleanup: bb1]; // scope 0 at $DIR/loop_test.rs:13:5: 16:6 } - bb5: { + bb6: { StorageLive(_6); // scope 0 at $DIR/loop_test.rs:14:13: 14:14 _6 = const 1i32; // scope 0 at $DIR/loop_test.rs:14:17: 14:18 // ty::Const @@ -72,10 +76,6 @@ fn main() -> () { // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } FakeRead(ForLet, _6); // 
scope 0 at $DIR/loop_test.rs:14:13: 14:14 StorageDead(_6); // scope 0 at $DIR/loop_test.rs:16:5: 16:6 - goto -> bb4; // scope 0 at $DIR/loop_test.rs:1:1: 1:1 - } - - bb6 (cleanup): { - resume; // scope 0 at $DIR/loop_test.rs:6:1: 17:2 + goto -> bb5; // scope 0 at $DIR/loop_test.rs:15:9: 15:17 } } diff --git a/src/test/mir-opt/match-arm-scopes/rustc.complicated_match.ElaborateDrops.after.mir b/src/test/mir-opt/match-arm-scopes/rustc.complicated_match.ElaborateDrops.after.mir index bca19398ce7af..856248e90d495 100644 --- a/src/test/mir-opt/match-arm-scopes/rustc.complicated_match.ElaborateDrops.after.mir +++ b/src/test/mir-opt/match-arm-scopes/rustc.complicated_match.ElaborateDrops.after.mir @@ -30,26 +30,30 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { } bb0: { - switchInt((_2.0: bool)) -> [false: bb5, otherwise: bb1]; // scope 0 at $DIR/match-arm-scopes.rs:16:10: 16:15 + switchInt((_2.0: bool)) -> [false: bb6, otherwise: bb2]; // scope 0 at $DIR/match-arm-scopes.rs:16:10: 16:15 } - bb1: { - switchInt((_2.1: bool)) -> [false: bb10, otherwise: bb2]; // scope 0 at $DIR/match-arm-scopes.rs:16:29: 16:34 + bb1 (cleanup): { + resume; // scope 0 at $DIR/match-arm-scopes.rs:14:1: 19:2 } bb2: { - switchInt((_2.0: bool)) -> [false: bb3, otherwise: bb17]; // scope 0 at $DIR/match-arm-scopes.rs:17:10: 17:14 + switchInt((_2.1: bool)) -> [false: bb14, otherwise: bb3]; // scope 0 at $DIR/match-arm-scopes.rs:16:29: 16:34 } bb3: { + switchInt((_2.0: bool)) -> [false: bb4, otherwise: bb21]; // scope 0 at $DIR/match-arm-scopes.rs:17:10: 17:14 + } + + bb4: { StorageLive(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:32: 17:33 _15 = (_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:17:32: 17:33 StorageLive(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:35: 17:36 _16 = move (_2.2: std::string::String); // scope 0 at $DIR/match-arm-scopes.rs:17:35: 17:36 - goto -> bb16; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb20; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb4: { + bb5: { _0 = const 1i32; // scope 1 at $DIR/match-arm-scopes.rs:16:77: 16:78 // ty::Const // + ty: i32 @@ -57,10 +61,10 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { // mir::Constant // + span: $DIR/match-arm-scopes.rs:16:77: 16:78 // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } - drop(_7) -> [return: bb15, unwind: bb22]; // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + drop(_7) -> [return: bb19, unwind: bb10]; // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 } - bb5: { + bb6: { StorageLive(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18 _6 = &(_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18 StorageLive(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:20: 16:21 @@ -68,16 +72,16 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { StorageLive(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 StorageLive(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 _10 = _1; // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 - switchInt(_10) -> [false: bb6, otherwise: bb7]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + switchInt(_10) -> [false: bb7, otherwise: bb8]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb6: { + bb7: { _9 = (*_6); // scope 0 at $DIR/match-arm-scopes.rs:16:70: 16:71 StorageDead(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 - switchInt(move _9) -> [false: bb9, otherwise: bb8]; // scope 0 at 
$DIR/match-arm-scopes.rs:16:42: 16:73 + switchInt(move _9) -> [false: bb13, otherwise: bb12]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb7: { + bb8: { _0 = const 3i32; // scope 0 at $DIR/match-arm-scopes.rs:16:59: 16:60 // ty::Const // + ty: i32 @@ -87,26 +91,40 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } StorageDead(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - goto -> bb20; // scope 0 at $DIR/match-arm-scopes.rs:1:1: 1:1 + StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + goto -> bb11; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60 } - bb8: { + bb9: { + return; // scope 0 at $DIR/match-arm-scopes.rs:19:2: 19:2 + } + + bb10 (cleanup): { + goto -> bb25; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + } + + bb11: { + drop(_2) -> [return: bb9, unwind: bb1]; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + } + + bb12: { StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageLive(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18 _5 = (_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18 StorageLive(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:20: 16:21 _7 = move (_2.2: std::string::String); // scope 0 at $DIR/match-arm-scopes.rs:16:20: 16:21 - goto -> bb4; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb5; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb9: { + bb13: { StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - goto -> bb1; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + goto -> bb2; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb10: { + bb14: { StorageLive(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27 _6 = &(_2.0: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27 StorageLive(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:36: 16:37 @@ -114,16 +132,16 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { StorageLive(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 StorageLive(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 _13 = _1; // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 - switchInt(_13) -> [false: bb11, otherwise: bb12]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + switchInt(_13) -> [false: bb15, otherwise: bb16]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb11: { + bb15: { _12 = (*_6); // scope 0 at $DIR/match-arm-scopes.rs:16:70: 16:71 StorageDead(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 - switchInt(move _12) -> [false: bb14, otherwise: bb13]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + switchInt(move _12) -> [false: bb18, otherwise: bb17]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb12: { + bb16: { _0 = const 3i32; // scope 0 at $DIR/match-arm-scopes.rs:16:59: 16:60 // ty::Const // + ty: i32 @@ -133,34 +151,36 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } StorageDead(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - goto -> bb20; // 
scope 0 at $DIR/match-arm-scopes.rs:1:1: 1:1 + StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + goto -> bb11; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60 } - bb13: { + bb17: { StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageLive(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27 _5 = (_2.0: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27 StorageLive(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:36: 16:37 _7 = move (_2.2: std::string::String); // scope 0 at $DIR/match-arm-scopes.rs:16:36: 16:37 - goto -> bb4; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb5; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb14: { + bb18: { StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - goto -> bb2; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + goto -> bb3; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb15: { + bb19: { StorageDead(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - goto -> bb19; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb23; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb16: { + bb20: { _0 = const 2i32; // scope 2 at $DIR/match-arm-scopes.rs:17:41: 17:42 // ty::Const // + ty: i32 @@ -168,58 +188,48 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { // mir::Constant // + span: $DIR/match-arm-scopes.rs:17:41: 17:42 // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } - drop(_16) -> [return: bb18, unwind: bb22]; // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 + drop(_16) -> [return: bb22, unwind: bb10]; // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 } - bb17: { + bb21: { StorageLive(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:16: 17:17 _15 = (_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:17:16: 17:17 StorageLive(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:19: 17:20 _16 = move (_2.2: std::string::String); // scope 0 at $DIR/match-arm-scopes.rs:17:19: 17:20 - goto -> bb16; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb20; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb18: { + bb22: { StorageDead(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 StorageDead(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 - goto -> bb19; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb23; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb19: { - goto -> bb26; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + bb23: { + goto -> bb29; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 } - bb20: { - StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - drop(_2) -> [return: bb21, unwind: bb23]; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + bb24 (cleanup): { + goto -> bb1; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 } - bb21: { - return; // scope 0 at $DIR/match-arm-scopes.rs:19:2: 19:2 - } - - bb22 (cleanup): { - goto -> bb27; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 - } - - bb23 (cleanup): { - resume; // 
scope 0 at $DIR/match-arm-scopes.rs:14:1: 19:2 + bb25 (cleanup): { + goto -> bb24; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 } - bb24: { - goto -> bb21; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + bb26: { + goto -> bb9; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 } - bb25 (cleanup): { - goto -> bb23; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + bb27 (cleanup): { + goto -> bb1; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 } - bb26: { - goto -> bb24; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + bb28 (cleanup): { + goto -> bb27; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 } - bb27 (cleanup): { - goto -> bb23; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + bb29: { + goto -> bb26; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 } } diff --git a/src/test/mir-opt/match-arm-scopes/rustc.complicated_match.SimplifyCfg-initial.after.mir b/src/test/mir-opt/match-arm-scopes/rustc.complicated_match.SimplifyCfg-initial.after.mir index 952aa0e804af9..63974bda26038 100644 --- a/src/test/mir-opt/match-arm-scopes/rustc.complicated_match.SimplifyCfg-initial.after.mir +++ b/src/test/mir-opt/match-arm-scopes/rustc.complicated_match.SimplifyCfg-initial.after.mir @@ -31,38 +31,42 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { bb0: { FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/match-arm-scopes.rs:15:11: 15:16 - switchInt((_2.0: bool)) -> [false: bb1, otherwise: bb2]; // scope 0 at $DIR/match-arm-scopes.rs:16:10: 16:15 + switchInt((_2.0: bool)) -> [false: bb2, otherwise: bb3]; // scope 0 at $DIR/match-arm-scopes.rs:16:10: 16:15 } - bb1: { - falseEdges -> [real: bb8, imaginary: bb3]; // scope 0 at $DIR/match-arm-scopes.rs:16:9: 16:22 + bb1 (cleanup): { + resume; // scope 0 at $DIR/match-arm-scopes.rs:14:1: 19:2 } bb2: { - switchInt((_2.1: bool)) -> [false: bb3, otherwise: bb4]; // scope 0 at $DIR/match-arm-scopes.rs:16:29: 16:34 + falseEdges -> [real: bb9, imaginary: bb4]; // scope 0 at $DIR/match-arm-scopes.rs:16:9: 16:22 } bb3: { - falseEdges -> [real: bb14, imaginary: bb5]; // scope 0 at $DIR/match-arm-scopes.rs:16:25: 16:38 + switchInt((_2.1: bool)) -> [false: bb4, otherwise: bb5]; // scope 0 at $DIR/match-arm-scopes.rs:16:29: 16:34 } bb4: { - switchInt((_2.0: bool)) -> [false: bb6, otherwise: bb5]; // scope 0 at $DIR/match-arm-scopes.rs:17:10: 17:14 + falseEdges -> [real: bb18, imaginary: bb6]; // scope 0 at $DIR/match-arm-scopes.rs:16:25: 16:38 } bb5: { - falseEdges -> [real: bb22, imaginary: bb6]; // scope 0 at $DIR/match-arm-scopes.rs:17:9: 17:21 + switchInt((_2.0: bool)) -> [false: bb7, otherwise: bb6]; // scope 0 at $DIR/match-arm-scopes.rs:17:10: 17:14 } bb6: { + falseEdges -> [real: bb26, imaginary: bb7]; // scope 0 at $DIR/match-arm-scopes.rs:17:9: 17:21 + } + + bb7: { StorageLive(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:32: 17:33 _15 = (_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:17:32: 17:33 StorageLive(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:35: 17:36 _16 = move (_2.2: std::string::String); // scope 0 at $DIR/match-arm-scopes.rs:17:35: 17:36 - goto -> bb21; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb25; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb7: { + bb8: { _0 = const 1i32; // scope 1 at $DIR/match-arm-scopes.rs:16:77: 16:78 // ty::Const // + ty: i32 @@ -70,10 +74,10 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { // mir::Constant // + span: $DIR/match-arm-scopes.rs:16:77: 16:78 // + literal: Const { ty: i32, val: 
Value(Scalar(0x00000001)) } - drop(_7) -> [return: bb20, unwind: bb27]; // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + drop(_7) -> [return: bb24, unwind: bb14]; // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 } - bb8: { + bb9: { StorageLive(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18 _6 = &(_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18 StorageLive(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:20: 16:21 @@ -84,20 +88,20 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { StorageLive(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 _10 = _1; // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 FakeRead(ForMatchedPlace, _10); // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 - switchInt(_10) -> [false: bb10, otherwise: bb9]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + switchInt(_10) -> [false: bb11, otherwise: bb10]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb9: { - falseEdges -> [real: bb11, imaginary: bb10]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + bb10: { + falseEdges -> [real: bb12, imaginary: bb11]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb10: { + bb11: { _9 = (*_6); // scope 0 at $DIR/match-arm-scopes.rs:16:70: 16:71 StorageDead(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 - switchInt(move _9) -> [false: bb13, otherwise: bb12]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + switchInt(move _9) -> [false: bb17, otherwise: bb16]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb11: { + bb12: { _0 = const 3i32; // scope 0 at $DIR/match-arm-scopes.rs:16:59: 16:60 // ty::Const // + ty: i32 @@ -107,10 +111,24 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } StorageDead(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - goto -> bb25; // scope 0 at $DIR/match-arm-scopes.rs:1:1: 1:1 + StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + goto -> bb15; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60 } - bb12: { + bb13: { + return; // scope 0 at $DIR/match-arm-scopes.rs:19:2: 19:2 + } + + bb14 (cleanup): { + drop(_2) -> bb1; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + } + + bb15: { + drop(_2) -> [return: bb13, unwind: bb1]; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + } + + bb16: { StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 FakeRead(ForMatchGuard, _3); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 @@ -120,17 +138,17 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { _5 = (_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18 StorageLive(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:20: 16:21 _7 = move (_2.2: std::string::String); // scope 0 at $DIR/match-arm-scopes.rs:16:20: 16:21 - goto -> bb7; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb8; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb13: { + bb17: { StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - falseEdges -> [real: bb2, imaginary: bb3]; // scope 0 at 
$DIR/match-arm-scopes.rs:16:42: 16:73 + falseEdges -> [real: bb3, imaginary: bb4]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb14: { + bb18: { StorageLive(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27 _6 = &(_2.0: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27 StorageLive(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:36: 16:37 @@ -141,20 +159,20 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { StorageLive(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 _13 = _1; // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 FakeRead(ForMatchedPlace, _13); // scope 0 at $DIR/match-arm-scopes.rs:16:45: 16:49 - switchInt(_13) -> [false: bb16, otherwise: bb15]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + switchInt(_13) -> [false: bb20, otherwise: bb19]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb15: { - falseEdges -> [real: bb17, imaginary: bb16]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + bb19: { + falseEdges -> [real: bb21, imaginary: bb20]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb16: { + bb20: { _12 = (*_6); // scope 0 at $DIR/match-arm-scopes.rs:16:70: 16:71 StorageDead(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 - switchInt(move _12) -> [false: bb19, otherwise: bb18]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + switchInt(move _12) -> [false: bb23, otherwise: bb22]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb17: { + bb21: { _0 = const 3i32; // scope 0 at $DIR/match-arm-scopes.rs:16:59: 16:60 // ty::Const // + ty: i32 @@ -164,10 +182,12 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } StorageDead(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - goto -> bb25; // scope 0 at $DIR/match-arm-scopes.rs:1:1: 1:1 + StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 + goto -> bb15; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60 } - bb18: { + bb22: { StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 FakeRead(ForMatchGuard, _3); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 @@ -177,25 +197,25 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { _5 = (_2.0: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27 StorageLive(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:36: 16:37 _7 = move (_2.2: std::string::String); // scope 0 at $DIR/match-arm-scopes.rs:16:36: 16:37 - goto -> bb7; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb8; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb19: { + bb23: { StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - falseEdges -> [real: bb4, imaginary: bb5]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 + falseEdges -> [real: bb5, imaginary: bb6]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 } - bb20: { + bb24: { StorageDead(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - goto -> bb24; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb28; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb21: { + bb25: { _0 = const 2i32; // scope 2 at $DIR/match-arm-scopes.rs:17:41: 17:42 // ty::Const // + ty: i32 @@ -203,42 +223,24 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 { // mir::Constant // + span: $DIR/match-arm-scopes.rs:17:41: 17:42 // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } - drop(_16) -> [return: bb23, unwind: bb27]; // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 + drop(_16) -> [return: bb27, unwind: bb14]; // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 } - bb22: { + bb26: { StorageLive(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:16: 17:17 _15 = (_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:17:16: 17:17 StorageLive(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:19: 17:20 _16 = move (_2.2: std::string::String); // scope 0 at $DIR/match-arm-scopes.rs:17:19: 17:20 - goto -> bb21; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 + goto -> bb25; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb23: { + bb27: { StorageDead(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 StorageDead(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 - goto -> bb24; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 - } - - bb24: { - drop(_2) -> [return: bb26, unwind: bb28]; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 - } - - bb25: { - StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 - drop(_2) -> [return: bb26, unwind: bb28]; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 - } - - bb26: { - return; // scope 0 at $DIR/match-arm-scopes.rs:19:2: 19:2 - } - - bb27 (cleanup): { - drop(_2) -> bb28; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 + goto -> bb28; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 } - bb28 (cleanup): { - resume; // scope 0 at $DIR/match-arm-scopes.rs:14:1: 19:2 + bb28: { + drop(_2) -> [return: bb13, unwind: bb1]; // scope 0 at $DIR/match-arm-scopes.rs:19:1: 19:2 } } diff --git a/src/test/mir-opt/match_false_edges/rustc.full_tested_match.PromoteTemps.after.mir b/src/test/mir-opt/match_false_edges/rustc.full_tested_match.PromoteTemps.after.mir index ddb0e1b75face..db888bc4b87b2 100644 --- a/src/test/mir-opt/match_false_edges/rustc.full_tested_match.PromoteTemps.after.mir +++ b/src/test/mir-opt/match_false_edges/rustc.full_tested_match.PromoteTemps.after.mir @@ -35,10 +35,14 @@ fn full_tested_match() -> () { // + literal: Const { ty: i32, val: Value(Scalar(0x0000002a)) } FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/match_false_edges.rs:15:19: 15:27 _3 = discriminant(_2); // scope 0 at $DIR/match_false_edges.rs:16:9: 16:16 - switchInt(move _3) -> [0isize: bb1, 1isize: bb2, otherwise: bb4]; // scope 0 at $DIR/match_false_edges.rs:16:9: 16:16 + switchInt(move _3) -> [0isize: bb2, 1isize: bb3, otherwise: bb5]; // scope 0 at $DIR/match_false_edges.rs:16:9: 16:16 } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/match_false_edges.rs:14:1: 20:2 + } + + bb2: { _1 = (const 3i32, const 3i32); // scope 0 at $DIR/match_false_edges.rs:18:17: 18:23 // ty::Const // + ty: i32 @@ -52,22 +56,22 @@ fn full_tested_match() -> () { // mir::Constant // + span: $DIR/match_false_edges.rs:18:21: 18:22 // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } - goto -> bb10; // scope 0 
at $DIR/match_false_edges.rs:15:13: 19:6 - } - - bb2: { - falseEdges -> [real: bb5, imaginary: bb3]; // scope 0 at $DIR/match_false_edges.rs:16:9: 16:16 + goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:15:13: 19:6 } bb3: { - falseEdges -> [real: bb9, imaginary: bb1]; // scope 0 at $DIR/match_false_edges.rs:17:9: 17:16 + falseEdges -> [real: bb6, imaginary: bb4]; // scope 0 at $DIR/match_false_edges.rs:16:9: 16:16 } bb4: { - unreachable; // scope 0 at $DIR/match_false_edges.rs:15:19: 15:27 + falseEdges -> [real: bb10, imaginary: bb2]; // scope 0 at $DIR/match_false_edges.rs:17:9: 17:16 } bb5: { + unreachable; // scope 0 at $DIR/match_false_edges.rs:15:19: 15:27 + } + + bb6: { StorageLive(_6); // scope 0 at $DIR/match_false_edges.rs:16:14: 16:15 _11 = const full_tested_match::promoted[0]; // scope 0 at $DIR/match_false_edges.rs:16:14: 16:15 // ty::Const @@ -79,7 +83,7 @@ fn full_tested_match() -> () { _6 = &(((*_11) as Some).0: i32); // scope 0 at $DIR/match_false_edges.rs:16:14: 16:15 _4 = &shallow _2; // scope 0 at $DIR/match_false_edges.rs:15:19: 15:27 StorageLive(_7); // scope 0 at $DIR/match_false_edges.rs:16:20: 16:27 - _7 = const guard() -> [return: bb6, unwind: bb11]; // scope 0 at $DIR/match_false_edges.rs:16:20: 16:27 + _7 = const guard() -> [return: bb7, unwind: bb1]; // scope 0 at $DIR/match_false_edges.rs:16:20: 16:27 // ty::Const // + ty: fn() -> bool {guard} // + val: Value(Scalar()) @@ -88,11 +92,11 @@ fn full_tested_match() -> () { // + literal: Const { ty: fn() -> bool {guard}, val: Value(Scalar()) } } - bb6: { - switchInt(move _7) -> [false: bb8, otherwise: bb7]; // scope 0 at $DIR/match_false_edges.rs:16:20: 16:27 + bb7: { + switchInt(move _7) -> [false: bb9, otherwise: bb8]; // scope 0 at $DIR/match_false_edges.rs:16:20: 16:27 } - bb7: { + bb8: { StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match_false_edges.rs:16:26: 16:27 FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match_false_edges.rs:16:26: 16:27 @@ -110,16 +114,16 @@ fn full_tested_match() -> () { StorageDead(_8); // scope 2 at $DIR/match_false_edges.rs:16:36: 16:37 StorageDead(_5); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 - goto -> bb10; // scope 0 at $DIR/match_false_edges.rs:15:13: 19:6 + goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:15:13: 19:6 } - bb8: { + bb9: { StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 - goto -> bb3; // scope 0 at $DIR/match_false_edges.rs:16:20: 16:27 + goto -> bb4; // scope 0 at $DIR/match_false_edges.rs:16:20: 16:27 } - bb9: { + bb10: { StorageLive(_9); // scope 0 at $DIR/match_false_edges.rs:17:14: 17:15 _9 = ((_2 as Some).0: i32); // scope 0 at $DIR/match_false_edges.rs:17:14: 17:15 StorageLive(_10); // scope 3 at $DIR/match_false_edges.rs:17:24: 17:25 @@ -133,10 +137,10 @@ fn full_tested_match() -> () { // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } StorageDead(_10); // scope 3 at $DIR/match_false_edges.rs:17:25: 17:26 StorageDead(_9); // scope 0 at $DIR/match_false_edges.rs:17:26: 17:27 - goto -> bb10; // scope 0 at $DIR/match_false_edges.rs:15:13: 19:6 + goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:15:13: 19:6 } - bb10: { + bb11: { StorageDead(_2); // scope 0 at $DIR/match_false_edges.rs:19:6: 19:7 StorageDead(_1); // scope 0 at $DIR/match_false_edges.rs:19:6: 19:7 _0 = const (); // scope 0 
at $DIR/match_false_edges.rs:14:28: 20:2 @@ -148,8 +152,4 @@ fn full_tested_match() -> () { // + literal: Const { ty: (), val: Value(Scalar()) } return; // scope 0 at $DIR/match_false_edges.rs:20:2: 20:2 } - - bb11 (cleanup): { - resume; // scope 0 at $DIR/match_false_edges.rs:14:1: 20:2 - } } diff --git a/src/test/mir-opt/match_false_edges/rustc.full_tested_match2.PromoteTemps.before.mir b/src/test/mir-opt/match_false_edges/rustc.full_tested_match2.PromoteTemps.before.mir index 41687006ca3b7..96aa9e828d784 100644 --- a/src/test/mir-opt/match_false_edges/rustc.full_tested_match2.PromoteTemps.before.mir +++ b/src/test/mir-opt/match_false_edges/rustc.full_tested_match2.PromoteTemps.before.mir @@ -34,18 +34,22 @@ fn full_tested_match2() -> () { // + literal: Const { ty: i32, val: Value(Scalar(0x0000002a)) } FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/match_false_edges.rs:26:19: 26:27 _3 = discriminant(_2); // scope 0 at $DIR/match_false_edges.rs:27:9: 27:16 - switchInt(move _3) -> [0isize: bb1, 1isize: bb2, otherwise: bb4]; // scope 0 at $DIR/match_false_edges.rs:27:9: 27:16 + switchInt(move _3) -> [0isize: bb2, 1isize: bb3, otherwise: bb5]; // scope 0 at $DIR/match_false_edges.rs:27:9: 27:16 } - bb1: { - falseEdges -> [real: bb9, imaginary: bb3]; // scope 0 at $DIR/match_false_edges.rs:28:9: 28:13 + bb1 (cleanup): { + resume; // scope 0 at $DIR/match_false_edges.rs:25:1: 31:2 } bb2: { - falseEdges -> [real: bb5, imaginary: bb1]; // scope 0 at $DIR/match_false_edges.rs:27:9: 27:16 + falseEdges -> [real: bb10, imaginary: bb4]; // scope 0 at $DIR/match_false_edges.rs:28:9: 28:13 } bb3: { + falseEdges -> [real: bb6, imaginary: bb2]; // scope 0 at $DIR/match_false_edges.rs:27:9: 27:16 + } + + bb4: { StorageLive(_9); // scope 0 at $DIR/match_false_edges.rs:29:14: 29:15 _9 = ((_2 as Some).0: i32); // scope 0 at $DIR/match_false_edges.rs:29:14: 29:15 StorageLive(_10); // scope 3 at $DIR/match_false_edges.rs:29:24: 29:25 @@ -59,19 +63,19 @@ fn full_tested_match2() -> () { // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } StorageDead(_10); // scope 3 at $DIR/match_false_edges.rs:29:25: 29:26 StorageDead(_9); // scope 0 at $DIR/match_false_edges.rs:29:26: 29:27 - goto -> bb10; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6 + goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6 } - bb4: { + bb5: { unreachable; // scope 0 at $DIR/match_false_edges.rs:26:19: 26:27 } - bb5: { + bb6: { StorageLive(_6); // scope 0 at $DIR/match_false_edges.rs:27:14: 27:15 _6 = &((_2 as Some).0: i32); // scope 0 at $DIR/match_false_edges.rs:27:14: 27:15 _4 = &shallow _2; // scope 0 at $DIR/match_false_edges.rs:26:19: 26:27 StorageLive(_7); // scope 0 at $DIR/match_false_edges.rs:27:20: 27:27 - _7 = const guard() -> [return: bb6, unwind: bb11]; // scope 0 at $DIR/match_false_edges.rs:27:20: 27:27 + _7 = const guard() -> [return: bb7, unwind: bb1]; // scope 0 at $DIR/match_false_edges.rs:27:20: 27:27 // ty::Const // + ty: fn() -> bool {guard} // + val: Value(Scalar()) @@ -80,11 +84,11 @@ fn full_tested_match2() -> () { // + literal: Const { ty: fn() -> bool {guard}, val: Value(Scalar()) } } - bb6: { - switchInt(move _7) -> [false: bb8, otherwise: bb7]; // scope 0 at $DIR/match_false_edges.rs:27:20: 27:27 + bb7: { + switchInt(move _7) -> [false: bb9, otherwise: bb8]; // scope 0 at $DIR/match_false_edges.rs:27:20: 27:27 } - bb7: { + bb8: { StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match_false_edges.rs:27:26: 27:27 
FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match_false_edges.rs:27:26: 27:27 @@ -102,16 +106,16 @@ fn full_tested_match2() -> () { StorageDead(_8); // scope 2 at $DIR/match_false_edges.rs:27:36: 27:37 StorageDead(_5); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 - goto -> bb10; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6 + goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6 } - bb8: { + bb9: { StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 - falseEdges -> [real: bb3, imaginary: bb1]; // scope 0 at $DIR/match_false_edges.rs:27:20: 27:27 + falseEdges -> [real: bb4, imaginary: bb2]; // scope 0 at $DIR/match_false_edges.rs:27:20: 27:27 } - bb9: { + bb10: { _1 = (const 3i32, const 3i32); // scope 0 at $DIR/match_false_edges.rs:28:17: 28:23 // ty::Const // + ty: i32 @@ -125,10 +129,10 @@ fn full_tested_match2() -> () { // mir::Constant // + span: $DIR/match_false_edges.rs:28:21: 28:22 // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } - goto -> bb10; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6 + goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6 } - bb10: { + bb11: { StorageDead(_2); // scope 0 at $DIR/match_false_edges.rs:30:6: 30:7 StorageDead(_1); // scope 0 at $DIR/match_false_edges.rs:30:6: 30:7 _0 = const (); // scope 0 at $DIR/match_false_edges.rs:25:29: 31:2 @@ -140,8 +144,4 @@ fn full_tested_match2() -> () { // + literal: Const { ty: (), val: Value(Scalar()) } return; // scope 0 at $DIR/match_false_edges.rs:31:2: 31:2 } - - bb11 (cleanup): { - resume; // scope 0 at $DIR/match_false_edges.rs:25:1: 31:2 - } } diff --git a/src/test/mir-opt/match_false_edges/rustc.main.PromoteTemps.before.mir b/src/test/mir-opt/match_false_edges/rustc.main.PromoteTemps.before.mir index 208e8e698ab74..63a7c4bc43d7a 100644 --- a/src/test/mir-opt/match_false_edges/rustc.main.PromoteTemps.before.mir +++ b/src/test/mir-opt/match_false_edges/rustc.main.PromoteTemps.before.mir @@ -45,18 +45,22 @@ fn main() -> () { // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/match_false_edges.rs:35:19: 35:26 _4 = discriminant(_2); // scope 0 at $DIR/match_false_edges.rs:36:9: 36:17 - switchInt(move _4) -> [1isize: bb2, otherwise: bb1]; // scope 0 at $DIR/match_false_edges.rs:36:9: 36:17 + switchInt(move _4) -> [1isize: bb3, otherwise: bb2]; // scope 0 at $DIR/match_false_edges.rs:36:9: 36:17 } - bb1: { - falseEdges -> [real: bb9, imaginary: bb4]; // scope 0 at $DIR/match_false_edges.rs:37:9: 37:11 + bb1 (cleanup): { + resume; // scope 0 at $DIR/match_false_edges.rs:34:1: 41:2 } bb2: { - falseEdges -> [real: bb5, imaginary: bb1]; // scope 0 at $DIR/match_false_edges.rs:36:9: 36:17 + falseEdges -> [real: bb10, imaginary: bb5]; // scope 0 at $DIR/match_false_edges.rs:37:9: 37:11 } bb3: { + falseEdges -> [real: bb6, imaginary: bb2]; // scope 0 at $DIR/match_false_edges.rs:36:9: 36:17 + } + + bb4: { StorageLive(_14); // scope 0 at $DIR/match_false_edges.rs:39:9: 39:11 _14 = _2; // scope 0 at $DIR/match_false_edges.rs:39:9: 39:11 _1 = const 4i32; // scope 5 at $DIR/match_false_edges.rs:39:15: 39:16 @@ -67,19 +71,19 @@ fn main() -> () { // + span: $DIR/match_false_edges.rs:39:15: 39:16 // + literal: Const { ty: i32, val: Value(Scalar(0x00000004)) } StorageDead(_14); // scope 0 at $DIR/match_false_edges.rs:39:16: 39:17 - goto -> bb14; // 
scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 + goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 } - bb4: { - falseEdges -> [real: bb10, imaginary: bb3]; // scope 0 at $DIR/match_false_edges.rs:38:9: 38:16 + bb5: { + falseEdges -> [real: bb11, imaginary: bb4]; // scope 0 at $DIR/match_false_edges.rs:38:9: 38:16 } - bb5: { + bb6: { StorageLive(_7); // scope 0 at $DIR/match_false_edges.rs:36:14: 36:16 _7 = &((_2 as Some).0: i32); // scope 0 at $DIR/match_false_edges.rs:36:14: 36:16 _5 = &shallow _2; // scope 0 at $DIR/match_false_edges.rs:35:19: 35:26 StorageLive(_8); // scope 0 at $DIR/match_false_edges.rs:36:21: 36:28 - _8 = const guard() -> [return: bb6, unwind: bb15]; // scope 0 at $DIR/match_false_edges.rs:36:21: 36:28 + _8 = const guard() -> [return: bb7, unwind: bb1]; // scope 0 at $DIR/match_false_edges.rs:36:21: 36:28 // ty::Const // + ty: fn() -> bool {guard} // + val: Value(Scalar()) @@ -88,11 +92,11 @@ fn main() -> () { // + literal: Const { ty: fn() -> bool {guard}, val: Value(Scalar()) } } - bb6: { - switchInt(move _8) -> [false: bb8, otherwise: bb7]; // scope 0 at $DIR/match_false_edges.rs:36:21: 36:28 + bb7: { + switchInt(move _8) -> [false: bb9, otherwise: bb8]; // scope 0 at $DIR/match_false_edges.rs:36:21: 36:28 } - bb7: { + bb8: { StorageDead(_8); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 FakeRead(ForMatchGuard, _5); // scope 0 at $DIR/match_false_edges.rs:36:27: 36:28 FakeRead(ForGuardBinding, _7); // scope 0 at $DIR/match_false_edges.rs:36:27: 36:28 @@ -107,16 +111,16 @@ fn main() -> () { // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 - goto -> bb14; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 + goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 } - bb8: { + bb9: { StorageDead(_8); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 - falseEdges -> [real: bb1, imaginary: bb1]; // scope 0 at $DIR/match_false_edges.rs:36:21: 36:28 + falseEdges -> [real: bb2, imaginary: bb2]; // scope 0 at $DIR/match_false_edges.rs:36:21: 36:28 } - bb9: { + bb10: { StorageLive(_9); // scope 0 at $DIR/match_false_edges.rs:37:9: 37:11 _9 = _2; // scope 0 at $DIR/match_false_edges.rs:37:9: 37:11 _1 = const 2i32; // scope 3 at $DIR/match_false_edges.rs:37:15: 37:16 @@ -127,17 +131,17 @@ fn main() -> () { // + span: $DIR/match_false_edges.rs:37:15: 37:16 // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } StorageDead(_9); // scope 0 at $DIR/match_false_edges.rs:37:16: 37:17 - goto -> bb14; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 + goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 } - bb10: { + bb11: { StorageLive(_11); // scope 0 at $DIR/match_false_edges.rs:38:14: 38:15 _11 = &((_2 as Some).0: i32); // scope 0 at $DIR/match_false_edges.rs:38:14: 38:15 _5 = &shallow _2; // scope 0 at $DIR/match_false_edges.rs:35:19: 35:26 StorageLive(_12); // scope 0 at $DIR/match_false_edges.rs:38:20: 38:29 StorageLive(_13); // scope 0 at $DIR/match_false_edges.rs:38:27: 38:28 _13 = (*_11); // scope 0 at $DIR/match_false_edges.rs:38:27: 38:28 - _12 = const guard2(move _13) -> [return: bb11, unwind: bb15]; // scope 0 at $DIR/match_false_edges.rs:38:20: 38:29 + _12 = const guard2(move _13) -> [return: bb12, unwind: bb1]; // scope 0 at $DIR/match_false_edges.rs:38:20: 38:29 // ty::Const // + ty: 
fn(i32) -> bool {guard2} // + val: Value(Scalar()) @@ -146,12 +150,12 @@ fn main() -> () { // + literal: Const { ty: fn(i32) -> bool {guard2}, val: Value(Scalar()) } } - bb11: { + bb12: { StorageDead(_13); // scope 0 at $DIR/match_false_edges.rs:38:28: 38:29 - switchInt(move _12) -> [false: bb13, otherwise: bb12]; // scope 0 at $DIR/match_false_edges.rs:38:20: 38:29 + switchInt(move _12) -> [false: bb14, otherwise: bb13]; // scope 0 at $DIR/match_false_edges.rs:38:20: 38:29 } - bb12: { + bb13: { StorageDead(_12); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 FakeRead(ForMatchGuard, _5); // scope 0 at $DIR/match_false_edges.rs:38:28: 38:29 FakeRead(ForGuardBinding, _11); // scope 0 at $DIR/match_false_edges.rs:38:28: 38:29 @@ -166,16 +170,16 @@ fn main() -> () { // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } StorageDead(_10); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 StorageDead(_11); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 - goto -> bb14; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 + goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 } - bb13: { + bb14: { StorageDead(_12); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 StorageDead(_11); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 - falseEdges -> [real: bb3, imaginary: bb3]; // scope 0 at $DIR/match_false_edges.rs:38:20: 38:29 + falseEdges -> [real: bb4, imaginary: bb4]; // scope 0 at $DIR/match_false_edges.rs:38:20: 38:29 } - bb14: { + bb15: { StorageDead(_2); // scope 0 at $DIR/match_false_edges.rs:40:6: 40:7 StorageDead(_1); // scope 0 at $DIR/match_false_edges.rs:40:6: 40:7 _0 = const (); // scope 0 at $DIR/match_false_edges.rs:34:11: 41:2 @@ -187,8 +191,4 @@ fn main() -> () { // + literal: Const { ty: (), val: Value(Scalar()) } return; // scope 0 at $DIR/match_false_edges.rs:41:2: 41:2 } - - bb15 (cleanup): { - resume; // scope 0 at $DIR/match_false_edges.rs:34:1: 41:2 - } } diff --git a/src/test/mir-opt/nll/region-subtyping-basic/32bit/rustc.main.nll.0.mir b/src/test/mir-opt/nll/region-subtyping-basic/32bit/rustc.main.nll.0.mir index d6a3017d86537..3e0867d9b09d9 100644 --- a/src/test/mir-opt/nll/region-subtyping-basic/32bit/rustc.main.nll.0.mir +++ b/src/test/mir-opt/nll/region-subtyping-basic/32bit/rustc.main.nll.0.mir @@ -5,21 +5,21 @@ | '_#1r | Local | ['_#1r] | | Inferred Region Values -| '_#0r | U0 | {bb0[0..=8], bb1[0..=8], bb2[0], bb3[0..=1], bb4[0..=3], bb5[0..=3], bb6[0..=2], bb7[0..=5], bb8[0], '_#0r, '_#1r} -| '_#1r | U0 | {bb0[0..=8], bb1[0..=8], bb2[0], bb3[0..=1], bb4[0..=3], bb5[0..=3], bb6[0..=2], bb7[0..=5], bb8[0], '_#1r} +| '_#0r | U0 | {bb0[0..=8], bb1[0], bb2[0..=8], bb3[0], bb4[0..=1], bb5[0..=3], bb6[0..=3], bb7[0..=2], bb8[0..=5], '_#0r, '_#1r} +| '_#1r | U0 | {bb0[0..=8], bb1[0], bb2[0..=8], bb3[0], bb4[0..=1], bb5[0..=3], bb6[0..=3], bb7[0..=2], bb8[0..=5], '_#1r} | '_#2r | U0 | {} -| '_#3r | U0 | {bb1[0..=8], bb2[0], bb4[0..=2]} -| '_#4r | U0 | {bb1[1..=8], bb2[0], bb4[0..=2]} -| '_#5r | U0 | {bb1[4..=8], bb2[0], bb4[0..=2]} +| '_#3r | U0 | {bb2[0..=8], bb3[0], bb5[0..=2]} +| '_#4r | U0 | {bb2[1..=8], bb3[0], bb5[0..=2]} +| '_#5r | U0 | {bb2[4..=8], bb3[0], bb5[0..=2]} | | Inference Constraints -| '_#0r live at {bb0[0..=8], bb1[0..=8], bb2[0], bb3[0..=1], bb4[0..=3], bb5[0..=3], bb6[0..=2], bb7[0..=5], bb8[0]} -| '_#1r live at {bb0[0..=8], bb1[0..=8], bb2[0], bb3[0..=1], bb4[0..=3], bb5[0..=3], bb6[0..=2], bb7[0..=5], bb8[0]} -| '_#3r live at {bb1[0]} -| '_#4r live at {bb1[1..=3]} -| '_#5r live at {bb1[4..=8], bb2[0], 
bb4[0..=2]} -| '_#3r: '_#4r due to Assignment at Single(bb1[0]) -| '_#4r: '_#5r due to Assignment at Single(bb1[3]) +| '_#0r live at {bb0[0..=8], bb1[0], bb2[0..=8], bb3[0], bb4[0..=1], bb5[0..=3], bb6[0..=3], bb7[0..=2], bb8[0..=5]} +| '_#1r live at {bb0[0..=8], bb1[0], bb2[0..=8], bb3[0], bb4[0..=1], bb5[0..=3], bb6[0..=3], bb7[0..=2], bb8[0..=5]} +| '_#3r live at {bb2[0]} +| '_#4r live at {bb2[1..=3]} +| '_#5r live at {bb2[4..=8], bb3[0], bb5[0..=2]} +| '_#3r: '_#4r due to Assignment at Single(bb2[0]) +| '_#4r: '_#5r due to Assignment at Single(bb2[3]) | fn main() -> () { let mut _0: (); // return place in scope 0 at $DIR/region-subtyping-basic.rs:16:11: 16:11 @@ -76,34 +76,38 @@ fn main() -> () { // + literal: Const { ty: usize, val: Value(Scalar(0x00000000)) } _4 = Len(_1); // bb0[6]: scope 1 at $DIR/region-subtyping-basic.rs:18:14: 18:18 _5 = Lt(_3, _4); // bb0[7]: scope 1 at $DIR/region-subtyping-basic.rs:18:14: 18:18 - assert(move _5, "index out of bounds: the len is {} but the index is {}", move _4, _3) -> [success: bb1, unwind: bb8]; // bb0[8]: scope 1 at $DIR/region-subtyping-basic.rs:18:14: 18:18 + assert(move _5, "index out of bounds: the len is {} but the index is {}", move _4, _3) -> [success: bb2, unwind: bb1]; // bb0[8]: scope 1 at $DIR/region-subtyping-basic.rs:18:14: 18:18 } - bb1: { - _2 = &'_#3r _1[_3]; // bb1[0]: scope 1 at $DIR/region-subtyping-basic.rs:18:13: 18:18 - FakeRead(ForLet, _2); // bb1[1]: scope 1 at $DIR/region-subtyping-basic.rs:18:9: 18:10 - StorageLive(_6); // bb1[2]: scope 2 at $DIR/region-subtyping-basic.rs:19:9: 19:10 - _6 = _2; // bb1[3]: scope 2 at $DIR/region-subtyping-basic.rs:19:13: 19:14 - FakeRead(ForLet, _6); // bb1[4]: scope 2 at $DIR/region-subtyping-basic.rs:19:9: 19:10 - StorageLive(_7); // bb1[5]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 - _7 = const Const(Value(Scalar(0x01)): bool); // bb1[6]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 + bb1 (cleanup): { + resume; // bb1[0]: scope 0 at $DIR/region-subtyping-basic.rs:16:1: 25:2 + } + + bb2: { + _2 = &'_#3r _1[_3]; // bb2[0]: scope 1 at $DIR/region-subtyping-basic.rs:18:13: 18:18 + FakeRead(ForLet, _2); // bb2[1]: scope 1 at $DIR/region-subtyping-basic.rs:18:9: 18:10 + StorageLive(_6); // bb2[2]: scope 2 at $DIR/region-subtyping-basic.rs:19:9: 19:10 + _6 = _2; // bb2[3]: scope 2 at $DIR/region-subtyping-basic.rs:19:13: 19:14 + FakeRead(ForLet, _6); // bb2[4]: scope 2 at $DIR/region-subtyping-basic.rs:19:9: 19:10 + StorageLive(_7); // bb2[5]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 + _7 = const Const(Value(Scalar(0x01)): bool); // bb2[6]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 // ty::Const // + ty: bool // + val: Value(Scalar(0x01)) // mir::Constant // + span: $DIR/region-subtyping-basic.rs:20:8: 20:12 // + literal: Const { ty: bool, val: Value(Scalar(0x01)) } - FakeRead(ForMatchedPlace, _7); // bb1[7]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 - switchInt(_7) -> [Const(Value(Scalar(0x00)): bool): bb3, otherwise: bb2]; // bb1[8]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 + FakeRead(ForMatchedPlace, _7); // bb2[7]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 + switchInt(_7) -> [Const(Value(Scalar(0x00)): bool): bb4, otherwise: bb3]; // bb2[8]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 } - bb2: { - falseEdges -> [real: bb4, imaginary: bb3]; // bb2[0]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 + bb3: { + falseEdges -> [real: bb5, imaginary: bb4]; // bb3[0]: scope 3 at 
$DIR/region-subtyping-basic.rs:20:5: 24:6 } - bb3: { - StorageLive(_10); // bb3[0]: scope 3 at $DIR/region-subtyping-basic.rs:23:9: 23:18 - _10 = const Const(Value(Scalar()): fn(usize) -> bool {use_x})(const Const(Value(Scalar(0x00000016)): usize)) -> [return: bb6, unwind: bb8]; // bb3[1]: scope 3 at $DIR/region-subtyping-basic.rs:23:9: 23:18 + bb4: { + StorageLive(_10); // bb4[0]: scope 3 at $DIR/region-subtyping-basic.rs:23:9: 23:18 + _10 = const Const(Value(Scalar()): fn(usize) -> bool {use_x})(const Const(Value(Scalar(0x00000016)): usize)) -> [return: bb7, unwind: bb1]; // bb4[1]: scope 3 at $DIR/region-subtyping-basic.rs:23:9: 23:18 // ty::Const // + ty: fn(usize) -> bool {use_x} // + val: Value(Scalar()) @@ -118,11 +122,11 @@ fn main() -> () { // + literal: Const { ty: usize, val: Value(Scalar(0x00000016)) } } - bb4: { - StorageLive(_8); // bb4[0]: scope 3 at $DIR/region-subtyping-basic.rs:21:9: 21:18 - StorageLive(_9); // bb4[1]: scope 3 at $DIR/region-subtyping-basic.rs:21:15: 21:17 - _9 = (*_6); // bb4[2]: scope 3 at $DIR/region-subtyping-basic.rs:21:15: 21:17 - _8 = const Const(Value(Scalar()): fn(usize) -> bool {use_x})(move _9) -> [return: bb5, unwind: bb8]; // bb4[3]: scope 3 at $DIR/region-subtyping-basic.rs:21:9: 21:18 + bb5: { + StorageLive(_8); // bb5[0]: scope 3 at $DIR/region-subtyping-basic.rs:21:9: 21:18 + StorageLive(_9); // bb5[1]: scope 3 at $DIR/region-subtyping-basic.rs:21:15: 21:17 + _9 = (*_6); // bb5[2]: scope 3 at $DIR/region-subtyping-basic.rs:21:15: 21:17 + _8 = const Const(Value(Scalar()): fn(usize) -> bool {use_x})(move _9) -> [return: bb6, unwind: bb1]; // bb5[3]: scope 3 at $DIR/region-subtyping-basic.rs:21:9: 21:18 // ty::Const // + ty: fn(usize) -> bool {use_x} // + val: Value(Scalar()) @@ -131,41 +135,37 @@ fn main() -> () { // + literal: Const { ty: fn(usize) -> bool {use_x}, val: Value(Scalar()) } } - bb5: { - StorageDead(_9); // bb5[0]: scope 3 at $DIR/region-subtyping-basic.rs:21:17: 21:18 - StorageDead(_8); // bb5[1]: scope 3 at $DIR/region-subtyping-basic.rs:21:18: 21:19 - _0 = const Const(Value(Scalar()): ()); // bb5[2]: scope 3 at $DIR/region-subtyping-basic.rs:20:13: 22:6 + bb6: { + StorageDead(_9); // bb6[0]: scope 3 at $DIR/region-subtyping-basic.rs:21:17: 21:18 + StorageDead(_8); // bb6[1]: scope 3 at $DIR/region-subtyping-basic.rs:21:18: 21:19 + _0 = const Const(Value(Scalar()): ()); // bb6[2]: scope 3 at $DIR/region-subtyping-basic.rs:20:13: 22:6 // ty::Const // + ty: () // + val: Value(Scalar()) // mir::Constant // + span: $DIR/region-subtyping-basic.rs:20:13: 22:6 // + literal: Const { ty: (), val: Value(Scalar()) } - goto -> bb7; // bb5[3]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 + goto -> bb8; // bb6[3]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 } - bb6: { - StorageDead(_10); // bb6[0]: scope 3 at $DIR/region-subtyping-basic.rs:23:18: 23:19 - _0 = const Const(Value(Scalar()): ()); // bb6[1]: scope 3 at $DIR/region-subtyping-basic.rs:22:12: 24:6 + bb7: { + StorageDead(_10); // bb7[0]: scope 3 at $DIR/region-subtyping-basic.rs:23:18: 23:19 + _0 = const Const(Value(Scalar()): ()); // bb7[1]: scope 3 at $DIR/region-subtyping-basic.rs:22:12: 24:6 // ty::Const // + ty: () // + val: Value(Scalar()) // mir::Constant // + span: $DIR/region-subtyping-basic.rs:22:12: 24:6 // + literal: Const { ty: (), val: Value(Scalar()) } - goto -> bb7; // bb6[2]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 - } - - bb7: { - StorageDead(_6); // bb7[0]: scope 2 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - StorageDead(_3); // 
bb7[1]: scope 1 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - StorageDead(_2); // bb7[2]: scope 1 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - StorageDead(_1); // bb7[3]: scope 0 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - StorageDead(_7); // bb7[4]: scope 0 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - return; // bb7[5]: scope 0 at $DIR/region-subtyping-basic.rs:25:2: 25:2 + goto -> bb8; // bb7[2]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 } - bb8 (cleanup): { - resume; // bb8[0]: scope 0 at $DIR/region-subtyping-basic.rs:16:1: 25:2 + bb8: { + StorageDead(_6); // bb8[0]: scope 2 at $DIR/region-subtyping-basic.rs:25:1: 25:2 + StorageDead(_3); // bb8[1]: scope 1 at $DIR/region-subtyping-basic.rs:25:1: 25:2 + StorageDead(_2); // bb8[2]: scope 1 at $DIR/region-subtyping-basic.rs:25:1: 25:2 + StorageDead(_1); // bb8[3]: scope 0 at $DIR/region-subtyping-basic.rs:25:1: 25:2 + StorageDead(_7); // bb8[4]: scope 0 at $DIR/region-subtyping-basic.rs:25:1: 25:2 + return; // bb8[5]: scope 0 at $DIR/region-subtyping-basic.rs:25:2: 25:2 } } diff --git a/src/test/mir-opt/nll/region-subtyping-basic/64bit/rustc.main.nll.0.mir b/src/test/mir-opt/nll/region-subtyping-basic/64bit/rustc.main.nll.0.mir index 66f2850566fff..61db4dba58627 100644 --- a/src/test/mir-opt/nll/region-subtyping-basic/64bit/rustc.main.nll.0.mir +++ b/src/test/mir-opt/nll/region-subtyping-basic/64bit/rustc.main.nll.0.mir @@ -5,21 +5,21 @@ | '_#1r | Local | ['_#1r] | | Inferred Region Values -| '_#0r | U0 | {bb0[0..=8], bb1[0..=8], bb2[0], bb3[0..=1], bb4[0..=3], bb5[0..=3], bb6[0..=2], bb7[0..=5], bb8[0], '_#0r, '_#1r} -| '_#1r | U0 | {bb0[0..=8], bb1[0..=8], bb2[0], bb3[0..=1], bb4[0..=3], bb5[0..=3], bb6[0..=2], bb7[0..=5], bb8[0], '_#1r} +| '_#0r | U0 | {bb0[0..=8], bb1[0], bb2[0..=8], bb3[0], bb4[0..=1], bb5[0..=3], bb6[0..=3], bb7[0..=2], bb8[0..=5], '_#0r, '_#1r} +| '_#1r | U0 | {bb0[0..=8], bb1[0], bb2[0..=8], bb3[0], bb4[0..=1], bb5[0..=3], bb6[0..=3], bb7[0..=2], bb8[0..=5], '_#1r} | '_#2r | U0 | {} -| '_#3r | U0 | {bb1[0..=8], bb2[0], bb4[0..=2]} -| '_#4r | U0 | {bb1[1..=8], bb2[0], bb4[0..=2]} -| '_#5r | U0 | {bb1[4..=8], bb2[0], bb4[0..=2]} +| '_#3r | U0 | {bb2[0..=8], bb3[0], bb5[0..=2]} +| '_#4r | U0 | {bb2[1..=8], bb3[0], bb5[0..=2]} +| '_#5r | U0 | {bb2[4..=8], bb3[0], bb5[0..=2]} | | Inference Constraints -| '_#0r live at {bb0[0..=8], bb1[0..=8], bb2[0], bb3[0..=1], bb4[0..=3], bb5[0..=3], bb6[0..=2], bb7[0..=5], bb8[0]} -| '_#1r live at {bb0[0..=8], bb1[0..=8], bb2[0], bb3[0..=1], bb4[0..=3], bb5[0..=3], bb6[0..=2], bb7[0..=5], bb8[0]} -| '_#3r live at {bb1[0]} -| '_#4r live at {bb1[1..=3]} -| '_#5r live at {bb1[4..=8], bb2[0], bb4[0..=2]} -| '_#3r: '_#4r due to Assignment at Single(bb1[0]) -| '_#4r: '_#5r due to Assignment at Single(bb1[3]) +| '_#0r live at {bb0[0..=8], bb1[0], bb2[0..=8], bb3[0], bb4[0..=1], bb5[0..=3], bb6[0..=3], bb7[0..=2], bb8[0..=5]} +| '_#1r live at {bb0[0..=8], bb1[0], bb2[0..=8], bb3[0], bb4[0..=1], bb5[0..=3], bb6[0..=3], bb7[0..=2], bb8[0..=5]} +| '_#3r live at {bb2[0]} +| '_#4r live at {bb2[1..=3]} +| '_#5r live at {bb2[4..=8], bb3[0], bb5[0..=2]} +| '_#3r: '_#4r due to Assignment at Single(bb2[0]) +| '_#4r: '_#5r due to Assignment at Single(bb2[3]) | fn main() -> () { let mut _0: (); // return place in scope 0 at $DIR/region-subtyping-basic.rs:16:11: 16:11 @@ -76,34 +76,38 @@ fn main() -> () { // + literal: Const { ty: usize, val: Value(Scalar(0x0000000000000000)) } _4 = Len(_1); // bb0[6]: scope 1 at $DIR/region-subtyping-basic.rs:18:14: 18:18 _5 = Lt(_3, _4); // bb0[7]: 
scope 1 at $DIR/region-subtyping-basic.rs:18:14: 18:18 - assert(move _5, "index out of bounds: the len is {} but the index is {}", move _4, _3) -> [success: bb1, unwind: bb8]; // bb0[8]: scope 1 at $DIR/region-subtyping-basic.rs:18:14: 18:18 + assert(move _5, "index out of bounds: the len is {} but the index is {}", move _4, _3) -> [success: bb2, unwind: bb1]; // bb0[8]: scope 1 at $DIR/region-subtyping-basic.rs:18:14: 18:18 } - bb1: { - _2 = &'_#3r _1[_3]; // bb1[0]: scope 1 at $DIR/region-subtyping-basic.rs:18:13: 18:18 - FakeRead(ForLet, _2); // bb1[1]: scope 1 at $DIR/region-subtyping-basic.rs:18:9: 18:10 - StorageLive(_6); // bb1[2]: scope 2 at $DIR/region-subtyping-basic.rs:19:9: 19:10 - _6 = _2; // bb1[3]: scope 2 at $DIR/region-subtyping-basic.rs:19:13: 19:14 - FakeRead(ForLet, _6); // bb1[4]: scope 2 at $DIR/region-subtyping-basic.rs:19:9: 19:10 - StorageLive(_7); // bb1[5]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 - _7 = const Const(Value(Scalar(0x01)): bool); // bb1[6]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 + bb1 (cleanup): { + resume; // bb1[0]: scope 0 at $DIR/region-subtyping-basic.rs:16:1: 25:2 + } + + bb2: { + _2 = &'_#3r _1[_3]; // bb2[0]: scope 1 at $DIR/region-subtyping-basic.rs:18:13: 18:18 + FakeRead(ForLet, _2); // bb2[1]: scope 1 at $DIR/region-subtyping-basic.rs:18:9: 18:10 + StorageLive(_6); // bb2[2]: scope 2 at $DIR/region-subtyping-basic.rs:19:9: 19:10 + _6 = _2; // bb2[3]: scope 2 at $DIR/region-subtyping-basic.rs:19:13: 19:14 + FakeRead(ForLet, _6); // bb2[4]: scope 2 at $DIR/region-subtyping-basic.rs:19:9: 19:10 + StorageLive(_7); // bb2[5]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 + _7 = const Const(Value(Scalar(0x01)): bool); // bb2[6]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 // ty::Const // + ty: bool // + val: Value(Scalar(0x01)) // mir::Constant // + span: $DIR/region-subtyping-basic.rs:20:8: 20:12 // + literal: Const { ty: bool, val: Value(Scalar(0x01)) } - FakeRead(ForMatchedPlace, _7); // bb1[7]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 - switchInt(_7) -> [Const(Value(Scalar(0x00)): bool): bb3, otherwise: bb2]; // bb1[8]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 + FakeRead(ForMatchedPlace, _7); // bb2[7]: scope 3 at $DIR/region-subtyping-basic.rs:20:8: 20:12 + switchInt(_7) -> [Const(Value(Scalar(0x00)): bool): bb4, otherwise: bb3]; // bb2[8]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 } - bb2: { - falseEdges -> [real: bb4, imaginary: bb3]; // bb2[0]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 + bb3: { + falseEdges -> [real: bb5, imaginary: bb4]; // bb3[0]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 } - bb3: { - StorageLive(_10); // bb3[0]: scope 3 at $DIR/region-subtyping-basic.rs:23:9: 23:18 - _10 = const Const(Value(Scalar()): fn(usize) -> bool {use_x})(const Const(Value(Scalar(0x0000000000000016)): usize)) -> [return: bb6, unwind: bb8]; // bb3[1]: scope 3 at $DIR/region-subtyping-basic.rs:23:9: 23:18 + bb4: { + StorageLive(_10); // bb4[0]: scope 3 at $DIR/region-subtyping-basic.rs:23:9: 23:18 + _10 = const Const(Value(Scalar()): fn(usize) -> bool {use_x})(const Const(Value(Scalar(0x0000000000000016)): usize)) -> [return: bb7, unwind: bb1]; // bb4[1]: scope 3 at $DIR/region-subtyping-basic.rs:23:9: 23:18 // ty::Const // + ty: fn(usize) -> bool {use_x} // + val: Value(Scalar()) @@ -118,11 +122,11 @@ fn main() -> () { // + literal: Const { ty: usize, val: Value(Scalar(0x0000000000000016)) } } - bb4: { - StorageLive(_8); // bb4[0]: scope 3 at 
$DIR/region-subtyping-basic.rs:21:9: 21:18 - StorageLive(_9); // bb4[1]: scope 3 at $DIR/region-subtyping-basic.rs:21:15: 21:17 - _9 = (*_6); // bb4[2]: scope 3 at $DIR/region-subtyping-basic.rs:21:15: 21:17 - _8 = const Const(Value(Scalar()): fn(usize) -> bool {use_x})(move _9) -> [return: bb5, unwind: bb8]; // bb4[3]: scope 3 at $DIR/region-subtyping-basic.rs:21:9: 21:18 + bb5: { + StorageLive(_8); // bb5[0]: scope 3 at $DIR/region-subtyping-basic.rs:21:9: 21:18 + StorageLive(_9); // bb5[1]: scope 3 at $DIR/region-subtyping-basic.rs:21:15: 21:17 + _9 = (*_6); // bb5[2]: scope 3 at $DIR/region-subtyping-basic.rs:21:15: 21:17 + _8 = const Const(Value(Scalar()): fn(usize) -> bool {use_x})(move _9) -> [return: bb6, unwind: bb1]; // bb5[3]: scope 3 at $DIR/region-subtyping-basic.rs:21:9: 21:18 // ty::Const // + ty: fn(usize) -> bool {use_x} // + val: Value(Scalar()) @@ -131,41 +135,37 @@ fn main() -> () { // + literal: Const { ty: fn(usize) -> bool {use_x}, val: Value(Scalar()) } } - bb5: { - StorageDead(_9); // bb5[0]: scope 3 at $DIR/region-subtyping-basic.rs:21:17: 21:18 - StorageDead(_8); // bb5[1]: scope 3 at $DIR/region-subtyping-basic.rs:21:18: 21:19 - _0 = const Const(Value(Scalar()): ()); // bb5[2]: scope 3 at $DIR/region-subtyping-basic.rs:20:13: 22:6 + bb6: { + StorageDead(_9); // bb6[0]: scope 3 at $DIR/region-subtyping-basic.rs:21:17: 21:18 + StorageDead(_8); // bb6[1]: scope 3 at $DIR/region-subtyping-basic.rs:21:18: 21:19 + _0 = const Const(Value(Scalar()): ()); // bb6[2]: scope 3 at $DIR/region-subtyping-basic.rs:20:13: 22:6 // ty::Const // + ty: () // + val: Value(Scalar()) // mir::Constant // + span: $DIR/region-subtyping-basic.rs:20:13: 22:6 // + literal: Const { ty: (), val: Value(Scalar()) } - goto -> bb7; // bb5[3]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 + goto -> bb8; // bb6[3]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 } - bb6: { - StorageDead(_10); // bb6[0]: scope 3 at $DIR/region-subtyping-basic.rs:23:18: 23:19 - _0 = const Const(Value(Scalar()): ()); // bb6[1]: scope 3 at $DIR/region-subtyping-basic.rs:22:12: 24:6 + bb7: { + StorageDead(_10); // bb7[0]: scope 3 at $DIR/region-subtyping-basic.rs:23:18: 23:19 + _0 = const Const(Value(Scalar()): ()); // bb7[1]: scope 3 at $DIR/region-subtyping-basic.rs:22:12: 24:6 // ty::Const // + ty: () // + val: Value(Scalar()) // mir::Constant // + span: $DIR/region-subtyping-basic.rs:22:12: 24:6 // + literal: Const { ty: (), val: Value(Scalar()) } - goto -> bb7; // bb6[2]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 - } - - bb7: { - StorageDead(_6); // bb7[0]: scope 2 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - StorageDead(_3); // bb7[1]: scope 1 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - StorageDead(_2); // bb7[2]: scope 1 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - StorageDead(_1); // bb7[3]: scope 0 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - StorageDead(_7); // bb7[4]: scope 0 at $DIR/region-subtyping-basic.rs:25:1: 25:2 - return; // bb7[5]: scope 0 at $DIR/region-subtyping-basic.rs:25:2: 25:2 + goto -> bb8; // bb7[2]: scope 3 at $DIR/region-subtyping-basic.rs:20:5: 24:6 } - bb8 (cleanup): { - resume; // bb8[0]: scope 0 at $DIR/region-subtyping-basic.rs:16:1: 25:2 + bb8: { + StorageDead(_6); // bb8[0]: scope 2 at $DIR/region-subtyping-basic.rs:25:1: 25:2 + StorageDead(_3); // bb8[1]: scope 1 at $DIR/region-subtyping-basic.rs:25:1: 25:2 + StorageDead(_2); // bb8[2]: scope 1 at $DIR/region-subtyping-basic.rs:25:1: 25:2 + StorageDead(_1); // bb8[3]: scope 0 at 
$DIR/region-subtyping-basic.rs:25:1: 25:2 + StorageDead(_7); // bb8[4]: scope 0 at $DIR/region-subtyping-basic.rs:25:1: 25:2 + return; // bb8[5]: scope 0 at $DIR/region-subtyping-basic.rs:25:2: 25:2 } } diff --git a/src/test/mir-opt/no-drop-for-inactive-variant/rustc.unwrap.SimplifyCfg-elaborate-drops.after.mir b/src/test/mir-opt/no-drop-for-inactive-variant/rustc.unwrap.SimplifyCfg-elaborate-drops.after.mir index cc8e01d298507..eb6911735a59e 100644 --- a/src/test/mir-opt/no-drop-for-inactive-variant/rustc.unwrap.SimplifyCfg-elaborate-drops.after.mir +++ b/src/test/mir-opt/no-drop-for-inactive-variant/rustc.unwrap.SimplifyCfg-elaborate-drops.after.mir @@ -14,12 +14,16 @@ fn unwrap(_1: std::option::Option) -> T { bb0: { _2 = discriminant(_1); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:9: 9:16 - switchInt(move _2) -> [0isize: bb1, 1isize: bb3, otherwise: bb2]; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:9: 9:16 + switchInt(move _2) -> [0isize: bb2, 1isize: bb4, otherwise: bb3]; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:9: 9:16 } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:7:1: 12:2 + } + + bb2: { StorageLive(_4); // scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL - const std::rt::begin_panic::<&str>(const "explicit panic") -> bb4; // scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL + const std::rt::begin_panic::<&str>(const "explicit panic") -> bb5; // scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL // ty::Const // + ty: fn(&str) -> ! {std::rt::begin_panic::<&str>} // + val: Value(Scalar()) @@ -34,11 +38,11 @@ fn unwrap(_1: std::option::Option) -> T { // + literal: Const { ty: &str, val: Value(Slice { data: Allocation { bytes: [101, 120, 112, 108, 105, 99, 105, 116, 32, 112, 97, 110, 105, 99], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [16383], len: Size { raw: 14 } }, size: Size { raw: 14 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 14 }) } } - bb2: { + bb3: { unreachable; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:8:11: 8:14 } - bb3: { + bb4: { StorageLive(_3); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:14: 9:15 _3 = move ((_1 as Some).0: T); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:14: 9:15 _0 = move _3; // scope 1 at $DIR/no-drop-for-inactive-variant.rs:9:20: 9:21 @@ -47,11 +51,7 @@ fn unwrap(_1: std::option::Option) -> T { return; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:12:2: 12:2 } - bb4 (cleanup): { - drop(_1) -> bb5; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:12:1: 12:2 - } - bb5 (cleanup): { - resume; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:7:1: 12:2 + drop(_1) -> bb1; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:12:1: 12:2 } } diff --git a/src/test/mir-opt/no-spurious-drop-after-call/rustc.main.ElaborateDrops.before.mir b/src/test/mir-opt/no-spurious-drop-after-call/rustc.main.ElaborateDrops.before.mir index 0d619af101a18..0af213e425fe4 100644 --- a/src/test/mir-opt/no-spurious-drop-after-call/rustc.main.ElaborateDrops.before.mir +++ b/src/test/mir-opt/no-spurious-drop-after-call/rustc.main.ElaborateDrops.before.mir @@ -20,7 +20,7 @@ fn main() -> () { // + span: $DIR/no-spurious-drop-after-call.rs:9:20: 9:22 // + literal: Const { ty: &str, val: Value(Slice { data: Allocation { bytes: [], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [], len: Size { raw: 0 } }, size: Size { raw: 0 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 0 }) 
} _3 = &(*_4); // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:20: 9:22 - _2 = const ::to_string(move _3) -> bb1; // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:20: 9:34 + _2 = const ::to_string(move _3) -> bb2; // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:20: 9:34 // ty::Const // + ty: for<'r> fn(&'r str) -> std::string::String {::to_string} // + val: Value(Scalar()) @@ -29,9 +29,13 @@ fn main() -> () { // + literal: Const { ty: for<'r> fn(&'r str) -> std::string::String {::to_string}, val: Value(Scalar()) } } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/no-spurious-drop-after-call.rs:8:1: 10:2 + } + + bb2: { StorageDead(_3); // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:33: 9:34 - _1 = const std::mem::drop::(move _2) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:5: 9:35 + _1 = const std::mem::drop::(move _2) -> [return: bb3, unwind: bb4]; // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:5: 9:35 // ty::Const // + ty: fn(std::string::String) {std::mem::drop::} // + val: Value(Scalar()) @@ -40,7 +44,7 @@ fn main() -> () { // + literal: Const { ty: fn(std::string::String) {std::mem::drop::}, val: Value(Scalar()) } } - bb2: { + bb3: { StorageDead(_2); // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:34: 9:35 StorageDead(_4); // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:35: 9:36 StorageDead(_1); // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:35: 9:36 @@ -54,11 +58,7 @@ fn main() -> () { return; // scope 0 at $DIR/no-spurious-drop-after-call.rs:10:2: 10:2 } - bb3 (cleanup): { - drop(_2) -> bb4; // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:34: 9:35 - } - bb4 (cleanup): { - resume; // scope 0 at $DIR/no-spurious-drop-after-call.rs:8:1: 10:2 + drop(_2) -> bb1; // scope 0 at $DIR/no-spurious-drop-after-call.rs:9:34: 9:35 } } diff --git a/src/test/mir-opt/packed-struct-drop-aligned/32bit/rustc.main.SimplifyCfg-elaborate-drops.after.mir b/src/test/mir-opt/packed-struct-drop-aligned/32bit/rustc.main.SimplifyCfg-elaborate-drops.after.mir index b2da77320b8e1..21dab9ab92394 100644 --- a/src/test/mir-opt/packed-struct-drop-aligned/32bit/rustc.main.SimplifyCfg-elaborate-drops.after.mir +++ b/src/test/mir-opt/packed-struct-drop-aligned/32bit/rustc.main.SimplifyCfg-elaborate-drops.after.mir @@ -43,18 +43,18 @@ fn main() -> () { drop(_6) -> [return: bb4, unwind: bb3]; // scope 1 at $DIR/packed-struct-drop-aligned.rs:7:5: 7:8 } - bb1: { - StorageDead(_1); // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 - return; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:2: 8:2 + bb1 (cleanup): { + resume; // scope 0 at $DIR/packed-struct-drop-aligned.rs:5:1: 8:2 } - bb2 (cleanup): { - resume; // scope 0 at $DIR/packed-struct-drop-aligned.rs:5:1: 8:2 + bb2: { + StorageDead(_1); // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 + return; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:2: 8:2 } bb3 (cleanup): { (_1.0: Aligned) = move _4; // scope 1 at $DIR/packed-struct-drop-aligned.rs:7:5: 7:8 - drop(_1) -> bb2; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 + drop(_1) -> bb1; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 } bb4: { @@ -68,6 +68,6 @@ fn main() -> () { // mir::Constant // + span: $DIR/packed-struct-drop-aligned.rs:5:11: 8:2 // + literal: Const { ty: (), val: Value(Scalar()) } - drop(_1) -> [return: bb1, unwind: bb2]; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 + drop(_1) -> [return: bb2, unwind: bb1]; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 
8:2 } } diff --git a/src/test/mir-opt/packed-struct-drop-aligned/64bit/rustc.main.SimplifyCfg-elaborate-drops.after.mir b/src/test/mir-opt/packed-struct-drop-aligned/64bit/rustc.main.SimplifyCfg-elaborate-drops.after.mir index b9466d88299d5..cf46f74c16df3 100644 --- a/src/test/mir-opt/packed-struct-drop-aligned/64bit/rustc.main.SimplifyCfg-elaborate-drops.after.mir +++ b/src/test/mir-opt/packed-struct-drop-aligned/64bit/rustc.main.SimplifyCfg-elaborate-drops.after.mir @@ -43,18 +43,18 @@ fn main() -> () { drop(_6) -> [return: bb4, unwind: bb3]; // scope 1 at $DIR/packed-struct-drop-aligned.rs:7:5: 7:8 } - bb1: { - StorageDead(_1); // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 - return; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:2: 8:2 + bb1 (cleanup): { + resume; // scope 0 at $DIR/packed-struct-drop-aligned.rs:5:1: 8:2 } - bb2 (cleanup): { - resume; // scope 0 at $DIR/packed-struct-drop-aligned.rs:5:1: 8:2 + bb2: { + StorageDead(_1); // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 + return; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:2: 8:2 } bb3 (cleanup): { (_1.0: Aligned) = move _4; // scope 1 at $DIR/packed-struct-drop-aligned.rs:7:5: 7:8 - drop(_1) -> bb2; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 + drop(_1) -> bb1; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 } bb4: { @@ -68,6 +68,6 @@ fn main() -> () { // mir::Constant // + span: $DIR/packed-struct-drop-aligned.rs:5:11: 8:2 // + literal: Const { ty: (), val: Value(Scalar()) } - drop(_1) -> [return: bb1, unwind: bb2]; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 + drop(_1) -> [return: bb2, unwind: bb1]; // scope 0 at $DIR/packed-struct-drop-aligned.rs:8:1: 8:2 } } diff --git a/src/test/mir-opt/retag/rustc.main.SimplifyCfg-elaborate-drops.after.mir b/src/test/mir-opt/retag/rustc.main.SimplifyCfg-elaborate-drops.after.mir index 459c6b7a70a36..c8c5da37abe32 100644 --- a/src/test/mir-opt/retag/rustc.main.SimplifyCfg-elaborate-drops.after.mir +++ b/src/test/mir-opt/retag/rustc.main.SimplifyCfg-elaborate-drops.after.mir @@ -82,7 +82,7 @@ fn main() -> () { Retag(_7); // scope 1 at $DIR/retag.rs:32:29: 32:35 _6 = &mut (*_7); // scope 1 at $DIR/retag.rs:32:29: 32:35 Retag([2phase] _6); // scope 1 at $DIR/retag.rs:32:29: 32:35 - _3 = const Test::foo(move _4, move _6) -> [return: bb1, unwind: bb7]; // scope 1 at $DIR/retag.rs:32:17: 32:36 + _3 = const Test::foo(move _4, move _6) -> [return: bb2, unwind: bb3]; // scope 1 at $DIR/retag.rs:32:17: 32:36 // ty::Const // + ty: for<'r, 'x> fn(&'r Test, &'x mut i32) -> &'x mut i32 {Test::foo} // + val: Value(Scalar()) @@ -91,15 +91,23 @@ fn main() -> () { // + literal: Const { ty: for<'r, 'x> fn(&'r Test, &'x mut i32) -> &'x mut i32 {Test::foo}, val: Value(Scalar()) } } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/retag.rs:29:1: 51:2 + } + + bb2: { Retag(_3); // scope 1 at $DIR/retag.rs:32:17: 32:36 StorageDead(_6); // scope 1 at $DIR/retag.rs:32:35: 32:36 StorageDead(_4); // scope 1 at $DIR/retag.rs:32:35: 32:36 StorageDead(_7); // scope 1 at $DIR/retag.rs:32:36: 32:37 - drop(_5) -> [return: bb2, unwind: bb8]; // scope 1 at $DIR/retag.rs:32:36: 32:37 + drop(_5) -> [return: bb4, unwind: bb1]; // scope 1 at $DIR/retag.rs:32:36: 32:37 } - bb2: { + bb3 (cleanup): { + drop(_5) -> bb1; // scope 1 at $DIR/retag.rs:32:36: 32:37 + } + + bb4: { StorageDead(_5); // scope 1 at $DIR/retag.rs:32:36: 32:37 StorageLive(_8); // scope 2 at $DIR/retag.rs:33:13: 33:14 StorageLive(_9); // scope 2 at $DIR/retag.rs:33:19: 33:20 @@ 
-151,10 +159,10 @@ fn main() -> () { Retag(_18); // scope 6 at $DIR/retag.rs:44:16: 44:18 _17 = &(*_18); // scope 6 at $DIR/retag.rs:44:16: 44:18 Retag(_17); // scope 6 at $DIR/retag.rs:44:16: 44:18 - _15 = move _16(move _17) -> bb3; // scope 6 at $DIR/retag.rs:44:14: 44:19 + _15 = move _16(move _17) -> bb5; // scope 6 at $DIR/retag.rs:44:14: 44:19 } - bb3: { + bb5: { Retag(_15); // scope 6 at $DIR/retag.rs:44:14: 44:19 StorageDead(_17); // scope 6 at $DIR/retag.rs:44:18: 44:19 StorageDead(_16); // scope 6 at $DIR/retag.rs:44:18: 44:19 @@ -185,7 +193,7 @@ fn main() -> () { Retag(_23); // scope 7 at $DIR/retag.rs:47:21: 47:23 _22 = &(*_23); // scope 7 at $DIR/retag.rs:47:21: 47:23 Retag(_22); // scope 7 at $DIR/retag.rs:47:21: 47:23 - _19 = const Test::foo_shr(move _20, move _22) -> [return: bb4, unwind: bb6]; // scope 7 at $DIR/retag.rs:47:5: 47:24 + _19 = const Test::foo_shr(move _20, move _22) -> [return: bb6, unwind: bb7]; // scope 7 at $DIR/retag.rs:47:5: 47:24 // ty::Const // + ty: for<'r, 'x> fn(&'r Test, &'x i32) -> &'x i32 {Test::foo_shr} // + val: Value(Scalar()) @@ -194,15 +202,19 @@ fn main() -> () { // + literal: Const { ty: for<'r, 'x> fn(&'r Test, &'x i32) -> &'x i32 {Test::foo_shr}, val: Value(Scalar()) } } - bb4: { + bb6: { Retag(_19); // scope 7 at $DIR/retag.rs:47:5: 47:24 StorageDead(_22); // scope 7 at $DIR/retag.rs:47:23: 47:24 StorageDead(_20); // scope 7 at $DIR/retag.rs:47:23: 47:24 StorageDead(_23); // scope 7 at $DIR/retag.rs:47:24: 47:25 - drop(_21) -> [return: bb5, unwind: bb8]; // scope 7 at $DIR/retag.rs:47:24: 47:25 + drop(_21) -> [return: bb8, unwind: bb1]; // scope 7 at $DIR/retag.rs:47:24: 47:25 } - bb5: { + bb7 (cleanup): { + drop(_21) -> bb1; // scope 7 at $DIR/retag.rs:47:24: 47:25 + } + + bb8: { StorageDead(_21); // scope 7 at $DIR/retag.rs:47:24: 47:25 StorageDead(_19); // scope 7 at $DIR/retag.rs:47:24: 47:25 StorageLive(_25); // scope 7 at $DIR/retag.rs:50:9: 50:11 @@ -224,16 +236,4 @@ fn main() -> () { StorageDead(_1); // scope 0 at $DIR/retag.rs:51:1: 51:2 return; // scope 0 at $DIR/retag.rs:51:2: 51:2 } - - bb6 (cleanup): { - drop(_21) -> bb8; // scope 7 at $DIR/retag.rs:47:24: 47:25 - } - - bb7 (cleanup): { - drop(_5) -> bb8; // scope 1 at $DIR/retag.rs:32:36: 32:37 - } - - bb8 (cleanup): { - resume; // scope 0 at $DIR/retag.rs:29:1: 51:2 - } } diff --git a/src/test/mir-opt/simple-match/32bit/rustc.match_bool.mir_map.0.mir b/src/test/mir-opt/simple-match/32bit/rustc.match_bool.mir_map.0.mir index be4a472e9af50..3a7d6a7ca3c61 100644 --- a/src/test/mir-opt/simple-match/32bit/rustc.match_bool.mir_map.0.mir +++ b/src/test/mir-opt/simple-match/32bit/rustc.match_bool.mir_map.0.mir @@ -6,14 +6,18 @@ fn match_bool(_1: bool) -> usize { bb0: { FakeRead(ForMatchedPlace, _1); // scope 0 at $DIR/simple-match.rs:6:11: 6:12 - switchInt(_1) -> [false: bb2, otherwise: bb1]; // scope 0 at $DIR/simple-match.rs:7:9: 7:13 + switchInt(_1) -> [false: bb3, otherwise: bb2]; // scope 0 at $DIR/simple-match.rs:7:9: 7:13 } - bb1: { - falseEdges -> [real: bb3, imaginary: bb2]; // scope 0 at $DIR/simple-match.rs:7:9: 7:13 + bb1 (cleanup): { + resume; // scope 0 at $DIR/simple-match.rs:5:1: 10:2 } bb2: { + falseEdges -> [real: bb4, imaginary: bb3]; // scope 0 at $DIR/simple-match.rs:7:9: 7:13 + } + + bb3: { _0 = const 20usize; // scope 0 at $DIR/simple-match.rs:8:14: 8:16 // ty::Const // + ty: usize @@ -21,10 +25,10 @@ fn match_bool(_1: bool) -> usize { // mir::Constant // + span: $DIR/simple-match.rs:8:14: 8:16 // + literal: Const { ty: usize, val: Value(Scalar(0x00000014)) 
} - goto -> bb4; // scope 0 at $DIR/simple-match.rs:6:5: 9:6 + goto -> bb5; // scope 0 at $DIR/simple-match.rs:6:5: 9:6 } - bb3: { + bb4: { _0 = const 10usize; // scope 0 at $DIR/simple-match.rs:7:17: 7:19 // ty::Const // + ty: usize @@ -32,10 +36,14 @@ fn match_bool(_1: bool) -> usize { // mir::Constant // + span: $DIR/simple-match.rs:7:17: 7:19 // + literal: Const { ty: usize, val: Value(Scalar(0x0000000a)) } - goto -> bb4; // scope 0 at $DIR/simple-match.rs:6:5: 9:6 + goto -> bb5; // scope 0 at $DIR/simple-match.rs:6:5: 9:6 } - bb4: { + bb5: { + goto -> bb6; // scope 0 at $DIR/simple-match.rs:10:2: 10:2 + } + + bb6: { return; // scope 0 at $DIR/simple-match.rs:10:2: 10:2 } } diff --git a/src/test/mir-opt/simple-match/64bit/rustc.match_bool.mir_map.0.mir b/src/test/mir-opt/simple-match/64bit/rustc.match_bool.mir_map.0.mir index 1dde4386ab848..170181177b26a 100644 --- a/src/test/mir-opt/simple-match/64bit/rustc.match_bool.mir_map.0.mir +++ b/src/test/mir-opt/simple-match/64bit/rustc.match_bool.mir_map.0.mir @@ -6,14 +6,18 @@ fn match_bool(_1: bool) -> usize { bb0: { FakeRead(ForMatchedPlace, _1); // scope 0 at $DIR/simple-match.rs:6:11: 6:12 - switchInt(_1) -> [false: bb2, otherwise: bb1]; // scope 0 at $DIR/simple-match.rs:7:9: 7:13 + switchInt(_1) -> [false: bb3, otherwise: bb2]; // scope 0 at $DIR/simple-match.rs:7:9: 7:13 } - bb1: { - falseEdges -> [real: bb3, imaginary: bb2]; // scope 0 at $DIR/simple-match.rs:7:9: 7:13 + bb1 (cleanup): { + resume; // scope 0 at $DIR/simple-match.rs:5:1: 10:2 } bb2: { + falseEdges -> [real: bb4, imaginary: bb3]; // scope 0 at $DIR/simple-match.rs:7:9: 7:13 + } + + bb3: { _0 = const 20usize; // scope 0 at $DIR/simple-match.rs:8:14: 8:16 // ty::Const // + ty: usize @@ -21,10 +25,10 @@ fn match_bool(_1: bool) -> usize { // mir::Constant // + span: $DIR/simple-match.rs:8:14: 8:16 // + literal: Const { ty: usize, val: Value(Scalar(0x0000000000000014)) } - goto -> bb4; // scope 0 at $DIR/simple-match.rs:6:5: 9:6 + goto -> bb5; // scope 0 at $DIR/simple-match.rs:6:5: 9:6 } - bb3: { + bb4: { _0 = const 10usize; // scope 0 at $DIR/simple-match.rs:7:17: 7:19 // ty::Const // + ty: usize @@ -32,10 +36,14 @@ fn match_bool(_1: bool) -> usize { // mir::Constant // + span: $DIR/simple-match.rs:7:17: 7:19 // + literal: Const { ty: usize, val: Value(Scalar(0x000000000000000a)) } - goto -> bb4; // scope 0 at $DIR/simple-match.rs:6:5: 9:6 + goto -> bb5; // scope 0 at $DIR/simple-match.rs:6:5: 9:6 } - bb4: { + bb5: { + goto -> bb6; // scope 0 at $DIR/simple-match.rs:10:2: 10:2 + } + + bb6: { return; // scope 0 at $DIR/simple-match.rs:10:2: 10:2 } } diff --git a/src/test/mir-opt/simplify-arm/rustc.id_try.SimplifyArmIdentity.diff b/src/test/mir-opt/simplify-arm/rustc.id_try.SimplifyArmIdentity.diff index ba6e1ac24cb4e..64b5f17430023 100644 --- a/src/test/mir-opt/simplify-arm/rustc.id_try.SimplifyArmIdentity.diff +++ b/src/test/mir-opt/simplify-arm/rustc.id_try.SimplifyArmIdentity.diff @@ -61,7 +61,7 @@ - discriminant(_0) = 0; // scope 1 at $DIR/simplify-arm.rs:25:5: 25:10 - StorageDead(_11); // scope 1 at $DIR/simplify-arm.rs:25:9: 25:10 StorageDead(_2); // scope 0 at $DIR/simplify-arm.rs:26:1: 26:2 - goto -> bb7; // scope 0 at $DIR/simplify-arm.rs:26:2: 26:2 + goto -> bb5; // scope 0 at $DIR/simplify-arm.rs:26:2: 26:2 } bb3: { @@ -74,7 +74,7 @@ StorageLive(_8); // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 StorageLive(_9); // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 _9 = _6; // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 - _8 = const >::from(move _9) -> bb5; // 
scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 + _8 = const >::from(move _9) -> bb6; // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 // ty::Const // + ty: fn(i32) -> i32 {>::from} // + val: Value(Scalar()) @@ -84,8 +84,12 @@ } bb5: { + return; // scope 0 at $DIR/simplify-arm.rs:26:2: 26:2 + } + + bb6: { StorageDead(_9); // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 - _0 = const as std::ops::Try>::from_error(move _8) -> bb6; // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 + _0 = const as std::ops::Try>::from_error(move _8) -> bb7; // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 // ty::Const // + ty: fn( as std::ops::Try>::Error) -> std::result::Result { as std::ops::Try>::from_error} // + val: Value(Scalar()) @@ -94,16 +98,12 @@ // + literal: Const { ty: fn( as std::ops::Try>::Error) -> std::result::Result { as std::ops::Try>::from_error}, val: Value(Scalar()) } } - bb6: { + bb7: { StorageDead(_8); // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 StorageDead(_6); // scope 0 at $DIR/simplify-arm.rs:24:14: 24:15 StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:24:15: 24:16 StorageDead(_2); // scope 0 at $DIR/simplify-arm.rs:26:1: 26:2 - goto -> bb7; // scope 0 at $DIR/simplify-arm.rs:26:2: 26:2 - } - - bb7: { - return; // scope 0 at $DIR/simplify-arm.rs:26:2: 26:2 + goto -> bb5; // scope 0 at $DIR/simplify-arm.rs:24:14: 24:15 } } diff --git a/src/test/mir-opt/simplify-arm/rustc.id_try.SimplifyBranchSame.diff b/src/test/mir-opt/simplify-arm/rustc.id_try.SimplifyBranchSame.diff index 4061c5e74ac61..01f57bec71a87 100644 --- a/src/test/mir-opt/simplify-arm/rustc.id_try.SimplifyBranchSame.diff +++ b/src/test/mir-opt/simplify-arm/rustc.id_try.SimplifyBranchSame.diff @@ -52,7 +52,7 @@ _0 = move _3; // scope 1 at $DIR/simplify-arm.rs:25:5: 25:10 StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:24:15: 24:16 StorageDead(_2); // scope 0 at $DIR/simplify-arm.rs:26:1: 26:2 - goto -> bb7; // scope 0 at $DIR/simplify-arm.rs:26:2: 26:2 + goto -> bb5; // scope 0 at $DIR/simplify-arm.rs:26:2: 26:2 } bb3: { @@ -65,7 +65,7 @@ StorageLive(_8); // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 StorageLive(_9); // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 _9 = _6; // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 - _8 = const >::from(move _9) -> bb5; // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 + _8 = const >::from(move _9) -> bb6; // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 // ty::Const // + ty: fn(i32) -> i32 {>::from} // + val: Value(Scalar()) @@ -75,8 +75,12 @@ } bb5: { + return; // scope 0 at $DIR/simplify-arm.rs:26:2: 26:2 + } + + bb6: { StorageDead(_9); // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 - _0 = const as std::ops::Try>::from_error(move _8) -> bb6; // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 + _0 = const as std::ops::Try>::from_error(move _8) -> bb7; // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 // ty::Const // + ty: fn( as std::ops::Try>::Error) -> std::result::Result { as std::ops::Try>::from_error} // + val: Value(Scalar()) @@ -85,16 +89,12 @@ // + literal: Const { ty: fn( as std::ops::Try>::Error) -> std::result::Result { as std::ops::Try>::from_error}, val: Value(Scalar()) } } - bb6: { + bb7: { StorageDead(_8); // scope 3 at $DIR/simplify-arm.rs:24:14: 24:15 StorageDead(_6); // scope 0 at $DIR/simplify-arm.rs:24:14: 24:15 StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:24:15: 24:16 StorageDead(_2); // scope 0 at $DIR/simplify-arm.rs:26:1: 26:2 - goto -> bb7; // scope 0 at $DIR/simplify-arm.rs:26:2: 26:2 - } - - bb7: { - return; // scope 0 at $DIR/simplify-arm.rs:26:2: 
26:2 + goto -> bb5; // scope 0 at $DIR/simplify-arm.rs:24:14: 24:15 } } diff --git a/src/test/mir-opt/simplify_cfg/rustc.main.SimplifyCfg-early-opt.diff b/src/test/mir-opt/simplify_cfg/rustc.main.SimplifyCfg-early-opt.diff index 803635bd344fe..3b472ed3a0376 100644 --- a/src/test/mir-opt/simplify_cfg/rustc.main.SimplifyCfg-early-opt.diff +++ b/src/test/mir-opt/simplify_cfg/rustc.main.SimplifyCfg-early-opt.diff @@ -13,7 +13,7 @@ - - bb1: { StorageLive(_2); // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 -- _2 = const bar() -> bb2; // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 +- _2 = const bar() -> bb3; // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 + _2 = const bar() -> bb1; // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 // ty::Const // + ty: fn() -> bool {bar} @@ -23,18 +23,22 @@ // + literal: Const { ty: fn() -> bool {bar}, val: Value(Scalar()) } } -- bb2: { -- nop; // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 -- switchInt(_2) -> [false: bb4, otherwise: bb3]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 +- bb2 (cleanup): { +- resume; // scope 0 at $DIR/simplify_cfg.rs:5:1: 11:2 + bb1: { + switchInt(_2) -> [false: bb2, otherwise: bb3]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 } - bb3: { -- goto -> bb5; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 +- nop; // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 +- switchInt(_2) -> [false: bb5, otherwise: bb4]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 - } - - bb4: { +- goto -> bb6; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 +- } +- +- bb5: { + bb2: { _1 = const (); // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 // ty::Const @@ -47,7 +51,7 @@ goto -> bb0; // scope 0 at $DIR/simplify_cfg.rs:6:5: 10:6 } -- bb5: { +- bb6: { + bb3: { _0 = const (); // scope 0 at $DIR/simplify_cfg.rs:8:13: 8:18 // ty::Const @@ -58,10 +62,6 @@ // + literal: Const { ty: (), val: Value(Scalar()) } StorageDead(_2); // scope 0 at $DIR/simplify_cfg.rs:10:5: 10:6 return; // scope 0 at $DIR/simplify_cfg.rs:11:2: 11:2 -- } -- -- bb6 (cleanup): { -- resume; // scope 0 at $DIR/simplify_cfg.rs:5:1: 11:2 } } diff --git a/src/test/mir-opt/simplify_cfg/rustc.main.SimplifyCfg-initial.diff b/src/test/mir-opt/simplify_cfg/rustc.main.SimplifyCfg-initial.diff index b19b91653db5f..0cc2258f46364 100644 --- a/src/test/mir-opt/simplify_cfg/rustc.main.SimplifyCfg-initial.diff +++ b/src/test/mir-opt/simplify_cfg/rustc.main.SimplifyCfg-initial.diff @@ -9,17 +9,21 @@ bb0: { - goto -> bb1; // scope 0 at $DIR/simplify_cfg.rs:6:5: 10:6 -+ falseUnwind -> [real: bb1, cleanup: bb6]; // scope 0 at $DIR/simplify_cfg.rs:6:5: 10:6 ++ falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/simplify_cfg.rs:6:5: 10:6 } bb1: { -- falseUnwind -> [real: bb2, cleanup: bb11]; // scope 0 at $DIR/simplify_cfg.rs:6:5: 10:6 +- falseUnwind -> [real: bb3, cleanup: bb4]; // scope 0 at $DIR/simplify_cfg.rs:6:5: 10:6 - } - - bb2: { +- goto -> bb13; // scope 0 at $DIR/simplify_cfg.rs:11:2: 11:2 +- } +- +- bb3: { StorageLive(_2); // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 -- _2 = const bar() -> [return: bb3, unwind: bb11]; // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 -+ _2 = const bar() -> [return: bb2, unwind: bb6]; // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 +- _2 = const bar() -> [return: bb5, unwind: bb4]; // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 ++ _2 = const bar() -> [return: bb3, unwind: bb2]; // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 // ty::Const // + ty: fn() -> bool {bar} // + val: Value(Scalar()) @@ -28,21 +32,26 @@ // + literal: Const { ty: fn() -> bool {bar}, val: Value(Scalar()) } } -- bb3: { 
-+ bb2: { - FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 -- switchInt(_2) -> [false: bb5, otherwise: bb4]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 -+ switchInt(_2) -> [false: bb4, otherwise: bb3]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 +- bb4 (cleanup): { ++ bb2 (cleanup): { + resume; // scope 0 at $DIR/simplify_cfg.rs:5:1: 11:2 } -- bb4: { -- falseEdges -> [real: bb6, imaginary: bb5]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 +- bb5: { + bb3: { -+ falseEdges -> [real: bb5, imaginary: bb4]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 + FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/simplify_cfg.rs:7:12: 7:17 +- switchInt(_2) -> [false: bb7, otherwise: bb6]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 ++ switchInt(_2) -> [false: bb5, otherwise: bb4]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 } -- bb5: { +- bb6: { +- falseEdges -> [real: bb8, imaginary: bb7]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 + bb4: { ++ falseEdges -> [real: bb6, imaginary: bb5]; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 + } + +- bb7: { ++ bb5: { _1 = const (); // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 // ty::Const // + ty: () @@ -50,13 +59,13 @@ // mir::Constant // + span: $DIR/simplify_cfg.rs:7:9: 9:10 // + literal: Const { ty: (), val: Value(Scalar()) } -- goto -> bb9; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 +- goto -> bb12; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 + StorageDead(_2); // scope 0 at $DIR/simplify_cfg.rs:10:5: 10:6 + goto -> bb0; // scope 0 at $DIR/simplify_cfg.rs:6:5: 10:6 } -- bb6: { -+ bb5: { +- bb8: { ++ bb6: { _0 = const (); // scope 0 at $DIR/simplify_cfg.rs:8:13: 8:18 // ty::Const // + ty: () @@ -64,30 +73,29 @@ // mir::Constant // + span: $DIR/simplify_cfg.rs:8:13: 8:18 // + literal: Const { ty: (), val: Value(Scalar()) } -- goto -> bb10; // scope 0 at $DIR/simplify_cfg.rs:8:13: 8:18 +- goto -> bb9; // scope 0 at $DIR/simplify_cfg.rs:8:13: 8:18 - } - -- bb7: { +- bb9: { + StorageDead(_2); // scope 0 at $DIR/simplify_cfg.rs:10:5: 10:6 +- goto -> bb2; // scope 0 at $DIR/simplify_cfg.rs:8:13: 8:18 +- } +- +- bb10: { - unreachable; // scope 0 at $DIR/simplify_cfg.rs:7:18: 9:10 - } - -- bb8: { -- goto -> bb9; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 +- bb11: { +- goto -> bb12; // scope 0 at $DIR/simplify_cfg.rs:7:9: 9:10 - } - -- bb9: { - StorageDead(_2); // scope 0 at $DIR/simplify_cfg.rs:10:5: 10:6 +- bb12: { +- StorageDead(_2); // scope 0 at $DIR/simplify_cfg.rs:10:5: 10:6 - goto -> bb1; // scope 0 at $DIR/simplify_cfg.rs:6:5: 10:6 - } - -- bb10: { -- StorageDead(_2); // scope 0 at $DIR/simplify_cfg.rs:10:5: 10:6 +- bb13: { return; // scope 0 at $DIR/simplify_cfg.rs:11:2: 11:2 } - -- bb11 (cleanup): { -+ bb6 (cleanup): { - resume; // scope 0 at $DIR/simplify_cfg.rs:5:1: 11:2 - } } diff --git a/src/test/mir-opt/simplify_try/rustc.try_identity.SimplifyArmIdentity.diff b/src/test/mir-opt/simplify_try/rustc.try_identity.SimplifyArmIdentity.diff index 58c5313909f6b..97050122ca96e 100644 --- a/src/test/mir-opt/simplify_try/rustc.try_identity.SimplifyArmIdentity.diff +++ b/src/test/mir-opt/simplify_try/rustc.try_identity.SimplifyArmIdentity.diff @@ -83,7 +83,7 @@ + _0 = move _3; // scope 8 at $SRC_DIR/libcore/result.rs:LL:COL StorageDead(_3); // scope 0 at $DIR/simplify_try.rs:6:15: 6:16 StorageDead(_2); // scope 0 at $DIR/simplify_try.rs:8:1: 8:2 - goto -> bb3; // scope 0 at $DIR/simplify_try.rs:8:2: 8:2 + goto -> bb3; // scope 0 at $DIR/simplify_try.rs:6:14: 6:15 } bb3: { diff --git 
a/src/test/mir-opt/storage_live_dead_in_statics/rustc.XXX.mir_map.0.mir b/src/test/mir-opt/storage_live_dead_in_statics/rustc.XXX.mir_map.0.mir index 8891f19e45904..62b7535f2b575 100644 --- a/src/test/mir-opt/storage_live_dead_in_statics/rustc.XXX.mir_map.0.mir +++ b/src/test/mir-opt/storage_live_dead_in_statics/rustc.XXX.mir_map.0.mir @@ -663,4 +663,8 @@ static XXX: &Foo = { StorageDead(_1); // scope 0 at $DIR/storage_live_dead_in_statics.rs:23:1: 23:2 return; // scope 0 at $DIR/storage_live_dead_in_statics.rs:5:1: 23:3 } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/storage_live_dead_in_statics.rs:5:1: 23:3 + } } diff --git a/src/test/mir-opt/uniform_array_move_out/rustc.move_out_by_subslice.mir_map.0.mir b/src/test/mir-opt/uniform_array_move_out/rustc.move_out_by_subslice.mir_map.0.mir index d6478c628dee1..de29cd61019f1 100644 --- a/src/test/mir-opt/uniform_array_move_out/rustc.move_out_by_subslice.mir_map.0.mir +++ b/src/test/mir-opt/uniform_array_move_out/rustc.move_out_by_subslice.mir_map.0.mir @@ -28,10 +28,22 @@ fn move_out_by_subslice() -> () { // + span: $DIR/uniform_array_move_out.rs:11:18: 11:19 // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } _2 = move _3; // scope 0 at $DIR/uniform_array_move_out.rs:11:14: 11:19 - drop(_3) -> [return: bb1, unwind: bb9]; // scope 0 at $DIR/uniform_array_move_out.rs:11:18: 11:19 + drop(_3) -> [return: bb4, unwind: bb2]; // scope 0 at $DIR/uniform_array_move_out.rs:11:18: 11:19 } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/uniform_array_move_out.rs:10:1: 13:2 + } + + bb2 (cleanup): { + drop(_2) -> bb1; // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 + } + + bb3 (cleanup): { + drop(_3) -> bb2; // scope 0 at $DIR/uniform_array_move_out.rs:11:18: 11:19 + } + + bb4: { StorageDead(_3); // scope 0 at $DIR/uniform_array_move_out.rs:11:18: 11:19 StorageLive(_4); // scope 0 at $DIR/uniform_array_move_out.rs:11:21: 11:26 StorageLive(_5); // scope 0 at $DIR/uniform_array_move_out.rs:11:21: 11:26 @@ -44,21 +56,29 @@ fn move_out_by_subslice() -> () { // + span: $DIR/uniform_array_move_out.rs:11:25: 11:26 // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } _4 = move _5; // scope 0 at $DIR/uniform_array_move_out.rs:11:21: 11:26 - drop(_5) -> [return: bb2, unwind: bb8]; // scope 0 at $DIR/uniform_array_move_out.rs:11:25: 11:26 + drop(_5) -> [return: bb7, unwind: bb5]; // scope 0 at $DIR/uniform_array_move_out.rs:11:25: 11:26 + } + + bb5 (cleanup): { + drop(_4) -> bb2; // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 + } + + bb6 (cleanup): { + drop(_5) -> bb5; // scope 0 at $DIR/uniform_array_move_out.rs:11:25: 11:26 } - bb2: { + bb7: { StorageDead(_5); // scope 0 at $DIR/uniform_array_move_out.rs:11:25: 11:26 _1 = [move _2, move _4]; // scope 0 at $DIR/uniform_array_move_out.rs:11:13: 11:27 - drop(_4) -> [return: bb3, unwind: bb9]; // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 + drop(_4) -> [return: bb8, unwind: bb2]; // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 } - bb3: { + bb8: { StorageDead(_4); // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 - drop(_2) -> [return: bb4, unwind: bb10]; // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 + drop(_2) -> [return: bb9, unwind: bb1]; // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 } - bb4: { + bb9: { StorageDead(_2); // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 FakeRead(ForLet, _1); // scope 0 at $DIR/uniform_array_move_out.rs:11:9: 11:10 StorageLive(_6); // scope 1 at 
$DIR/uniform_array_move_out.rs:12:10: 12:17 @@ -70,32 +90,28 @@ fn move_out_by_subslice() -> () { // mir::Constant // + span: $DIR/uniform_array_move_out.rs:10:27: 13:2 // + literal: Const { ty: (), val: Value(Scalar()) } - drop(_6) -> [return: bb5, unwind: bb7]; // scope 1 at $DIR/uniform_array_move_out.rs:13:1: 13:2 - } - - bb5: { - StorageDead(_6); // scope 1 at $DIR/uniform_array_move_out.rs:13:1: 13:2 - drop(_1) -> [return: bb6, unwind: bb10]; // scope 0 at $DIR/uniform_array_move_out.rs:13:1: 13:2 + drop(_6) -> [return: bb12, unwind: bb10]; // scope 1 at $DIR/uniform_array_move_out.rs:13:1: 13:2 } - bb6: { - StorageDead(_1); // scope 0 at $DIR/uniform_array_move_out.rs:13:1: 13:2 - return; // scope 0 at $DIR/uniform_array_move_out.rs:13:2: 13:2 + bb10 (cleanup): { + drop(_1) -> bb1; // scope 0 at $DIR/uniform_array_move_out.rs:13:1: 13:2 } - bb7 (cleanup): { - drop(_1) -> bb10; // scope 0 at $DIR/uniform_array_move_out.rs:13:1: 13:2 + bb11 (cleanup): { + drop(_6) -> bb10; // scope 1 at $DIR/uniform_array_move_out.rs:13:1: 13:2 } - bb8 (cleanup): { - drop(_4) -> bb9; // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 + bb12: { + StorageDead(_6); // scope 1 at $DIR/uniform_array_move_out.rs:13:1: 13:2 + drop(_1) -> [return: bb13, unwind: bb1]; // scope 0 at $DIR/uniform_array_move_out.rs:13:1: 13:2 } - bb9 (cleanup): { - drop(_2) -> bb10; // scope 0 at $DIR/uniform_array_move_out.rs:11:26: 11:27 + bb13: { + StorageDead(_1); // scope 0 at $DIR/uniform_array_move_out.rs:13:1: 13:2 + goto -> bb14; // scope 0 at $DIR/uniform_array_move_out.rs:13:2: 13:2 } - bb10 (cleanup): { - resume; // scope 0 at $DIR/uniform_array_move_out.rs:10:1: 13:2 + bb14: { + return; // scope 0 at $DIR/uniform_array_move_out.rs:13:2: 13:2 } } diff --git a/src/test/mir-opt/uniform_array_move_out/rustc.move_out_from_end.mir_map.0.mir b/src/test/mir-opt/uniform_array_move_out/rustc.move_out_from_end.mir_map.0.mir index bba616de9a68e..aeab0e892ae8b 100644 --- a/src/test/mir-opt/uniform_array_move_out/rustc.move_out_from_end.mir_map.0.mir +++ b/src/test/mir-opt/uniform_array_move_out/rustc.move_out_from_end.mir_map.0.mir @@ -28,10 +28,22 @@ fn move_out_from_end() -> () { // + span: $DIR/uniform_array_move_out.rs:5:18: 5:19 // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } _2 = move _3; // scope 0 at $DIR/uniform_array_move_out.rs:5:14: 5:19 - drop(_3) -> [return: bb1, unwind: bb9]; // scope 0 at $DIR/uniform_array_move_out.rs:5:18: 5:19 + drop(_3) -> [return: bb4, unwind: bb2]; // scope 0 at $DIR/uniform_array_move_out.rs:5:18: 5:19 } - bb1: { + bb1 (cleanup): { + resume; // scope 0 at $DIR/uniform_array_move_out.rs:4:1: 7:2 + } + + bb2 (cleanup): { + drop(_2) -> bb1; // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 + } + + bb3 (cleanup): { + drop(_3) -> bb2; // scope 0 at $DIR/uniform_array_move_out.rs:5:18: 5:19 + } + + bb4: { StorageDead(_3); // scope 0 at $DIR/uniform_array_move_out.rs:5:18: 5:19 StorageLive(_4); // scope 0 at $DIR/uniform_array_move_out.rs:5:21: 5:26 StorageLive(_5); // scope 0 at $DIR/uniform_array_move_out.rs:5:21: 5:26 @@ -44,21 +56,29 @@ fn move_out_from_end() -> () { // + span: $DIR/uniform_array_move_out.rs:5:25: 5:26 // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } _4 = move _5; // scope 0 at $DIR/uniform_array_move_out.rs:5:21: 5:26 - drop(_5) -> [return: bb2, unwind: bb8]; // scope 0 at $DIR/uniform_array_move_out.rs:5:25: 5:26 + drop(_5) -> [return: bb7, unwind: bb5]; // scope 0 at $DIR/uniform_array_move_out.rs:5:25: 5:26 + } + + bb5 (cleanup): 
{ + drop(_4) -> bb2; // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 + } + + bb6 (cleanup): { + drop(_5) -> bb5; // scope 0 at $DIR/uniform_array_move_out.rs:5:25: 5:26 } - bb2: { + bb7: { StorageDead(_5); // scope 0 at $DIR/uniform_array_move_out.rs:5:25: 5:26 _1 = [move _2, move _4]; // scope 0 at $DIR/uniform_array_move_out.rs:5:13: 5:27 - drop(_4) -> [return: bb3, unwind: bb9]; // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 + drop(_4) -> [return: bb8, unwind: bb2]; // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 } - bb3: { + bb8: { StorageDead(_4); // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 - drop(_2) -> [return: bb4, unwind: bb10]; // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 + drop(_2) -> [return: bb9, unwind: bb1]; // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 } - bb4: { + bb9: { StorageDead(_2); // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 FakeRead(ForLet, _1); // scope 0 at $DIR/uniform_array_move_out.rs:5:9: 5:10 StorageLive(_6); // scope 1 at $DIR/uniform_array_move_out.rs:6:14: 6:16 @@ -70,32 +90,28 @@ fn move_out_from_end() -> () { // mir::Constant // + span: $DIR/uniform_array_move_out.rs:4:24: 7:2 // + literal: Const { ty: (), val: Value(Scalar()) } - drop(_6) -> [return: bb5, unwind: bb7]; // scope 1 at $DIR/uniform_array_move_out.rs:7:1: 7:2 - } - - bb5: { - StorageDead(_6); // scope 1 at $DIR/uniform_array_move_out.rs:7:1: 7:2 - drop(_1) -> [return: bb6, unwind: bb10]; // scope 0 at $DIR/uniform_array_move_out.rs:7:1: 7:2 + drop(_6) -> [return: bb12, unwind: bb10]; // scope 1 at $DIR/uniform_array_move_out.rs:7:1: 7:2 } - bb6: { - StorageDead(_1); // scope 0 at $DIR/uniform_array_move_out.rs:7:1: 7:2 - return; // scope 0 at $DIR/uniform_array_move_out.rs:7:2: 7:2 + bb10 (cleanup): { + drop(_1) -> bb1; // scope 0 at $DIR/uniform_array_move_out.rs:7:1: 7:2 } - bb7 (cleanup): { - drop(_1) -> bb10; // scope 0 at $DIR/uniform_array_move_out.rs:7:1: 7:2 + bb11 (cleanup): { + drop(_6) -> bb10; // scope 1 at $DIR/uniform_array_move_out.rs:7:1: 7:2 } - bb8 (cleanup): { - drop(_4) -> bb9; // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 + bb12: { + StorageDead(_6); // scope 1 at $DIR/uniform_array_move_out.rs:7:1: 7:2 + drop(_1) -> [return: bb13, unwind: bb1]; // scope 0 at $DIR/uniform_array_move_out.rs:7:1: 7:2 } - bb9 (cleanup): { - drop(_2) -> bb10; // scope 0 at $DIR/uniform_array_move_out.rs:5:26: 5:27 + bb13: { + StorageDead(_1); // scope 0 at $DIR/uniform_array_move_out.rs:7:1: 7:2 + goto -> bb14; // scope 0 at $DIR/uniform_array_move_out.rs:7:2: 7:2 } - bb10 (cleanup): { - resume; // scope 0 at $DIR/uniform_array_move_out.rs:4:1: 7:2 + bb14: { + return; // scope 0 at $DIR/uniform_array_move_out.rs:7:2: 7:2 } } diff --git a/src/test/mir-opt/unusual-item-types/32bit/rustc.E-V-{{constant}}.mir_map.0.mir b/src/test/mir-opt/unusual-item-types/32bit/rustc.E-V-{{constant}}.mir_map.0.mir index b17f379f4b6a9..c800ccb1ae51f 100644 --- a/src/test/mir-opt/unusual-item-types/32bit/rustc.E-V-{{constant}}.mir_map.0.mir +++ b/src/test/mir-opt/unusual-item-types/32bit/rustc.E-V-{{constant}}.mir_map.0.mir @@ -13,4 +13,8 @@ E::V::{{constant}}#0: isize = { // + literal: Const { ty: isize, val: Value(Scalar(0x00000005)) } return; // scope 0 at $DIR/unusual-item-types.rs:22:9: 22:10 } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/unusual-item-types.rs:22:9: 22:10 + } } diff --git a/src/test/mir-opt/unusual-item-types/32bit/rustc.ptr-drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir 
b/src/test/mir-opt/unusual-item-types/32bit/rustc.ptr-drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir index d56d15062d208..28f14399a6309 100644 --- a/src/test/mir-opt/unusual-item-types/32bit/rustc.ptr-drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir +++ b/src/test/mir-opt/unusual-item-types/32bit/rustc.ptr-drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir @@ -6,7 +6,7 @@ fn std::intrinsics::drop_in_place(_1: *mut std::vec::Vec) -> () { let mut _3: (); // in scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL bb0: { - goto -> bb6; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + goto -> bb7; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL } bb1: { @@ -22,16 +22,20 @@ fn std::intrinsics::drop_in_place(_1: *mut std::vec::Vec) -> () { } bb4 (cleanup): { - drop(((*_1).0: alloc::raw_vec::RawVec)) -> bb2; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + goto -> bb2; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL } - bb5: { - drop(((*_1).0: alloc::raw_vec::RawVec)) -> [return: bb3, unwind: bb2]; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + bb5 (cleanup): { + drop(((*_1).0: alloc::raw_vec::RawVec)) -> bb4; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL } bb6: { + drop(((*_1).0: alloc::raw_vec::RawVec)) -> [return: bb3, unwind: bb4]; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + } + + bb7: { _2 = &mut (*_1); // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL - _3 = const as std::ops::Drop>::drop(move _2) -> [return: bb5, unwind: bb4]; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + _3 = const as std::ops::Drop>::drop(move _2) -> [return: bb6, unwind: bb5]; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL // ty::Const // + ty: for<'r> fn(&'r mut std::vec::Vec) { as std::ops::Drop>::drop} // + val: Value(Scalar()) diff --git a/src/test/mir-opt/unusual-item-types/32bit/rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir b/src/test/mir-opt/unusual-item-types/32bit/rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir index 7349307f94c71..f4a5cc0b3279a 100644 --- a/src/test/mir-opt/unusual-item-types/32bit/rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir +++ b/src/test/mir-opt/unusual-item-types/32bit/rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir @@ -13,4 +13,8 @@ const ::ASSOCIATED_CONSTANT: i32 = // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } return; // scope 0 at $DIR/unusual-item-types.rs:10:5: 10:40 } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/unusual-item-types.rs:10:5: 10:40 + } } diff --git a/src/test/mir-opt/unusual-item-types/64bit/rustc.E-V-{{constant}}.mir_map.0.mir b/src/test/mir-opt/unusual-item-types/64bit/rustc.E-V-{{constant}}.mir_map.0.mir index 12073d612a195..e635cd2b01bbd 100644 --- a/src/test/mir-opt/unusual-item-types/64bit/rustc.E-V-{{constant}}.mir_map.0.mir +++ b/src/test/mir-opt/unusual-item-types/64bit/rustc.E-V-{{constant}}.mir_map.0.mir @@ -13,4 +13,8 @@ E::V::{{constant}}#0: isize = { // + literal: Const { ty: isize, val: Value(Scalar(0x0000000000000005)) } return; // scope 0 at $DIR/unusual-item-types.rs:22:9: 22:10 } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/unusual-item-types.rs:22:9: 22:10 + } } diff --git a/src/test/mir-opt/unusual-item-types/64bit/rustc.ptr-drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir b/src/test/mir-opt/unusual-item-types/64bit/rustc.ptr-drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir index d56d15062d208..28f14399a6309 100644 --- 
a/src/test/mir-opt/unusual-item-types/64bit/rustc.ptr-drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir +++ b/src/test/mir-opt/unusual-item-types/64bit/rustc.ptr-drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir @@ -6,7 +6,7 @@ fn std::intrinsics::drop_in_place(_1: *mut std::vec::Vec) -> () { let mut _3: (); // in scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL bb0: { - goto -> bb6; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + goto -> bb7; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL } bb1: { @@ -22,16 +22,20 @@ fn std::intrinsics::drop_in_place(_1: *mut std::vec::Vec) -> () { } bb4 (cleanup): { - drop(((*_1).0: alloc::raw_vec::RawVec)) -> bb2; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + goto -> bb2; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL } - bb5: { - drop(((*_1).0: alloc::raw_vec::RawVec)) -> [return: bb3, unwind: bb2]; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + bb5 (cleanup): { + drop(((*_1).0: alloc::raw_vec::RawVec)) -> bb4; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL } bb6: { + drop(((*_1).0: alloc::raw_vec::RawVec)) -> [return: bb3, unwind: bb4]; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + } + + bb7: { _2 = &mut (*_1); // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL - _3 = const as std::ops::Drop>::drop(move _2) -> [return: bb5, unwind: bb4]; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL + _3 = const as std::ops::Drop>::drop(move _2) -> [return: bb6, unwind: bb5]; // scope 0 at $SRC_DIR/libcore/ptr/mod.rs:LL:COL // ty::Const // + ty: for<'r> fn(&'r mut std::vec::Vec) { as std::ops::Drop>::drop} // + val: Value(Scalar()) diff --git a/src/test/mir-opt/unusual-item-types/64bit/rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir b/src/test/mir-opt/unusual-item-types/64bit/rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir index 7349307f94c71..f4a5cc0b3279a 100644 --- a/src/test/mir-opt/unusual-item-types/64bit/rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir +++ b/src/test/mir-opt/unusual-item-types/64bit/rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir @@ -13,4 +13,8 @@ const ::ASSOCIATED_CONSTANT: i32 = // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } return; // scope 0 at $DIR/unusual-item-types.rs:10:5: 10:40 } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/unusual-item-types.rs:10:5: 10:40 + } } diff --git a/src/test/mir-opt/while-storage/rustc.while_loop.PreCodegen.after.mir b/src/test/mir-opt/while-storage/rustc.while_loop.PreCodegen.after.mir index 4742a0fb63169..3ddf82c2fb2c9 100644 --- a/src/test/mir-opt/while-storage/rustc.while_loop.PreCodegen.after.mir +++ b/src/test/mir-opt/while-storage/rustc.while_loop.PreCodegen.after.mir @@ -70,7 +70,7 @@ fn while_loop(_1: bool) -> () { // + span: $DIR/while-storage.rs:12:13: 12:18 // + literal: Const { ty: (), val: Value(Scalar()) } StorageDead(_4); // scope 0 at $DIR/while-storage.rs:14:5: 14:6 - goto -> bb7; // scope 0 at $DIR/while-storage.rs:1:1: 1:1 + goto -> bb7; // scope 0 at $DIR/while-storage.rs:12:13: 12:18 } bb7: { diff --git a/src/test/ui/async-await/async-fn-size-moved-locals.rs b/src/test/ui/async-await/async-fn-size-moved-locals.rs index 000acf14a3fbc..636fafc2bc44a 100644 --- a/src/test/ui/async-await/async-fn-size-moved-locals.rs +++ b/src/test/ui/async-await/async-fn-size-moved-locals.rs @@ -114,5 +114,5 @@ fn main() { assert_eq!(1026, std::mem::size_of_val(&single_with_noop())); assert_eq!(3078, std::mem::size_of_val(&joined())); assert_eq!(3079, std::mem::size_of_val(&joined_with_noop())); - assert_eq!(6157, 
std::mem::size_of_val(&mixed_sizes())); + assert_eq!(7181, std::mem::size_of_val(&mixed_sizes())); } diff --git a/src/test/ui/async-await/issue-73137.rs b/src/test/ui/async-await/issue-73137.rs new file mode 100644 index 0000000000000..18374460df79b --- /dev/null +++ b/src/test/ui/async-await/issue-73137.rs @@ -0,0 +1,42 @@ +// Regression test for + +// run-pass +// edition:2018 + +#![allow(dead_code)] +#![feature(wake_trait)] +use std::future::Future; +use std::task::{Waker, Wake, Context}; +use std::sync::Arc; + +struct DummyWaker; +impl Wake for DummyWaker { + fn wake(self: Arc) {} +} + +struct Foo { + a: usize, + b: &'static u32, +} + +#[inline(never)] +fn nop(_: T) {} + +fn main() { + let mut fut = Box::pin(async { + let action = Foo { + b: &42, + a: async { 0 }.await, + }; + + // An error in the generator transform caused `b` to be overwritten with `a` when `b` was + // borrowed. + nop(&action.b); + assert_ne!(0usize, unsafe { std::mem::transmute(action.b) }); + + async {}.await; + }); + let waker = Waker::from(Arc::new(DummyWaker)); + let mut cx = Context::from_waker(&waker); + let _ = fut.as_mut().poll(&mut cx); +} diff --git a/src/test/ui/generator/size-moved-locals.rs b/src/test/ui/generator/size-moved-locals.rs index a5786c2999eb4..74c60d98154dd 100644 --- a/src/test/ui/generator/size-moved-locals.rs +++ b/src/test/ui/generator/size-moved-locals.rs @@ -72,6 +72,6 @@ fn overlap_x_and_y() -> impl Generator { fn main() { assert_eq!(1025, std::mem::size_of_val(&move_before_yield())); assert_eq!(1026, std::mem::size_of_val(&move_before_yield_with_noop())); - assert_eq!(1027, std::mem::size_of_val(&overlap_move_points())); + assert_eq!(2051, std::mem::size_of_val(&overlap_move_points())); assert_eq!(1026, std::mem::size_of_val(&overlap_x_and_y())); } diff --git a/src/test/ui/match/issue-72896.rs b/src/test/ui/match/issue-72896.rs new file mode 100644 index 0000000000000..3a8b82037310a --- /dev/null +++ b/src/test/ui/match/issue-72896.rs @@ -0,0 +1,23 @@ +// run-pass +trait EnumSetType { + type Repr; +} + +enum Enum8 { } +impl EnumSetType for Enum8 { + type Repr = u8; +} + +#[derive(PartialEq, Eq)] +struct EnumSet { + __enumset_underlying: T::Repr, +} + +const CONST_SET: EnumSet = EnumSet { __enumset_underlying: 3 }; + +fn main() { + match CONST_SET { + CONST_SET => { /* ok */ } + _ => panic!("match fell through?"), + } +} diff --git a/src/tools/cargo b/src/tools/cargo index 9fcb8c1d20c17..744bd1fbb666f 160000 --- a/src/tools/cargo +++ b/src/tools/cargo @@ -1 +1 @@ -Subproject commit 9fcb8c1d20c17f51054f7aa4e08ff28d381fe096 +Subproject commit 744bd1fbb666f33b20b09d5bacc5047957c8ed42