diff --git a/Cargo.lock b/Cargo.lock index 6b9bb721e0186..9f82b2e164027 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3756,6 +3756,7 @@ dependencies = [ "rustc_monomorphize", "rustc_parse", "rustc_passes", + "rustc_pattern_analysis", "rustc_privacy", "rustc_query_system", "rustc_resolve", @@ -4229,6 +4230,7 @@ dependencies = [ "rustc_infer", "rustc_macros", "rustc_middle", + "rustc_pattern_analysis", "rustc_session", "rustc_span", "rustc_target", @@ -4364,6 +4366,26 @@ dependencies = [ "tracing", ] +[[package]] +name = "rustc_pattern_analysis" +version = "0.0.0" +dependencies = [ + "rustc_apfloat", + "rustc_arena", + "rustc_data_structures", + "rustc_errors", + "rustc_fluent_macro", + "rustc_hir", + "rustc_index", + "rustc_macros", + "rustc_middle", + "rustc_session", + "rustc_span", + "rustc_target", + "smallvec", + "tracing", +] + [[package]] name = "rustc_privacy" version = "0.0.0" diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 0fae2c78e27ee..4a2a693862b13 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -7,6 +7,7 @@ mod item; use crate::pp::Breaks::{Consistent, Inconsistent}; use crate::pp::{self, Breaks}; +use crate::pprust::state::expr::FixupContext; use rustc_ast::attr::AttrIdGenerator; use rustc_ast::ptr::P; use rustc_ast::token::{self, BinOpToken, CommentKind, Delimiter, Nonterminal, Token, TokenKind}; @@ -811,7 +812,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere } fn expr_to_string(&self, e: &ast::Expr) -> String { - Self::to_string(|s| s.print_expr(e)) + Self::to_string(|s| s.print_expr(e, FixupContext::default())) } fn meta_item_lit_to_string(&self, lit: &ast::MetaItemLit) -> String { @@ -916,7 +917,7 @@ impl<'a> State<'a> { } fn commasep_exprs(&mut self, b: Breaks, exprs: &[P]) { - self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span) + self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e, FixupContext::default()), |e| e.span) } pub fn print_opt_lifetime(&mut self, lifetime: &Option) { @@ -953,7 +954,7 @@ impl<'a> State<'a> { match generic_arg { GenericArg::Lifetime(lt) => self.print_lifetime(*lt), GenericArg::Type(ty) => self.print_type(ty), - GenericArg::Const(ct) => self.print_expr(&ct.value), + GenericArg::Const(ct) => self.print_expr(&ct.value, FixupContext::default()), } } @@ -1020,12 +1021,12 @@ impl<'a> State<'a> { self.word("["); self.print_type(ty); self.word("; "); - self.print_expr(&length.value); + self.print_expr(&length.value, FixupContext::default()); self.word("]"); } ast::TyKind::Typeof(e) => { self.word("typeof("); - self.print_expr(&e.value); + self.print_expr(&e.value, FixupContext::default()); self.word(")"); } ast::TyKind::Infer => { @@ -1081,7 +1082,7 @@ impl<'a> State<'a> { if let Some((init, els)) = loc.kind.init_else_opt() { self.nbsp(); self.word_space("="); - self.print_expr(init); + self.print_expr(init, FixupContext::default()); if let Some(els) = els { self.cbox(INDENT_UNIT); self.ibox(INDENT_UNIT); @@ -1095,14 +1096,14 @@ impl<'a> State<'a> { ast::StmtKind::Item(item) => self.print_item(item), ast::StmtKind::Expr(expr) => { self.space_if_not_bol(); - self.print_expr_outer_attr_style(expr, false); + self.print_expr_outer_attr_style(expr, false, FixupContext::default()); if classify::expr_requires_semi_to_be_stmt(expr) { self.word(";"); } } ast::StmtKind::Semi(expr) => { self.space_if_not_bol(); - self.print_expr_outer_attr_style(expr, false); + self.print_expr_outer_attr_style(expr, false, 
FixupContext::default()); self.word(";"); } ast::StmtKind::Empty => { @@ -1154,7 +1155,7 @@ impl<'a> State<'a> { ast::StmtKind::Expr(expr) if i == blk.stmts.len() - 1 => { self.maybe_print_comment(st.span.lo()); self.space_if_not_bol(); - self.print_expr_outer_attr_style(expr, false); + self.print_expr_outer_attr_style(expr, false, FixupContext::default()); self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi())); } _ => self.print_stmt(st), @@ -1167,13 +1168,41 @@ impl<'a> State<'a> { } /// Print a `let pat = expr` expression. - fn print_let(&mut self, pat: &ast::Pat, expr: &ast::Expr) { + /// + /// Parentheses are inserted surrounding `expr` if a round-trip through the + /// parser would otherwise work out the wrong way in a condition position. + /// + /// For example each of the following would mean the wrong thing without + /// parentheses. + /// + /// ```ignore (illustrative) + /// if let _ = (Struct {}) {} + /// + /// if let _ = (true && false) {} + /// ``` + /// + /// In a match guard, the second case still requires parens, but the first + /// case no longer does because anything until `=>` is considered part of + /// the match guard expression. Parsing of the expression is not terminated + /// by `{` in that position. + /// + /// ```ignore (illustrative) + /// match () { + /// () if let _ = Struct {} => {} + /// () if let _ = (true && false) => {} + /// } + /// ``` + fn print_let(&mut self, pat: &ast::Pat, expr: &ast::Expr, fixup: FixupContext) { self.word("let "); self.print_pat(pat); self.space(); self.word_space("="); - let npals = || parser::needs_par_as_let_scrutinee(expr.precedence().order()); - self.print_expr_cond_paren(expr, Self::cond_needs_par(expr) || npals()) + self.print_expr_cond_paren( + expr, + fixup.parenthesize_exterior_struct_lit && parser::contains_exterior_struct_lit(expr) + || parser::needs_par_as_let_scrutinee(expr.precedence().order()), + FixupContext::default(), + ); } fn print_mac(&mut self, m: &ast::MacCall) { @@ -1220,7 +1249,7 @@ impl<'a> State<'a> { print_reg_or_class(s, reg); s.pclose(); s.space(); - s.print_expr(expr); + s.print_expr(expr, FixupContext::default()); } InlineAsmOperand::Out { reg, late, expr } => { s.word(if *late { "lateout" } else { "out" }); @@ -1229,7 +1258,7 @@ impl<'a> State<'a> { s.pclose(); s.space(); match expr { - Some(expr) => s.print_expr(expr), + Some(expr) => s.print_expr(expr, FixupContext::default()), None => s.word("_"), } } @@ -1239,7 +1268,7 @@ impl<'a> State<'a> { print_reg_or_class(s, reg); s.pclose(); s.space(); - s.print_expr(expr); + s.print_expr(expr, FixupContext::default()); } InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => { s.word(if *late { "inlateout" } else { "inout" }); @@ -1247,18 +1276,18 @@ impl<'a> State<'a> { print_reg_or_class(s, reg); s.pclose(); s.space(); - s.print_expr(in_expr); + s.print_expr(in_expr, FixupContext::default()); s.space(); s.word_space("=>"); match out_expr { - Some(out_expr) => s.print_expr(out_expr), + Some(out_expr) => s.print_expr(out_expr, FixupContext::default()), None => s.word("_"), } } InlineAsmOperand::Const { anon_const } => { s.word("const"); s.space(); - s.print_expr(&anon_const.value); + s.print_expr(&anon_const.value, FixupContext::default()); } InlineAsmOperand::Sym { sym } => { s.word("sym"); @@ -1452,10 +1481,10 @@ impl<'a> State<'a> { self.print_pat(inner); } } - PatKind::Lit(e) => self.print_expr(e), + PatKind::Lit(e) => self.print_expr(e, FixupContext::default()), PatKind::Range(begin, end, Spanned { node: end_kind, .. 
}) => {
                 if let Some(e) = begin {
-                    self.print_expr(e);
+                    self.print_expr(e, FixupContext::default());
                 }
                 match end_kind {
                     RangeEnd::Included(RangeSyntax::DotDotDot) => self.word("..."),
@@ -1463,7 +1492,7 @@ impl<'a> State<'a> {
                     RangeEnd::Excluded => self.word(".."),
                 }
                 if let Some(e) = end {
-                    self.print_expr(e);
+                    self.print_expr(e, FixupContext::default());
                 }
             }
             PatKind::Slice(elts) => {
@@ -1617,7 +1646,7 @@ impl<'a> State<'a> {
                 if let Some(default) = default {
                     s.space();
                     s.word_space("=");
-                    s.print_expr(&default.value);
+                    s.print_expr(&default.value, FixupContext::default());
                 }
             }
         }
diff --git a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs
index 5397278bbb1c6..f5ffcddb83d74 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs
@@ -12,6 +12,19 @@ use rustc_ast::{
 };
 use std::fmt::Write;
 
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct FixupContext {
+    pub parenthesize_exterior_struct_lit: bool,
+}
+
+/// The default amount of fixing is minimal fixing. Fixups should be turned on
+/// in a targeted fashion where needed.
+impl Default for FixupContext {
+    fn default() -> Self {
+        FixupContext { parenthesize_exterior_struct_lit: false }
+    }
+}
+
 impl<'a> State<'a> {
     fn print_else(&mut self, els: Option<&ast::Expr>) {
         if let Some(_else) = els {
@@ -55,21 +68,22 @@ impl<'a> State<'a> {
         self.pclose()
     }
 
-    fn print_expr_maybe_paren(&mut self, expr: &ast::Expr, prec: i8) {
-        self.print_expr_cond_paren(expr, expr.precedence().order() < prec)
+    fn print_expr_maybe_paren(&mut self, expr: &ast::Expr, prec: i8, fixup: FixupContext) {
+        self.print_expr_cond_paren(expr, expr.precedence().order() < prec, fixup);
     }
 
     /// Prints an expr using syntax that's acceptable in a condition position, such as the `cond` in
     /// `if cond { ... }`.
     fn print_expr_as_cond(&mut self, expr: &ast::Expr) {
-        self.print_expr_cond_paren(expr, Self::cond_needs_par(expr))
+        let fixup = FixupContext { parenthesize_exterior_struct_lit: true };
+        self.print_expr_cond_paren(expr, Self::cond_needs_par(expr), fixup)
     }
 
     /// Does `expr` need parentheses when printed in a condition position?
     ///
    /// These cases need parens due to the parse error observed in #26461: `if return {}`
    /// parses as the erroneous construct `if (return {})`, not `if (return) {}`.
-    pub(super) fn cond_needs_par(expr: &ast::Expr) -> bool {
+    fn cond_needs_par(expr: &ast::Expr) -> bool {
         match expr.kind {
             ast::ExprKind::Break(..)
             | ast::ExprKind::Closure(..)
@@ -80,11 +94,32 @@ impl<'a> State<'a> {
     }
 
     /// Prints `expr` or `(expr)` when `needs_par` holds.
-    pub(super) fn print_expr_cond_paren(&mut self, expr: &ast::Expr, needs_par: bool) {
+    pub(super) fn print_expr_cond_paren(
+        &mut self,
+        expr: &ast::Expr,
+        needs_par: bool,
+        fixup: FixupContext,
+    ) {
         if needs_par {
             self.popen();
         }
-        self.print_expr(expr);
+
+        // If we are surrounding the whole cond in parentheses, such as:
+        //
+        //     if (return Struct {}) {}
+        //
+        // then there is no need for parenthesizing the individual struct
+        // expressions within. On the other hand if the whole cond is not
+        // parenthesized, then print_expr must parenthesize exterior struct
+        // literals.
+ // + // if x == (Struct {}) {} + // + let fixup = FixupContext { + parenthesize_exterior_struct_lit: fixup.parenthesize_exterior_struct_lit && !needs_par, + }; + self.print_expr(expr, fixup); + if needs_par { self.pclose(); } @@ -111,7 +146,7 @@ impl<'a> State<'a> { self.ibox(0); self.print_block_with_attrs(block, attrs); } else { - self.print_expr(&expr.value); + self.print_expr(&expr.value, FixupContext::default()); } self.end(); } @@ -119,9 +154,9 @@ impl<'a> State<'a> { fn print_expr_repeat(&mut self, element: &ast::Expr, count: &ast::AnonConst) { self.ibox(INDENT_UNIT); self.word("["); - self.print_expr(element); + self.print_expr(element, FixupContext::default()); self.word_space(";"); - self.print_expr(&count.value); + self.print_expr(&count.value, FixupContext::default()); self.word("]"); self.end(); } @@ -161,7 +196,7 @@ impl<'a> State<'a> { self.print_ident(field.ident); self.word_nbsp(":"); } - self.print_expr(&field.expr); + self.print_expr(&field.expr, FixupContext::default()); if !is_last || has_rest { self.word_space(","); } else { @@ -174,7 +209,7 @@ impl<'a> State<'a> { } self.word(".."); if let ast::StructRest::Base(expr) = rest { - self.print_expr(expr); + self.print_expr(expr, FixupContext::default()); } self.space(); } @@ -192,13 +227,13 @@ impl<'a> State<'a> { self.pclose() } - fn print_expr_call(&mut self, func: &ast::Expr, args: &[P]) { + fn print_expr_call(&mut self, func: &ast::Expr, args: &[P], fixup: FixupContext) { let prec = match func.kind { ast::ExprKind::Field(..) => parser::PREC_FORCE_PAREN, _ => parser::PREC_POSTFIX, }; - self.print_expr_maybe_paren(func, prec); + self.print_expr_maybe_paren(func, prec, fixup); self.print_call_post(args) } @@ -207,8 +242,9 @@ impl<'a> State<'a> { segment: &ast::PathSegment, receiver: &ast::Expr, base_args: &[P], + fixup: FixupContext, ) { - self.print_expr_maybe_paren(receiver, parser::PREC_POSTFIX); + self.print_expr_maybe_paren(receiver, parser::PREC_POSTFIX, fixup); self.word("."); self.print_ident(segment.ident); if let Some(args) = &segment.args { @@ -217,7 +253,13 @@ impl<'a> State<'a> { self.print_call_post(base_args) } - fn print_expr_binary(&mut self, op: ast::BinOp, lhs: &ast::Expr, rhs: &ast::Expr) { + fn print_expr_binary( + &mut self, + op: ast::BinOp, + lhs: &ast::Expr, + rhs: &ast::Expr, + fixup: FixupContext, + ) { let assoc_op = AssocOp::from_ast_binop(op.node); let prec = assoc_op.precedence() as i8; let fixity = assoc_op.fixity(); @@ -253,15 +295,15 @@ impl<'a> State<'a> { _ => left_prec, }; - self.print_expr_maybe_paren(lhs, left_prec); + self.print_expr_maybe_paren(lhs, left_prec, fixup); self.space(); self.word_space(op.node.as_str()); - self.print_expr_maybe_paren(rhs, right_prec) + self.print_expr_maybe_paren(rhs, right_prec, fixup) } - fn print_expr_unary(&mut self, op: ast::UnOp, expr: &ast::Expr) { + fn print_expr_unary(&mut self, op: ast::UnOp, expr: &ast::Expr, fixup: FixupContext) { self.word(op.as_str()); - self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) + self.print_expr_maybe_paren(expr, parser::PREC_PREFIX, fixup) } fn print_expr_addr_of( @@ -269,6 +311,7 @@ impl<'a> State<'a> { kind: ast::BorrowKind, mutability: ast::Mutability, expr: &ast::Expr, + fixup: FixupContext, ) { self.word("&"); match kind { @@ -278,14 +321,19 @@ impl<'a> State<'a> { self.print_mutability(mutability, true); } } - self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) + self.print_expr_maybe_paren(expr, parser::PREC_PREFIX, fixup) } - pub(super) fn print_expr(&mut self, expr: &ast::Expr) { - 
self.print_expr_outer_attr_style(expr, true) + pub(super) fn print_expr(&mut self, expr: &ast::Expr, fixup: FixupContext) { + self.print_expr_outer_attr_style(expr, true, fixup) } - pub(super) fn print_expr_outer_attr_style(&mut self, expr: &ast::Expr, is_inline: bool) { + pub(super) fn print_expr_outer_attr_style( + &mut self, + expr: &ast::Expr, + is_inline: bool, + fixup: FixupContext, + ) { self.maybe_print_comment(expr.span.lo()); let attrs = &expr.attrs; @@ -314,19 +362,19 @@ impl<'a> State<'a> { self.print_expr_tup(exprs); } ast::ExprKind::Call(func, args) => { - self.print_expr_call(func, args); + self.print_expr_call(func, args, fixup); } ast::ExprKind::MethodCall(box ast::MethodCall { seg, receiver, args, .. }) => { - self.print_expr_method_call(seg, receiver, args); + self.print_expr_method_call(seg, receiver, args, fixup); } ast::ExprKind::Binary(op, lhs, rhs) => { - self.print_expr_binary(*op, lhs, rhs); + self.print_expr_binary(*op, lhs, rhs, fixup); } ast::ExprKind::Unary(op, expr) => { - self.print_expr_unary(*op, expr); + self.print_expr_unary(*op, expr, fixup); } ast::ExprKind::AddrOf(k, m, expr) => { - self.print_expr_addr_of(*k, *m, expr); + self.print_expr_addr_of(*k, *m, expr, fixup); } ast::ExprKind::Lit(token_lit) => { self.print_token_literal(*token_lit, expr.span); @@ -337,7 +385,7 @@ impl<'a> State<'a> { } ast::ExprKind::Cast(expr, ty) => { let prec = AssocOp::As.precedence() as i8; - self.print_expr_maybe_paren(expr, prec); + self.print_expr_maybe_paren(expr, prec, fixup); self.space(); self.word_space("as"); self.print_type(ty); @@ -345,7 +393,7 @@ impl<'a> State<'a> { ast::ExprKind::Type(expr, ty) => { self.word("type_ascribe!("); self.ibox(0); - self.print_expr(expr); + self.print_expr(expr, FixupContext::default()); self.word(","); self.space_if_not_bol(); @@ -355,7 +403,7 @@ impl<'a> State<'a> { self.word(")"); } ast::ExprKind::Let(pat, scrutinee, _, _) => { - self.print_let(pat, scrutinee); + self.print_let(pat, scrutinee, fixup); } ast::ExprKind::If(test, blk, elseopt) => self.print_if(test, blk, elseopt.as_deref()), ast::ExprKind::While(test, blk, opt_label) => { @@ -428,7 +476,7 @@ impl<'a> State<'a> { self.print_fn_params_and_ret(fn_decl, true); self.space(); - self.print_expr(body); + self.print_expr(body, FixupContext::default()); self.end(); // need to close a box // a box will be closed by print_expr, but we didn't want an overall @@ -456,33 +504,33 @@ impl<'a> State<'a> { self.print_block_with_attrs(blk, attrs); } ast::ExprKind::Await(expr, _) => { - self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX); + self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX, fixup); self.word(".await"); } ast::ExprKind::Assign(lhs, rhs, _) => { let prec = AssocOp::Assign.precedence() as i8; - self.print_expr_maybe_paren(lhs, prec + 1); + self.print_expr_maybe_paren(lhs, prec + 1, fixup); self.space(); self.word_space("="); - self.print_expr_maybe_paren(rhs, prec); + self.print_expr_maybe_paren(rhs, prec, fixup); } ast::ExprKind::AssignOp(op, lhs, rhs) => { let prec = AssocOp::Assign.precedence() as i8; - self.print_expr_maybe_paren(lhs, prec + 1); + self.print_expr_maybe_paren(lhs, prec + 1, fixup); self.space(); self.word(op.node.as_str()); self.word_space("="); - self.print_expr_maybe_paren(rhs, prec); + self.print_expr_maybe_paren(rhs, prec, fixup); } ast::ExprKind::Field(expr, ident) => { - self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX); + self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX, fixup); self.word("."); self.print_ident(*ident); } 
ast::ExprKind::Index(expr, index, _) => { - self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX); + self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX, fixup); self.word("["); - self.print_expr(index); + self.print_expr(index, FixupContext::default()); self.word("]"); } ast::ExprKind::Range(start, end, limits) => { @@ -492,14 +540,14 @@ impl<'a> State<'a> { // a "normal" binop gets parenthesized. (`LOr` is the lowest-precedence binop.) let fake_prec = AssocOp::LOr.precedence() as i8; if let Some(e) = start { - self.print_expr_maybe_paren(e, fake_prec); + self.print_expr_maybe_paren(e, fake_prec, fixup); } match limits { ast::RangeLimits::HalfOpen => self.word(".."), ast::RangeLimits::Closed => self.word("..="), } if let Some(e) = end { - self.print_expr_maybe_paren(e, fake_prec); + self.print_expr_maybe_paren(e, fake_prec, fixup); } } ast::ExprKind::Underscore => self.word("_"), @@ -513,7 +561,7 @@ impl<'a> State<'a> { } if let Some(expr) = opt_expr { self.space(); - self.print_expr_maybe_paren(expr, parser::PREC_JUMP); + self.print_expr_maybe_paren(expr, parser::PREC_JUMP, fixup); } } ast::ExprKind::Continue(opt_label) => { @@ -527,7 +575,7 @@ impl<'a> State<'a> { self.word("return"); if let Some(expr) = result { self.word(" "); - self.print_expr_maybe_paren(expr, parser::PREC_JUMP); + self.print_expr_maybe_paren(expr, parser::PREC_JUMP, fixup); } } ast::ExprKind::Yeet(result) => { @@ -536,13 +584,13 @@ impl<'a> State<'a> { self.word("yeet"); if let Some(expr) = result { self.word(" "); - self.print_expr_maybe_paren(expr, parser::PREC_JUMP); + self.print_expr_maybe_paren(expr, parser::PREC_JUMP, fixup); } } ast::ExprKind::Become(result) => { self.word("become"); self.word(" "); - self.print_expr_maybe_paren(result, parser::PREC_JUMP); + self.print_expr_maybe_paren(result, parser::PREC_JUMP, fixup); } ast::ExprKind::InlineAsm(a) => { // FIXME: This should have its own syntax, distinct from a macro invocation. @@ -557,7 +605,7 @@ impl<'a> State<'a> { self.word(reconstruct_format_args_template_string(&fmt.template)); for arg in fmt.arguments.all_args() { self.word_space(","); - self.print_expr(&arg.expr); + self.print_expr(&arg.expr, FixupContext::default()); } self.end(); self.pclose(); @@ -584,7 +632,7 @@ impl<'a> State<'a> { ast::ExprKind::MacCall(m) => self.print_mac(m), ast::ExprKind::Paren(e) => { self.popen(); - self.print_expr(e); + self.print_expr(e, FixupContext::default()); self.pclose(); } ast::ExprKind::Yield(e) => { @@ -592,11 +640,11 @@ impl<'a> State<'a> { if let Some(expr) = e { self.space(); - self.print_expr_maybe_paren(expr, parser::PREC_JUMP); + self.print_expr_maybe_paren(expr, parser::PREC_JUMP, fixup); } } ast::ExprKind::Try(e) => { - self.print_expr_maybe_paren(e, parser::PREC_POSTFIX); + self.print_expr_maybe_paren(e, parser::PREC_POSTFIX, fixup); self.word("?") } ast::ExprKind::TryBlock(blk) => { @@ -628,7 +676,7 @@ impl<'a> State<'a> { self.space(); if let Some(e) = &arm.guard { self.word_space("if"); - self.print_expr(e); + self.print_expr(e, FixupContext::default()); self.space(); } @@ -652,7 +700,7 @@ impl<'a> State<'a> { } _ => { self.end(); // Close the ibox for the pattern. 
- self.print_expr(body); + self.print_expr(body, FixupContext::default()); self.word(","); } } diff --git a/compiler/rustc_ast_pretty/src/pprust/state/item.rs b/compiler/rustc_ast_pretty/src/pprust/state/item.rs index fd5b529b1d4de..ea5d22a344877 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/item.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/item.rs @@ -1,4 +1,5 @@ use crate::pp::Breaks::Inconsistent; +use crate::pprust::state::expr::FixupContext; use crate::pprust::state::{AnnNode, PrintState, State, INDENT_UNIT}; use ast::StaticItem; @@ -97,7 +98,7 @@ impl<'a> State<'a> { self.end(); // end the head-ibox if let Some(body) = body { self.word_space("="); - self.print_expr(body); + self.print_expr(body, FixupContext::default()); } self.print_where_clause(&generics.where_clause); self.word(";"); @@ -514,7 +515,7 @@ impl<'a> State<'a> { if let Some(d) = &v.disr_expr { self.space(); self.word_space("="); - self.print_expr(&d.value) + self.print_expr(&d.value, FixupContext::default()) } } diff --git a/compiler/rustc_codegen_gcc/example/mini_core.rs b/compiler/rustc_codegen_gcc/example/mini_core.rs index 3432852034325..db94bc1c86af9 100644 --- a/compiler/rustc_codegen_gcc/example/mini_core.rs +++ b/compiler/rustc_codegen_gcc/example/mini_core.rs @@ -4,7 +4,7 @@ thread_local )] #![no_core] -#![allow(dead_code, internal_features)] +#![allow(dead_code, internal_features, ambiguous_wide_pointer_comparisons)] #[no_mangle] unsafe extern "C" fn _Unwind_Resume() { diff --git a/compiler/rustc_driver_impl/Cargo.toml b/compiler/rustc_driver_impl/Cargo.toml index f2a8c54b6d541..490429845538d 100644 --- a/compiler/rustc_driver_impl/Cargo.toml +++ b/compiler/rustc_driver_impl/Cargo.toml @@ -38,6 +38,7 @@ rustc_mir_transform = { path = "../rustc_mir_transform" } rustc_monomorphize = { path = "../rustc_monomorphize" } rustc_parse = { path = "../rustc_parse" } rustc_passes = { path = "../rustc_passes" } +rustc_pattern_analysis = { path = "../rustc_pattern_analysis" } rustc_privacy = { path = "../rustc_privacy" } rustc_query_system = { path = "../rustc_query_system" } rustc_resolve = { path = "../rustc_resolve" } diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index 1f60400b5132d..8b7a4dbff9d58 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -128,6 +128,7 @@ pub static DEFAULT_LOCALE_RESOURCES: &[&str] = &[ rustc_monomorphize::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE, rustc_passes::DEFAULT_LOCALE_RESOURCE, + rustc_pattern_analysis::DEFAULT_LOCALE_RESOURCE, rustc_privacy::DEFAULT_LOCALE_RESOURCE, rustc_query_system::DEFAULT_LOCALE_RESOURCE, rustc_resolve::DEFAULT_LOCALE_RESOURCE, diff --git a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp index 55e1c84c8a208..2601d96b8c869 100644 --- a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp @@ -841,7 +841,11 @@ LLVMRustOptimize( // cargo run tests in multhreading mode by default // so use atomics for coverage counters Options.Atomic = true; +#if LLVM_VERSION_GE(18, 0) + MPM.addPass(InstrProfilingLoweringPass(Options, false)); +#else MPM.addPass(InstrProfiling(Options, false)); +#endif } ); } diff --git a/compiler/rustc_mir_build/Cargo.toml b/compiler/rustc_mir_build/Cargo.toml index db54223405230..6d681dc295efb 100644 --- a/compiler/rustc_mir_build/Cargo.toml +++ b/compiler/rustc_mir_build/Cargo.toml @@ -17,6 +17,7 @@ rustc_index = { 
path = "../rustc_index" } rustc_infer = { path = "../rustc_infer" } rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } +rustc_pattern_analysis = { path = "../rustc_pattern_analysis" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } diff --git a/compiler/rustc_mir_build/messages.ftl b/compiler/rustc_mir_build/messages.ftl index c8d6c2114e9eb..615b553434fe6 100644 --- a/compiler/rustc_mir_build/messages.ftl +++ b/compiler/rustc_mir_build/messages.ftl @@ -237,15 +237,6 @@ mir_build_non_const_path = runtime values cannot be referenced in patterns mir_build_non_exhaustive_match_all_arms_guarded = match arms with guards don't count towards exhaustivity -mir_build_non_exhaustive_omitted_pattern = some variants are not matched explicitly - .help = ensure that all variants are matched explicitly by adding the suggested match arms - .note = the matched value is of type `{$scrut_ty}` and the `non_exhaustive_omitted_patterns` attribute was found - -mir_build_non_exhaustive_omitted_pattern_lint_on_arm = the lint level must be set on the whole match - .help = it no longer has any effect to set the lint level on an individual match arm - .label = remove this attribute - .suggestion = set the lint level on the whole match - mir_build_non_exhaustive_patterns_type_not_empty = non-exhaustive patterns: type `{$ty}` is non-empty .def_note = `{$peeled_ty}` defined here .type_note = the matched value is of type `{$ty}` @@ -260,10 +251,6 @@ mir_build_non_partial_eq_match = mir_build_nontrivial_structural_match = to use a constant of type `{$non_sm_ty}` in a pattern, the constant's initializer must be trivial or `{$non_sm_ty}` must be annotated with `#[derive(PartialEq, Eq)]` -mir_build_overlapping_range_endpoints = multiple patterns overlap on their endpoints - .range = ... 
with this range - .note = you likely meant to write mutually exclusive ranges - mir_build_pattern_not_covered = refutable pattern in {$origin} .pattern_ty = the matched value is of type `{$pattern_ty}` @@ -317,13 +304,6 @@ mir_build_unconditional_recursion = function cannot return without recursing mir_build_unconditional_recursion_call_site_label = recursive call site -mir_build_uncovered = {$count -> - [1] pattern `{$witness_1}` - [2] patterns `{$witness_1}` and `{$witness_2}` - [3] patterns `{$witness_1}`, `{$witness_2}` and `{$witness_3}` - *[other] patterns `{$witness_1}`, `{$witness_2}`, `{$witness_3}` and {$remainder} more - } not covered - mir_build_union_field_requires_unsafe = access to union field is unsafe and requires unsafe block .note = the field may not be properly initialized: using uninitialized data will cause undefined behavior diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index 8d2a559e73ce4..9baae706dff06 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -1,15 +1,12 @@ -use crate::{ - fluent_generated as fluent, - thir::pattern::{deconstruct_pat::WitnessPat, MatchCheckCtxt}, -}; +use crate::fluent_generated as fluent; use rustc_errors::DiagnosticArgValue; use rustc_errors::{ error_code, AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, Handler, IntoDiagnostic, MultiSpan, SubdiagnosticMessage, }; use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic}; -use rustc_middle::thir::Pat; use rustc_middle::ty::{self, Ty}; +use rustc_pattern_analysis::{cx::MatchCheckCtxt, errors::Uncovered}; use rustc_span::symbol::Symbol; use rustc_span::Span; @@ -812,94 +809,6 @@ pub struct NonPartialEqMatch<'tcx> { pub non_peq_ty: Ty<'tcx>, } -#[derive(LintDiagnostic)] -#[diag(mir_build_overlapping_range_endpoints)] -#[note] -pub struct OverlappingRangeEndpoints<'tcx> { - #[label(mir_build_range)] - pub range: Span, - #[subdiagnostic] - pub overlap: Vec>, -} - -pub struct Overlap<'tcx> { - pub span: Span, - pub range: Pat<'tcx>, -} - -impl<'tcx> AddToDiagnostic for Overlap<'tcx> { - fn add_to_diagnostic_with(self, diag: &mut Diagnostic, _: F) - where - F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage, - { - let Overlap { span, range } = self; - - // FIXME(mejrs) unfortunately `#[derive(LintDiagnostic)]` - // does not support `#[subdiagnostic(eager)]`... 
- let message = format!("this range overlaps on `{range}`..."); - diag.span_label(span, message); - } -} - -#[derive(LintDiagnostic)] -#[diag(mir_build_non_exhaustive_omitted_pattern)] -#[help] -#[note] -pub(crate) struct NonExhaustiveOmittedPattern<'tcx> { - pub scrut_ty: Ty<'tcx>, - #[subdiagnostic] - pub uncovered: Uncovered<'tcx>, -} - -#[derive(LintDiagnostic)] -#[diag(mir_build_non_exhaustive_omitted_pattern_lint_on_arm)] -#[help] -pub(crate) struct NonExhaustiveOmittedPatternLintOnArm { - #[label] - pub lint_span: Span, - #[suggestion(code = "#[{lint_level}({lint_name})]\n", applicability = "maybe-incorrect")] - pub suggest_lint_on_match: Option, - pub lint_level: &'static str, - pub lint_name: &'static str, -} - -#[derive(Subdiagnostic)] -#[label(mir_build_uncovered)] -pub(crate) struct Uncovered<'tcx> { - #[primary_span] - span: Span, - count: usize, - witness_1: Pat<'tcx>, - witness_2: Pat<'tcx>, - witness_3: Pat<'tcx>, - remainder: usize, -} - -impl<'tcx> Uncovered<'tcx> { - pub fn new<'p>( - span: Span, - cx: &MatchCheckCtxt<'p, 'tcx>, - witnesses: Vec>, - ) -> Self { - let witness_1 = witnesses.get(0).unwrap().to_diagnostic_pat(cx); - Self { - span, - count: witnesses.len(), - // Substitute dummy values if witnesses is smaller than 3. These will never be read. - witness_2: witnesses - .get(1) - .map(|w| w.to_diagnostic_pat(cx)) - .unwrap_or_else(|| witness_1.clone()), - witness_3: witnesses - .get(2) - .map(|w| w.to_diagnostic_pat(cx)) - .unwrap_or_else(|| witness_1.clone()), - witness_1, - remainder: witnesses.len().saturating_sub(3), - } - } -} - #[derive(Diagnostic)] #[diag(mir_build_pattern_not_covered, code = "E0005")] pub(crate) struct PatternNotCovered<'s, 'tcx> { diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs index fcd548821f2f5..09e0b30a5c7dc 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs @@ -1,7 +1,9 @@ -use super::deconstruct_pat::{Constructor, DeconstructedPat, WitnessPat}; -use super::usefulness::{ - compute_match_usefulness, MatchArm, MatchCheckCtxt, Usefulness, UsefulnessReport, -}; +use rustc_pattern_analysis::constructor::Constructor; +use rustc_pattern_analysis::cx::MatchCheckCtxt; +use rustc_pattern_analysis::errors::Uncovered; +use rustc_pattern_analysis::pat::{DeconstructedPat, WitnessPat}; +use rustc_pattern_analysis::usefulness::{Usefulness, UsefulnessReport}; +use rustc_pattern_analysis::{analyze_match, MatchArm}; use crate::errors::*; @@ -284,7 +286,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { check_borrow_conflicts_in_at_patterns(self, pat); check_for_bindings_named_same_as_variants(self, pat, refutable); }); - Ok(cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, pat))) + Ok(cx.pattern_arena.alloc(cx.lower_pat(pat))) } } @@ -433,7 +435,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { } let scrut_ty = scrut.ty; - let report = compute_match_usefulness(&cx, &tarms, scrut_ty); + let report = analyze_match(&cx, &tarms, scrut_ty); match source { // Don't report arm reachability of desugared `match $iter.into_iter() { iter => .. 
}` @@ -547,7 +549,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> { let cx = self.new_cx(refutability, None, scrut, pat.span); let pat = self.lower_pattern(&cx, pat)?; let arms = [MatchArm { pat, hir_id: self.lint_level, has_guard: false }]; - let report = compute_match_usefulness(&cx, &arms, pat.ty()); + let report = analyze_match(&cx, &arms, pat.ty()); Ok((cx, report)) } @@ -924,7 +926,7 @@ fn report_non_exhaustive_match<'p, 'tcx>( pattern = if witnesses.len() < 4 { witnesses .iter() - .map(|witness| witness.to_diagnostic_pat(cx).to_string()) + .map(|witness| cx.hoist_witness_pat(witness).to_string()) .collect::>() .join(" | ") } else { @@ -948,7 +950,7 @@ fn report_non_exhaustive_match<'p, 'tcx>( if !is_empty_match { let mut non_exhaustive_tys = FxHashSet::default(); // Look at the first witness. - collect_non_exhaustive_tys(cx.tcx, &witnesses[0], &mut non_exhaustive_tys); + collect_non_exhaustive_tys(cx, &witnesses[0], &mut non_exhaustive_tys); for ty in non_exhaustive_tys { if ty.is_ptr_sized_integral() { @@ -1083,13 +1085,13 @@ fn joined_uncovered_patterns<'p, 'tcx>( witnesses: &[WitnessPat<'tcx>], ) -> String { const LIMIT: usize = 3; - let pat_to_str = |pat: &WitnessPat<'tcx>| pat.to_diagnostic_pat(cx).to_string(); + let pat_to_str = |pat: &WitnessPat<'tcx>| cx.hoist_witness_pat(pat).to_string(); match witnesses { [] => bug!(), - [witness] => format!("`{}`", witness.to_diagnostic_pat(cx)), + [witness] => format!("`{}`", cx.hoist_witness_pat(witness)), [head @ .., tail] if head.len() < LIMIT => { let head: Vec<_> = head.iter().map(pat_to_str).collect(); - format!("`{}` and `{}`", head.join("`, `"), tail.to_diagnostic_pat(cx)) + format!("`{}` and `{}`", head.join("`, `"), cx.hoist_witness_pat(tail)) } _ => { let (head, tail) = witnesses.split_at(LIMIT); @@ -1100,7 +1102,7 @@ fn joined_uncovered_patterns<'p, 'tcx>( } fn collect_non_exhaustive_tys<'tcx>( - tcx: TyCtxt<'tcx>, + cx: &MatchCheckCtxt<'_, 'tcx>, pat: &WitnessPat<'tcx>, non_exhaustive_tys: &mut FxHashSet>, ) { @@ -1108,13 +1110,13 @@ fn collect_non_exhaustive_tys<'tcx>( non_exhaustive_tys.insert(pat.ty()); } if let Constructor::IntRange(range) = pat.ctor() { - if range.is_beyond_boundaries(pat.ty(), tcx) { + if cx.is_range_beyond_boundaries(range, pat.ty()) { // The range denotes the values before `isize::MIN` or the values after `usize::MAX`/`isize::MAX`. non_exhaustive_tys.insert(pat.ty()); } } pat.iter_fields() - .for_each(|field_pat| collect_non_exhaustive_tys(tcx, field_pat, non_exhaustive_tys)) + .for_each(|field_pat| collect_non_exhaustive_tys(cx, field_pat, non_exhaustive_tys)) } fn report_adt_defined_here<'tcx>( diff --git a/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs deleted file mode 100644 index ef20b0f039b79..0000000000000 --- a/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs +++ /dev/null @@ -1,1964 +0,0 @@ -//! As explained in [`super::usefulness`], values and patterns are made from constructors applied to -//! fields. This file defines a `Constructor` enum, a `Fields` struct, and various operations to -//! manipulate them and convert them from/to patterns. -//! -//! There are two important bits of core logic in this file: constructor inclusion and constructor -//! splitting. Constructor inclusion, i.e. whether a constructor is included in/covered by another, -//! is straightforward and defined in [`Constructor::is_covered_by`]. -//! -//! 
Constructor splitting is mentioned in [`super::usefulness`] but not detailed. We describe it -//! precisely here. -//! -//! -//! -//! # Constructor grouping and splitting -//! -//! As explained in the corresponding section in [`super::usefulness`], to make usefulness tractable -//! we need to group together constructors that have the same effect when they are used to -//! specialize the matrix. -//! -//! Example: -//! ```compile_fail,E0004 -//! match (0, false) { -//! (0 ..=100, true) => {} -//! (50..=150, false) => {} -//! (0 ..=200, _) => {} -//! } -//! ``` -//! -//! In this example we can restrict specialization to 5 cases: `0..50`, `50..=100`, `101..=150`, -//! `151..=200` and `200..`. -//! -//! In [`super::usefulness`], we had said that `specialize` only takes value-only constructors. We -//! now relax this restriction: we allow `specialize` to take constructors like `0..50` as long as -//! we're careful to only do that with constructors that make sense. For example, `specialize(0..50, -//! (0..=100, true))` is sensible, but `specialize(50..=200, (0..=100, true))` is not. -//! -//! Constructor splitting looks at the constructors in the first column of the matrix and constructs -//! such a sensible set of constructors. Formally, we want to find a smallest disjoint set of -//! constructors: -//! - Whose union covers the whole type, and -//! - That have no non-trivial intersection with any of the constructors in the column (i.e. they're -//! each either disjoint with or covered by any given column constructor). -//! -//! We compute this in two steps: first [`ConstructorSet::for_ty`] determines the set of all -//! possible constructors for the type. Then [`ConstructorSet::split`] looks at the column of -//! constructors and splits the set into groups accordingly. The precise invariants of -//! [`ConstructorSet::split`] is described in [`SplitConstructorSet`]. -//! -//! Constructor splitting has two interesting special cases: integer range splitting (see -//! [`IntRange::split`]) and slice splitting (see [`Slice::split`]). -//! -//! -//! -//! # The `Missing` constructor -//! -//! We detail a special case of constructor splitting that is a bit subtle. Take the following: -//! -//! ``` -//! enum Direction { North, South, East, West } -//! # let wind = (Direction::North, 0u8); -//! match wind { -//! (Direction::North, 50..) => {} -//! (_, _) => {} -//! } -//! ``` -//! -//! Here we expect constructor splitting to output two cases: `North`, and "everything else". This -//! "everything else" is represented by [`Constructor::Missing`]. Unlike other constructors, it's a -//! bit contextual: to know the exact list of constructors it represents we have to look at the -//! column. In practice however we don't need to, because by construction it only matches rows that -//! have wildcards. This is how this constructor is special: the only constructor that covers it is -//! `Wildcard`. -//! -//! The only place where we care about which constructors `Missing` represents is in diagnostics -//! (see `super::usefulness::WitnessMatrix::apply_constructor`). -//! -//! We choose whether to specialize with `Missing` in -//! `super::usefulness::compute_exhaustiveness_and_reachability`. -//! -//! -//! -//! ## Empty types, empty constructors, and the `exhaustive_patterns` feature -//! -//! An empty type is a type that has no valid value, like `!`, `enum Void {}`, or `Result`. -//! They require careful handling. -//! -//! 
First, for soundness reasons related to the possible existence of invalid values, by default we -//! don't treat empty types as empty. We force them to be matched with wildcards. Except if the -//! `exhaustive_patterns` feature is turned on, in which case we do treat them as empty. And also -//! except if the type has no constructors (like `enum Void {}` but not like `Result`), we -//! specifically allow `match void {}` to be exhaustive. There are additionally considerations of -//! place validity that are handled in `super::usefulness`. Yes this is a bit tricky. -//! -//! The second thing is that regardless of the above, it is always allowed to use all the -//! constructors of a type. For example, all the following is ok: -//! -//! ```rust,ignore(example) -//! # #![feature(never_type)] -//! # #![feature(exhaustive_patterns)] -//! fn foo(x: Option) { -//! match x { -//! None => {} -//! Some(_) => {} -//! } -//! } -//! fn bar(x: &[!]) -> u32 { -//! match x { -//! [] => 1, -//! [_] => 2, -//! [_, _] => 3, -//! } -//! } -//! ``` -//! -//! Moreover, take the following: -//! -//! ```rust -//! # #![feature(never_type)] -//! # #![feature(exhaustive_patterns)] -//! # let x = None::; -//! match x { -//! None => {} -//! } -//! ``` -//! -//! On a normal type, we would identify `Some` as missing and tell the user. If `x: Option` -//! however (and `exhaustive_patterns` is on), it's ok to omit `Some`. When listing the constructors -//! of a type, we must therefore track which can be omitted. -//! -//! Let's call "empty" a constructor that matches no valid value for the type, like `Some` for the -//! type `Option`. What this all means is that `ConstructorSet` must know which constructors are -//! empty. The difference between empty and nonempty constructors is that empty constructors need -//! not be present for the match to be exhaustive. -//! -//! A final remark: empty constructors of arity 0 break specialization, we must avoid them. The -//! reason is that if we specialize by them, nothing remains to witness the emptiness; the rest of -//! the algorithm can't distinguish them from a nonempty constructor. The only known case where this -//! could happen is the `[..]` pattern on `[!; N]` with `N > 0` so we must take care to not emit it. -//! -//! This is all handled by [`ConstructorSet::for_ty`] and [`ConstructorSet::split`]. The invariants -//! of [`SplitConstructorSet`] are also of interest. -//! -//! -//! -//! ## Opaque patterns -//! -//! Some patterns, such as constants that are not allowed to be matched structurally, cannot be -//! inspected, which we handle with `Constructor::Opaque`. Since we know nothing of these patterns, -//! we assume they never cover each other. In order to respect the invariants of -//! [`SplitConstructorSet`], we give each `Opaque` constructor a unique id so we can recognize it. 
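The module documentation above describes constructor splitting in the abstract. Below is a minimal, self-contained sketch of the boundary-counting idea it describes for integer ranges, assuming plain `u128` half-open bounds rather than the crate's `IntRange`/`MaybeInfiniteInt` types; the `Range` and `split` names here are hypothetical stand-ins, not the actual `rustc_pattern_analysis` API.

```rust
// Sketch of constructor splitting for integer ranges: partition `whole` into
// disjoint pieces so that each piece is either fully inside or fully outside
// every range in the column, and track whether each piece was "seen".
#[derive(Clone, Copy, Debug, PartialEq)]
struct Range {
    lo: u128, // half-open: lo..hi
    hi: u128,
}

fn split(whole: Range, column: &[Range]) -> Vec<(bool, Range)> {
    // Boundaries of the column ranges clipped to `whole`, counted like
    // parentheses: +1 when a range opens, -1 when it closes.
    let mut boundaries: Vec<(u128, i32)> = column
        .iter()
        .filter(|r| r.lo < whole.hi && whole.lo < r.hi)
        .flat_map(|r| [(r.lo.max(whole.lo), 1), (r.hi.min(whole.hi), -1)])
        .collect();
    boundaries.sort_unstable();

    let mut out = Vec::new();
    let mut open = 0;
    let mut prev = whole.lo;
    for (bdy, delta) in boundaries.into_iter().chain([(whole.hi, 0)]) {
        if prev != bdy {
            // A piece is "seen" if at least one column range is open over it.
            out.push((open > 0, Range { lo: prev, hi: bdy }));
        }
        open += delta;
        prev = bdy;
    }
    out
}

fn main() {
    // The `0..=100`, `50..=150`, `0..=200` example from the docs above,
    // written as half-open ranges over a `0..=200` scrutinee.
    let whole = Range { lo: 0, hi: 201 };
    let column =
        [Range { lo: 0, hi: 101 }, Range { lo: 50, hi: 151 }, Range { lo: 0, hi: 201 }];
    for (seen, r) in split(whole, &column) {
        println!("{}..{} seen={seen}", r.lo, r.hi);
    }
    // Prints pieces equivalent to 0..50, 50..=100, 101..=150 and 151..=200.
    // The extra `200..` case from the docs only arises when the scrutinee
    // type extends past 200, which this sketch's `whole` range does not model.
}
```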
- -use std::cell::Cell; -use std::cmp::{self, max, min, Ordering}; -use std::fmt; -use std::iter::once; - -use smallvec::{smallvec, SmallVec}; - -use rustc_apfloat::ieee::{DoubleS, IeeeFloat, SingleS}; -use rustc_data_structures::captures::Captures; -use rustc_data_structures::fx::FxHashSet; -use rustc_hir::RangeEnd; -use rustc_index::{Idx, IndexVec}; -use rustc_middle::middle::stability::EvalResult; -use rustc_middle::mir; -use rustc_middle::mir::interpret::Scalar; -use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange, PatRangeBoundary}; -use rustc_middle::ty::layout::IntegerExt; -use rustc_middle::ty::{self, Ty, TyCtxt, VariantDef}; -use rustc_span::{Span, DUMMY_SP}; -use rustc_target::abi::{FieldIdx, Integer, VariantIdx, FIRST_VARIANT}; - -use self::Constructor::*; -use self::MaybeInfiniteInt::*; -use self::SliceKind::*; - -use super::usefulness::{MatchCheckCtxt, PatCtxt}; - -/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns. -fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> { - fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) { - if let PatKind::Or { pats } = &pat.kind { - for pat in pats.iter() { - expand(pat, vec); - } - } else { - vec.push(pat) - } - } - - let mut pats = Vec::new(); - expand(pat, &mut pats); - pats -} - -/// Whether we have seen a constructor in the column or not. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] -enum Presence { - Unseen, - Seen, -} - -/// A possibly infinite integer. Values are encoded such that the ordering on `u128` matches the -/// natural order on the original type. For example, `-128i8` is encoded as `0` and `127i8` as -/// `255`. See `signed_bias` for details. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] -pub(crate) enum MaybeInfiniteInt { - NegInfinity, - /// Encoded value. DO NOT CONSTRUCT BY HAND; use `new_finite`. - Finite(u128), - /// The integer after `u128::MAX`. We need it to represent `x..=u128::MAX` as an exclusive range. - JustAfterMax, - PosInfinity, -} - -impl MaybeInfiniteInt { - // The return value of `signed_bias` should be XORed with a value to encode/decode it. - fn signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> u128 { - match *ty.kind() { - ty::Int(ity) => { - let bits = Integer::from_int_ty(&tcx, ity).size().bits() as u128; - 1u128 << (bits - 1) - } - _ => 0, - } - } - - fn new_finite(tcx: TyCtxt<'_>, ty: Ty<'_>, bits: u128) -> Self { - let bias = Self::signed_bias(tcx, ty); - // Perform a shift if the underlying types are signed, which makes the interval arithmetic - // type-independent. - let x = bits ^ bias; - Finite(x) - } - fn from_pat_range_bdy<'tcx>( - bdy: PatRangeBoundary<'tcx>, - ty: Ty<'tcx>, - tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, - ) -> Self { - match bdy { - PatRangeBoundary::NegInfinity => NegInfinity, - PatRangeBoundary::Finite(value) => { - let bits = value.eval_bits(tcx, param_env); - Self::new_finite(tcx, ty, bits) - } - PatRangeBoundary::PosInfinity => PosInfinity, - } - } - - /// Used only for diagnostics. - /// Note: it is possible to get `isize/usize::MAX+1` here, as explained in the doc for - /// [`IntRange::split`]. This cannot be represented as a `Const`, so we represent it with - /// `PosInfinity`. 
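The bias encoding described on `MaybeInfiniteInt` above (XOR with `1 << (bits - 1)` so that unsigned comparisons agree with the signed order) can be checked in isolation. A small sketch, assuming `i8`/`u8` in place of the `u128` encoding the compiler uses:

```rust
// Standalone check of the `signed_bias`/`new_finite` trick: XOR-ing a signed
// value's bits with `1 << (bits - 1)` maps the type's range onto unsigned
// integers while preserving order, so interval arithmetic can be done
// uniformly on the unsigned encoding. Plain `i8`/`u8` are stand-ins here.
fn encode_i8(x: i8) -> u8 {
    let bias = 1u8 << 7; // 1 << (bits - 1)
    (x as u8) ^ bias
}

fn main() {
    assert_eq!(encode_i8(-128), 0); // i8::MIN maps to the smallest encoding
    assert_eq!(encode_i8(-1), 127);
    assert_eq!(encode_i8(0), 128);
    assert_eq!(encode_i8(127), 255); // i8::MAX maps to the largest encoding

    // Order is preserved, which is the property the doc comment relies on.
    assert!(encode_i8(-5) < encode_i8(3));
}
```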
- fn to_diagnostic_pat_range_bdy<'tcx>( - self, - ty: Ty<'tcx>, - tcx: TyCtxt<'tcx>, - ) -> PatRangeBoundary<'tcx> { - match self { - NegInfinity => PatRangeBoundary::NegInfinity, - Finite(x) => { - let bias = Self::signed_bias(tcx, ty); - let bits = x ^ bias; - let size = ty.primitive_size(tcx); - match Scalar::try_from_uint(bits, size) { - Some(scalar) => { - let value = mir::Const::from_scalar(tcx, scalar, ty); - PatRangeBoundary::Finite(value) - } - // The value doesn't fit. Since `x >= 0` and 0 always encodes the minimum value - // for a type, the problem isn't that the value is too small. So it must be too - // large. - None => PatRangeBoundary::PosInfinity, - } - } - JustAfterMax | PosInfinity => PatRangeBoundary::PosInfinity, - } - } - - /// Note: this will not turn a finite value into an infinite one or vice-versa. - pub(crate) fn minus_one(self) -> Self { - match self { - Finite(n) => match n.checked_sub(1) { - Some(m) => Finite(m), - None => bug!(), - }, - JustAfterMax => Finite(u128::MAX), - x => x, - } - } - /// Note: this will not turn a finite value into an infinite one or vice-versa. - pub(crate) fn plus_one(self) -> Self { - match self { - Finite(n) => match n.checked_add(1) { - Some(m) => Finite(m), - None => JustAfterMax, - }, - JustAfterMax => bug!(), - x => x, - } - } -} - -/// An exclusive interval, used for precise integer exhaustiveness checking. `IntRange`s always -/// store a contiguous range. -/// -/// `IntRange` is never used to encode an empty range or a "range" that wraps around the (offset) -/// space: i.e., `range.lo < range.hi`. -#[derive(Clone, Copy, PartialEq, Eq)] -pub(crate) struct IntRange { - pub(crate) lo: MaybeInfiniteInt, // Must not be `PosInfinity`. - pub(crate) hi: MaybeInfiniteInt, // Must not be `NegInfinity`. -} - -impl IntRange { - #[inline] - pub(super) fn is_integral(ty: Ty<'_>) -> bool { - matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_)) - } - - /// Best effort; will not know that e.g. `255u8..` is a singleton. - pub(super) fn is_singleton(&self) -> bool { - // Since `lo` and `hi` can't be the same `Infinity` and `plus_one` never changes from finite - // to infinite, this correctly only detects ranges that contain exacly one `Finite(x)`. - self.lo.plus_one() == self.hi - } - - #[inline] - fn from_bits<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, bits: u128) -> IntRange { - let x = MaybeInfiniteInt::new_finite(tcx, ty, bits); - IntRange { lo: x, hi: x.plus_one() } - } - - #[inline] - fn from_range(lo: MaybeInfiniteInt, mut hi: MaybeInfiniteInt, end: RangeEnd) -> IntRange { - if end == RangeEnd::Included { - hi = hi.plus_one(); - } - if lo >= hi { - // This should have been caught earlier by E0030. - bug!("malformed range pattern: {lo:?}..{hi:?}"); - } - IntRange { lo, hi } - } - - fn is_subrange(&self, other: &Self) -> bool { - other.lo <= self.lo && self.hi <= other.hi - } - - fn intersection(&self, other: &Self) -> Option { - if self.lo < other.hi && other.lo < self.hi { - Some(IntRange { lo: max(self.lo, other.lo), hi: min(self.hi, other.hi) }) - } else { - None - } - } - - /// Partition a range of integers into disjoint subranges. This does constructor splitting for - /// integer ranges as explained at the top of the file. - /// - /// This returns an output that covers `self`. The output is split so that the only - /// intersections between an output range and a column range are inclusions. No output range - /// straddles the boundary of one of the inputs. 
- /// - /// Additionally, we track for each output range whether it is covered by one of the column ranges or not. - /// - /// The following input: - /// ```text - /// (--------------------------) // `self` - /// (------) (----------) (-) - /// (------) (--------) - /// ``` - /// is first intersected with `self`: - /// ```text - /// (--------------------------) // `self` - /// (----) (----------) (-) - /// (------) (--------) - /// ``` - /// and then iterated over as follows: - /// ```text - /// (-(--)-(-)-(------)-)--(-)- - /// ``` - /// where each sequence of dashes is an output range, and dashes outside parentheses are marked - /// as `Presence::Missing`. - /// - /// ## `isize`/`usize` - /// - /// Whereas a wildcard of type `i32` stands for the range `i32::MIN..=i32::MAX`, a `usize` - /// wildcard stands for `0..PosInfinity` and a `isize` wildcard stands for - /// `NegInfinity..PosInfinity`. In other words, as far as `IntRange` is concerned, there are - /// values before `isize::MIN` and after `usize::MAX`/`isize::MAX`. - /// This is to avoid e.g. `0..(u32::MAX as usize)` from being exhaustive on one architecture and - /// not others. This was decided in . - /// - /// These infinities affect splitting subtly: it is possible to get `NegInfinity..0` and - /// `usize::MAX+1..PosInfinity` in the output. Diagnostics must be careful to handle these - /// fictitious ranges sensibly. - fn split( - &self, - column_ranges: impl Iterator, - ) -> impl Iterator { - // The boundaries of ranges in `column_ranges` intersected with `self`. - // We do parenthesis matching for input ranges. A boundary counts as +1 if it starts - // a range and -1 if it ends it. When the count is > 0 between two boundaries, we - // are within an input range. - let mut boundaries: Vec<(MaybeInfiniteInt, isize)> = column_ranges - .filter_map(|r| self.intersection(&r)) - .flat_map(|r| [(r.lo, 1), (r.hi, -1)]) - .collect(); - // We sort by boundary, and for each boundary we sort the "closing parentheses" first. The - // order of +1/-1 for a same boundary value is actually irrelevant, because we only look at - // the accumulated count between distinct boundary values. - boundaries.sort_unstable(); - - // Accumulate parenthesis counts. - let mut paren_counter = 0isize; - // Gather pairs of adjacent boundaries. - let mut prev_bdy = self.lo; - boundaries - .into_iter() - // End with the end of the range. The count is ignored. - .chain(once((self.hi, 0))) - // List pairs of adjacent boundaries and the count between them. - .map(move |(bdy, delta)| { - // `delta` affects the count as we cross `bdy`, so the relevant count between - // `prev_bdy` and `bdy` is untouched by `delta`. - let ret = (prev_bdy, paren_counter, bdy); - prev_bdy = bdy; - paren_counter += delta; - ret - }) - // Skip empty ranges. - .filter(|&(prev_bdy, _, bdy)| prev_bdy != bdy) - // Convert back to ranges. - .map(move |(prev_bdy, paren_count, bdy)| { - use Presence::*; - let presence = if paren_count > 0 { Seen } else { Unseen }; - let range = IntRange { lo: prev_bdy, hi: bdy }; - (presence, range) - }) - } - - /// Whether the range denotes the fictitious values before `isize::MIN` or after - /// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist). - pub(crate) fn is_beyond_boundaries<'tcx>(&self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> bool { - ty.is_ptr_sized_integral() && { - // The two invalid ranges are `NegInfinity..isize::MIN` (represented as - // `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. 
`to_diagnostic_pat_range_bdy` - // converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `self.lo` - // otherwise. - let lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx); - matches!(lo, PatRangeBoundary::PosInfinity) - || matches!(self.hi, MaybeInfiniteInt::Finite(0)) - } - } - /// Only used for displaying the range. - pub(super) fn to_diagnostic_pat<'tcx>(&self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Pat<'tcx> { - let kind = if matches!((self.lo, self.hi), (NegInfinity, PosInfinity)) { - PatKind::Wild - } else if self.is_singleton() { - let lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx); - let value = lo.as_finite().unwrap(); - PatKind::Constant { value } - } else { - // We convert to an inclusive range for diagnostics. - let mut end = RangeEnd::Included; - let mut lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx); - if matches!(lo, PatRangeBoundary::PosInfinity) { - // The only reason to get `PosInfinity` here is the special case where - // `to_diagnostic_pat_range_bdy` found `{u,i}size::MAX+1`. So the range denotes the - // fictitious values after `{u,i}size::MAX` (see [`IntRange::split`] for why we do - // this). We show this to the user as `usize::MAX..` which is slightly incorrect but - // probably clear enough. - let c = ty.numeric_max_val(tcx).unwrap(); - let value = mir::Const::from_ty_const(c, tcx); - lo = PatRangeBoundary::Finite(value); - } - let hi = if matches!(self.hi, MaybeInfiniteInt::Finite(0)) { - // The range encodes `..ty::MIN`, so we can't convert it to an inclusive range. - end = RangeEnd::Excluded; - self.hi - } else { - self.hi.minus_one() - }; - let hi = hi.to_diagnostic_pat_range_bdy(ty, tcx); - PatKind::Range(Box::new(PatRange { lo, hi, end, ty })) - }; - - Pat { ty, span: DUMMY_SP, kind } - } -} - -/// Note: this will render signed ranges incorrectly. To render properly, convert to a pattern -/// first. -impl fmt::Debug for IntRange { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Finite(lo) = self.lo { - write!(f, "{lo}")?; - } - write!(f, "{}", RangeEnd::Excluded)?; - if let Finite(hi) = self.hi { - write!(f, "{hi}")?; - } - Ok(()) - } -} - -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -enum SliceKind { - /// Patterns of length `n` (`[x, y]`). - FixedLen(usize), - /// Patterns using the `..` notation (`[x, .., y]`). - /// Captures any array constructor of `length >= i + j`. - /// In the case where `array_len` is `Some(_)`, - /// this indicates that we only care about the first `i` and the last `j` values of the array, - /// and everything in between is a wildcard `_`. - VarLen(usize, usize), -} - -impl SliceKind { - fn arity(self) -> usize { - match self { - FixedLen(length) => length, - VarLen(prefix, suffix) => prefix + suffix, - } - } - - /// Whether this pattern includes patterns of length `other_len`. - fn covers_length(self, other_len: usize) -> bool { - match self { - FixedLen(len) => len == other_len, - VarLen(prefix, suffix) => prefix + suffix <= other_len, - } - } -} - -/// A constructor for array and slice patterns. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub(super) struct Slice { - /// `None` if the matched value is a slice, `Some(n)` if it is an array of size `n`. - array_len: Option, - /// The kind of pattern it is: fixed-length `[x, y]` or variable length `[x, .., y]`. - kind: SliceKind, -} - -impl Slice { - fn new(array_len: Option, kind: SliceKind) -> Self { - let kind = match (array_len, kind) { - // If the middle `..` has length 0, we effectively have a fixed-length pattern. 
- (Some(len), VarLen(prefix, suffix)) if prefix + suffix == len => FixedLen(len), - (Some(len), VarLen(prefix, suffix)) if prefix + suffix > len => bug!( - "Slice pattern of length {} longer than its array length {len}", - prefix + suffix - ), - _ => kind, - }; - Slice { array_len, kind } - } - - fn arity(self) -> usize { - self.kind.arity() - } - - /// See `Constructor::is_covered_by` - fn is_covered_by(self, other: Self) -> bool { - other.kind.covers_length(self.arity()) - } - - /// This computes constructor splitting for variable-length slices, as explained at the top of - /// the file. - /// - /// A slice pattern `[x, .., y]` behaves like the infinite or-pattern `[x, y] | [x, _, y] | [x, - /// _, _, y] | etc`. The corresponding value constructors are fixed-length array constructors of - /// corresponding lengths. We obviously can't list this infinitude of constructors. - /// Thankfully, it turns out that for each finite set of slice patterns, all sufficiently large - /// array lengths are equivalent. - /// - /// Let's look at an example, where we are trying to split the last pattern: - /// ``` - /// # fn foo(x: &[bool]) { - /// match x { - /// [true, true, ..] => {} - /// [.., false, false] => {} - /// [..] => {} - /// } - /// # } - /// ``` - /// Here are the results of specialization for the first few lengths: - /// ``` - /// # fn foo(x: &[bool]) { match x { - /// // length 0 - /// [] => {} - /// // length 1 - /// [_] => {} - /// // length 2 - /// [true, true] => {} - /// [false, false] => {} - /// [_, _] => {} - /// // length 3 - /// [true, true, _ ] => {} - /// [_, false, false] => {} - /// [_, _, _ ] => {} - /// // length 4 - /// [true, true, _, _ ] => {} - /// [_, _, false, false] => {} - /// [_, _, _, _ ] => {} - /// // length 5 - /// [true, true, _, _, _ ] => {} - /// [_, _, _, false, false] => {} - /// [_, _, _, _, _ ] => {} - /// # _ => {} - /// # }} - /// ``` - /// - /// We see that above length 4, we are simply inserting columns full of wildcards in the middle. - /// This means that specialization and witness computation with slices of length `l >= 4` will - /// give equivalent results regardless of `l`. This applies to any set of slice patterns: there - /// will be a length `L` above which all lengths behave the same. This is exactly what we need - /// for constructor splitting. - /// - /// A variable-length slice pattern covers all lengths from its arity up to infinity. As we just - /// saw, we can split this in two: lengths below `L` are treated individually with a - /// fixed-length slice each; lengths above `L` are grouped into a single variable-length slice - /// constructor. - /// - /// For each variable-length slice pattern `p` with a prefix of length `plâ‚š` and suffix of - /// length `slâ‚š`, only the first `plâ‚š` and the last `slâ‚š` elements are examined. Therefore, as - /// long as `L` is positive (to avoid concerns about empty types), all elements after the - /// maximum prefix length and before the maximum suffix length are not examined by any - /// variable-length pattern, and therefore can be ignored. This gives us a way to compute `L`. - /// - /// Additionally, if fixed-length patterns exist, we must pick an `L` large enough to miss them, - /// so we can pick `L = max(max(FIXED_LEN)+1, max(PREFIX_LEN) + max(SUFFIX_LEN))`. - /// `max_slice` below will be made to have this arity `L`. - /// - /// If `self` is fixed-length, it is returned as-is. - /// - /// Additionally, we track for each output slice whether it is covered by one of the column slices or not. 
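The doc comment above derives the length bound `L = max(max(FIXED_LEN) + 1, max(PREFIX_LEN) + max(SUFFIX_LEN))`. A small standalone sketch of that computation, using a hypothetical `Pat` enum in place of `SliceKind`:

```rust
// Sketch of the arity bound `L` for slice splitting: every slice length >= L
// behaves the same for the patterns in the column, so lengths below L are
// treated as individual fixed lengths and lengths >= L are grouped together.
enum Pat {
    /// `[x, y]`-style pattern of exactly this length.
    Fixed(usize),
    /// `[x, .., y]`-style pattern with this prefix and suffix length.
    Var(usize, usize),
}

fn length_bound(column: &[Pat]) -> usize {
    let max_fixed = column
        .iter()
        .filter_map(|p| if let Pat::Fixed(n) = p { Some(*n) } else { None })
        .max()
        .unwrap_or(0);
    let max_prefix = column
        .iter()
        .filter_map(|p| if let Pat::Var(pre, _) = p { Some(*pre) } else { None })
        .max()
        .unwrap_or(0);
    let max_suffix = column
        .iter()
        .filter_map(|p| if let Pat::Var(_, suf) = p { Some(*suf) } else { None })
        .max()
        .unwrap_or(0);
    // Start at 1 so the empty slice is always considered separately, mirroring
    // the `fixed_len_upper_bound` comment in `split` below.
    (max_fixed + 1).max(max_prefix + max_suffix).max(1)
}

fn main() {
    // `[true, true, ..]`, `[.., false, false]` and `[..]` from the example
    // above: prefixes/suffixes of length 2, no fixed-length patterns.
    let column = [Pat::Var(2, 0), Pat::Var(0, 2), Pat::Var(0, 0)];
    assert_eq!(length_bound(&column), 4);
    // With a fixed-length pattern, the `max(FIXED_LEN) + 1` term dominates.
    assert_eq!(length_bound(&[Pat::Fixed(5)]), 6);
}
```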
- fn split( - self, - column_slices: impl Iterator, - ) -> impl Iterator { - // Range of lengths below `L`. - let smaller_lengths; - let arity = self.arity(); - let mut max_slice = self.kind; - // Tracks the smallest variable-length slice we've seen. Any slice arity above it is - // therefore `Presence::Seen` in the column. - let mut min_var_len = usize::MAX; - // Tracks the fixed-length slices we've seen, to mark them as `Presence::Seen`. - let mut seen_fixed_lens = FxHashSet::default(); - match &mut max_slice { - VarLen(max_prefix_len, max_suffix_len) => { - // A length larger than any fixed-length slice encountered. - // We start at 1 in case the subtype is empty because in that case the zero-length - // slice must be treated separately from the rest. - let mut fixed_len_upper_bound = 1; - // We grow `max_slice` to be larger than all slices encountered, as described above. - // `L` is `max_slice.arity()`. For diagnostics, we keep the prefix and suffix - // lengths separate. - for slice in column_slices { - match slice.kind { - FixedLen(len) => { - fixed_len_upper_bound = cmp::max(fixed_len_upper_bound, len + 1); - seen_fixed_lens.insert(len); - } - VarLen(prefix, suffix) => { - *max_prefix_len = cmp::max(*max_prefix_len, prefix); - *max_suffix_len = cmp::max(*max_suffix_len, suffix); - min_var_len = cmp::min(min_var_len, prefix + suffix); - } - } - } - // If `fixed_len_upper_bound >= L`, we set `L` to `fixed_len_upper_bound`. - if let Some(delta) = - fixed_len_upper_bound.checked_sub(*max_prefix_len + *max_suffix_len) - { - *max_prefix_len += delta - } - - // We cap the arity of `max_slice` at the array size. - match self.array_len { - Some(len) if max_slice.arity() >= len => max_slice = FixedLen(len), - _ => {} - } - - smaller_lengths = match self.array_len { - // The only admissible fixed-length slice is one of the array size. Whether `max_slice` - // is fixed-length or variable-length, it will be the only relevant slice to output - // here. - Some(_) => 0..0, // empty range - // We need to cover all arities in the range `(arity..infinity)`. We split that - // range into two: lengths smaller than `max_slice.arity()` are treated - // independently as fixed-lengths slices, and lengths above are captured by - // `max_slice`. - None => self.arity()..max_slice.arity(), - }; - } - FixedLen(_) => { - // No need to split here. We only track presence. - for slice in column_slices { - match slice.kind { - FixedLen(len) => { - if len == arity { - seen_fixed_lens.insert(len); - } - } - VarLen(prefix, suffix) => { - min_var_len = cmp::min(min_var_len, prefix + suffix); - } - } - } - smaller_lengths = 0..0; - } - }; - - smaller_lengths.map(FixedLen).chain(once(max_slice)).map(move |kind| { - let arity = kind.arity(); - let seen = if min_var_len <= arity || seen_fixed_lens.contains(&arity) { - Presence::Seen - } else { - Presence::Unseen - }; - (seen, Slice::new(self.array_len, kind)) - }) - } -} - -/// A globally unique id to distinguish `Opaque` patterns. -#[derive(Clone, Debug, PartialEq, Eq)] -pub(super) struct OpaqueId(u32); - -impl OpaqueId { - fn new() -> Self { - use std::sync::atomic::{AtomicU32, Ordering}; - static OPAQUE_ID: AtomicU32 = AtomicU32::new(0); - OpaqueId(OPAQUE_ID.fetch_add(1, Ordering::SeqCst)) - } -} - -/// A value can be decomposed into a constructor applied to some fields. This struct represents -/// the constructor. See also `Fields`. -/// -/// `pat_constructor` retrieves the constructor corresponding to a pattern. 
-/// `specialize_constructor` returns the list of fields corresponding to a pattern, given a -/// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and -/// `Fields`. -#[derive(Clone, Debug, PartialEq)] -pub(super) enum Constructor<'tcx> { - /// The constructor for patterns that have a single constructor, like tuples, struct patterns, - /// and references. Fixed-length arrays are treated separately with `Slice`. - Single, - /// Enum variants. - Variant(VariantIdx), - /// Booleans - Bool(bool), - /// Ranges of integer literal values (`2`, `2..=5` or `2..5`). - IntRange(IntRange), - /// Ranges of floating-point literal values (`2.0..=5.2`). - F32Range(IeeeFloat, IeeeFloat, RangeEnd), - F64Range(IeeeFloat, IeeeFloat, RangeEnd), - /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately. - Str(mir::Const<'tcx>), - /// Array and slice patterns. - Slice(Slice), - /// Constants that must not be matched structurally. They are treated as black boxes for the - /// purposes of exhaustiveness: we must not inspect them, and they don't count towards making a - /// match exhaustive. - /// Carries an id that must be unique within a match. We need this to ensure the invariants of - /// [`SplitConstructorSet`]. - Opaque(OpaqueId), - /// Or-pattern. - Or, - /// Wildcard pattern. - Wildcard, - /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used - /// for those types for which we cannot list constructors explicitly, like `f64` and `str`. - NonExhaustive, - /// Fake extra constructor for variants that should not be mentioned in diagnostics. - /// We use this for variants behind an unstable gate as well as - /// `#[doc(hidden)]` ones. - Hidden, - /// Fake extra constructor for constructors that are not seen in the matrix, as explained at the - /// top of the file. - Missing, -} - -impl<'tcx> Constructor<'tcx> { - pub(super) fn is_non_exhaustive(&self) -> bool { - matches!(self, NonExhaustive) - } - - pub(super) fn as_variant(&self) -> Option { - match self { - Variant(i) => Some(*i), - _ => None, - } - } - fn as_bool(&self) -> Option { - match self { - Bool(b) => Some(*b), - _ => None, - } - } - pub(super) fn as_int_range(&self) -> Option<&IntRange> { - match self { - IntRange(range) => Some(range), - _ => None, - } - } - fn as_slice(&self) -> Option { - match self { - Slice(slice) => Some(*slice), - _ => None, - } - } - - fn variant_index_for_adt(&self, adt: ty::AdtDef<'tcx>) -> VariantIdx { - match *self { - Variant(idx) => idx, - Single => { - assert!(!adt.is_enum()); - FIRST_VARIANT - } - _ => bug!("bad constructor {:?} for adt {:?}", self, adt), - } - } - - /// The number of fields for this constructor. This must be kept in sync with - /// `Fields::wildcards`. - pub(super) fn arity(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> usize { - match self { - Single | Variant(_) => match pcx.ty.kind() { - ty::Tuple(fs) => fs.len(), - ty::Ref(..) => 1, - ty::Adt(adt, ..) => { - if adt.is_box() { - // The only legal patterns of type `Box` (outside `std`) are `_` and box - // patterns. If we're here we can assume this is a box pattern. - 1 - } else { - let variant = &adt.variant(self.variant_index_for_adt(*adt)); - Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant).count() - } - } - _ => bug!("Unexpected type for `Single` constructor: {:?}", pcx.ty), - }, - Slice(slice) => slice.arity(), - Bool(..) - | IntRange(..) - | F32Range(..) - | F64Range(..) - | Str(..) - | Opaque(..) 
- | NonExhaustive - | Hidden - | Missing { .. } - | Wildcard => 0, - Or => bug!("The `Or` constructor doesn't have a fixed arity"), - } - } - - /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`. - /// For the simple cases, this is simply checking for equality. For the "grouped" constructors, - /// this checks for inclusion. - // We inline because this has a single call site in `Matrix::specialize_constructor`. - #[inline] - pub(super) fn is_covered_by<'p>(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool { - match (self, other) { - (Wildcard, _) => { - span_bug!( - pcx.cx.scrut_span, - "Constructor splitting should not have returned `Wildcard`" - ) - } - // Wildcards cover anything - (_, Wildcard) => true, - // Only a wildcard pattern can match these special constructors. - (Missing { .. } | NonExhaustive | Hidden, _) => false, - - (Single, Single) => true, - (Variant(self_id), Variant(other_id)) => self_id == other_id, - (Bool(self_b), Bool(other_b)) => self_b == other_b, - - (IntRange(self_range), IntRange(other_range)) => self_range.is_subrange(other_range), - (F32Range(self_from, self_to, self_end), F32Range(other_from, other_to, other_end)) => { - self_from.ge(other_from) - && match self_to.partial_cmp(other_to) { - Some(Ordering::Less) => true, - Some(Ordering::Equal) => other_end == self_end, - _ => false, - } - } - (F64Range(self_from, self_to, self_end), F64Range(other_from, other_to, other_end)) => { - self_from.ge(other_from) - && match self_to.partial_cmp(other_to) { - Some(Ordering::Less) => true, - Some(Ordering::Equal) => other_end == self_end, - _ => false, - } - } - (Str(self_val), Str(other_val)) => { - // FIXME Once valtrees are available we can directly use the bytes - // in the `Str` variant of the valtree for the comparison here. - self_val == other_val - } - (Slice(self_slice), Slice(other_slice)) => self_slice.is_covered_by(*other_slice), - - // Opaque constructors don't interact with anything unless they come from the - // syntactically identical pattern. - (Opaque(self_id), Opaque(other_id)) => self_id == other_id, - (Opaque(..), _) | (_, Opaque(..)) => false, - - _ => span_bug!( - pcx.cx.scrut_span, - "trying to compare incompatible constructors {:?} and {:?}", - self, - other - ), - } - } -} - -#[derive(Debug, Clone, Copy)] -pub(super) enum VariantVisibility { - /// Variant that doesn't fit the other cases, i.e. most variants. - Visible, - /// Variant behind an unstable gate or with the `#[doc(hidden)]` attribute. It will not be - /// mentioned in diagnostics unless the user mentioned it first. - Hidden, - /// Variant that matches no value. E.g. `Some::>` if the `exhaustive_patterns` feature - /// is enabled. Like `Hidden`, it will not be mentioned in diagnostics unless the user mentioned - /// it first. - Empty, -} - -/// Describes the set of all constructors for a type. For details, in particular about the emptiness -/// of constructors, see the top of the file. -/// -/// In terms of division of responsibility, [`ConstructorSet::split`] handles all of the -/// `exhaustive_patterns` feature. -#[derive(Debug)] -pub(super) enum ConstructorSet { - /// The type has a single constructor, e.g. `&T` or a struct. `empty` tracks whether the - /// constructor is empty. - Single { empty: bool }, - /// This type has the following list of constructors. If `variants` is empty and - /// `non_exhaustive` is false, don't use this; use `NoConstructors` instead. 
- Variants { variants: IndexVec, non_exhaustive: bool }, - /// Booleans. - Bool, - /// The type is spanned by integer values. The range or ranges give the set of allowed values. - /// The second range is only useful for `char`. - Integers { range_1: IntRange, range_2: Option }, - /// The type is matched by slices. `array_len` is the compile-time length of the array, if - /// known. If `subtype_is_empty`, all constructors are empty except possibly the zero-length - /// slice `[]`. - Slice { array_len: Option, subtype_is_empty: bool }, - /// The constructors cannot be listed, and the type cannot be matched exhaustively. E.g. `str`, - /// floats. - Unlistable, - /// The type has no constructors (not even empty ones). This is `!` and empty enums. - NoConstructors, -} - -/// Describes the result of analyzing the constructors in a column of a match. -/// -/// `present` is morally the set of constructors present in the column, and `missing` is the set of -/// constructors that exist in the type but are not present in the column. -/// -/// More formally, if we discard wildcards from the column, this respects the following constraints: -/// 1. the union of `present`, `missing` and `missing_empty` covers all the constructors of the type -/// 2. each constructor in `present` is covered by something in the column -/// 3. no constructor in `missing` or `missing_empty` is covered by anything in the column -/// 4. each constructor in the column is equal to the union of one or more constructors in `present` -/// 5. `missing` does not contain empty constructors (see discussion about emptiness at the top of -/// the file); -/// 6. `missing_empty` contains only empty constructors -/// 7. constructors in `present`, `missing` and `missing_empty` are split for the column; in other -/// words, they are either fully included in or fully disjoint from each constructor in the -/// column. In yet other words, there are no non-trivial intersections like between `0..10` and -/// `5..15`. -/// -/// We must be particularly careful with weird constructors like `Opaque`: they're not formally part -/// of the `ConstructorSet` for the type, yet if we forgot to include them in `present` we would be -/// ignoring any row with `Opaque`s in the algorithm. Hence the importance of point 4. -#[derive(Debug)] -pub(super) struct SplitConstructorSet<'tcx> { - pub(super) present: SmallVec<[Constructor<'tcx>; 1]>, - pub(super) missing: Vec>, - pub(super) missing_empty: Vec>, -} - -impl ConstructorSet { - /// Creates a set that represents all the constructors of `ty`. - /// - /// See at the top of the file for considerations of emptiness. - #[instrument(level = "debug", skip(cx), ret)] - pub(super) fn for_ty<'p, 'tcx>(cx: &MatchCheckCtxt<'p, 'tcx>, ty: Ty<'tcx>) -> Self { - let make_range = |start, end| { - IntRange::from_range( - MaybeInfiniteInt::new_finite(cx.tcx, ty, start), - MaybeInfiniteInt::new_finite(cx.tcx, ty, end), - RangeEnd::Included, - ) - }; - // This determines the set of all possible constructors for the type `ty`. For numbers, - // arrays and slices we use ranges and variable-length slices when appropriate. - match ty.kind() { - ty::Bool => Self::Bool, - ty::Char => { - // The valid Unicode Scalar Value ranges. - Self::Integers { - range_1: make_range('\u{0000}' as u128, '\u{D7FF}' as u128), - range_2: Some(make_range('\u{E000}' as u128, '\u{10FFFF}' as u128)), - } - } - &ty::Int(ity) => { - let range = if ty.is_ptr_sized_integral() { - // The min/max values of `isize` are not allowed to be observed. 
- IntRange { lo: NegInfinity, hi: PosInfinity } - } else { - let bits = Integer::from_int_ty(&cx.tcx, ity).size().bits() as u128; - let min = 1u128 << (bits - 1); - let max = min - 1; - make_range(min, max) - }; - Self::Integers { range_1: range, range_2: None } - } - &ty::Uint(uty) => { - let range = if ty.is_ptr_sized_integral() { - // The max value of `usize` is not allowed to be observed. - let lo = MaybeInfiniteInt::new_finite(cx.tcx, ty, 0); - IntRange { lo, hi: PosInfinity } - } else { - let size = Integer::from_uint_ty(&cx.tcx, uty).size(); - let max = size.truncate(u128::MAX); - make_range(0, max) - }; - Self::Integers { range_1: range, range_2: None } - } - ty::Slice(sub_ty) => { - Self::Slice { array_len: None, subtype_is_empty: cx.is_uninhabited(*sub_ty) } - } - ty::Array(sub_ty, len) => { - // We treat arrays of a constant but unknown length like slices. - Self::Slice { - array_len: len.try_eval_target_usize(cx.tcx, cx.param_env).map(|l| l as usize), - subtype_is_empty: cx.is_uninhabited(*sub_ty), - } - } - ty::Adt(def, args) if def.is_enum() => { - let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty); - if def.variants().is_empty() && !is_declared_nonexhaustive { - Self::NoConstructors - } else { - let mut variants = - IndexVec::from_elem(VariantVisibility::Visible, def.variants()); - for (idx, v) in def.variants().iter_enumerated() { - let variant_def_id = def.variant(idx).def_id; - // Visibly uninhabited variants. - let is_inhabited = v - .inhabited_predicate(cx.tcx, *def) - .instantiate(cx.tcx, args) - .apply(cx.tcx, cx.param_env, cx.module); - // Variants that depend on a disabled unstable feature. - let is_unstable = matches!( - cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None), - EvalResult::Deny { .. } - ); - // Foreign `#[doc(hidden)]` variants. - let is_doc_hidden = - cx.tcx.is_doc_hidden(variant_def_id) && !variant_def_id.is_local(); - let visibility = if !is_inhabited { - // FIXME: handle empty+hidden - VariantVisibility::Empty - } else if is_unstable || is_doc_hidden { - VariantVisibility::Hidden - } else { - VariantVisibility::Visible - }; - variants[idx] = visibility; - } - - Self::Variants { variants, non_exhaustive: is_declared_nonexhaustive } - } - } - ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => { - Self::Single { empty: cx.is_uninhabited(ty) } - } - ty::Never => Self::NoConstructors, - // This type is one for which we cannot list constructors, like `str` or `f64`. - // FIXME(Nadrieril): which of these are actually allowed? - ty::Float(_) - | ty::Str - | ty::Foreign(_) - | ty::RawPtr(_) - | ty::FnDef(_, _) - | ty::FnPtr(_) - | ty::Dynamic(_, _, _) - | ty::Closure(_, _) - | ty::Coroutine(_, _, _) - | ty::Alias(_, _) - | ty::Param(_) - | ty::Error(_) => Self::Unlistable, - ty::CoroutineWitness(_, _) | ty::Bound(_, _) | ty::Placeholder(_) | ty::Infer(_) => { - bug!("Encountered unexpected type in `ConstructorSet::for_ty`: {ty:?}") - } - } - } - - /// This analyzes a column of constructors to 1/ determine which constructors of the type (if - /// any) are missing; 2/ split constructors to handle non-trivial intersections e.g. on ranges - /// or slices. This can get subtle; see [`SplitConstructorSet`] for details of this operation - /// and its invariants. 
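The present/missing partition that `split` produces can be sketched with plain strings standing in for constructors; the `split` helper and the `"None"`/`"Some"` names below are purely illustrative:

```rust
// Sketch only: partition a type's constructors into those seen in the column and those missing.
fn split(all: &[&'static str], column: &[&'static str]) -> (Vec<&'static str>, Vec<&'static str>) {
    // `present`: constructors some row matches on; `missing`: constructors no row covers,
    // which is what gets reported as non-exhaustiveness witnesses.
    all.iter().copied().partition(|ctor| column.contains(ctor))
}

fn main() {
    // `match x { None => {} }` with `x: Option<T>` and `T` inhabited:
    let (present, missing) = split(&["None", "Some"], &["None"]);
    assert_eq!(present, ["None"]);
    assert_eq!(missing, ["Some"]); // surfaced to the user as the witness `Some(_)`
}
```

The real implementation additionally splits ranges and slices and keeps empty constructors in a separate `missing_empty` bucket, as described above.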
- #[instrument(level = "debug", skip(self, pcx, ctors), ret)] - pub(super) fn split<'a, 'tcx>( - &self, - pcx: &PatCtxt<'_, '_, 'tcx>, - ctors: impl Iterator> + Clone, - ) -> SplitConstructorSet<'tcx> - where - 'tcx: 'a, - { - let mut present: SmallVec<[_; 1]> = SmallVec::new(); - // Empty constructors found missing. - let mut missing_empty = Vec::new(); - // Nonempty constructors found missing. - let mut missing = Vec::new(); - // Constructors in `ctors`, except wildcards and opaques. - let mut seen = Vec::new(); - for ctor in ctors.cloned() { - match ctor { - Opaque(..) => present.push(ctor), - Wildcard => {} // discard wildcards - _ => seen.push(ctor), - } - } - - match self { - ConstructorSet::Single { empty } => { - if !seen.is_empty() { - present.push(Single); - } else if *empty { - missing_empty.push(Single); - } else { - missing.push(Single); - } - } - ConstructorSet::Variants { variants, non_exhaustive } => { - let seen_set: FxHashSet<_> = seen.iter().map(|c| c.as_variant().unwrap()).collect(); - let mut skipped_a_hidden_variant = false; - - for (idx, visibility) in variants.iter_enumerated() { - let ctor = Variant(idx); - if seen_set.contains(&idx) { - present.push(ctor); - } else { - // We only put visible variants directly into `missing`. - match visibility { - VariantVisibility::Visible => missing.push(ctor), - VariantVisibility::Hidden => skipped_a_hidden_variant = true, - VariantVisibility::Empty => missing_empty.push(ctor), - } - } - } - - if skipped_a_hidden_variant { - missing.push(Hidden); - } - if *non_exhaustive { - missing.push(NonExhaustive); - } - } - ConstructorSet::Bool => { - let mut seen_false = false; - let mut seen_true = false; - for b in seen.iter().map(|ctor| ctor.as_bool().unwrap()) { - if b { - seen_true = true; - } else { - seen_false = true; - } - } - if seen_false { - present.push(Bool(false)); - } else { - missing.push(Bool(false)); - } - if seen_true { - present.push(Bool(true)); - } else { - missing.push(Bool(true)); - } - } - ConstructorSet::Integers { range_1, range_2 } => { - let seen_ranges: Vec<_> = - seen.iter().map(|ctor| ctor.as_int_range().unwrap().clone()).collect(); - for (seen, splitted_range) in range_1.split(seen_ranges.iter().cloned()) { - match seen { - Presence::Unseen => missing.push(IntRange(splitted_range)), - Presence::Seen => present.push(IntRange(splitted_range)), - } - } - if let Some(range_2) = range_2 { - for (seen, splitted_range) in range_2.split(seen_ranges.into_iter()) { - match seen { - Presence::Unseen => missing.push(IntRange(splitted_range)), - Presence::Seen => present.push(IntRange(splitted_range)), - } - } - } - } - ConstructorSet::Slice { array_len, subtype_is_empty } => { - let seen_slices = seen.iter().map(|c| c.as_slice().unwrap()); - let base_slice = Slice::new(*array_len, VarLen(0, 0)); - for (seen, splitted_slice) in base_slice.split(seen_slices) { - let ctor = Slice(splitted_slice); - match seen { - Presence::Seen => present.push(ctor), - Presence::Unseen => { - if *subtype_is_empty && splitted_slice.arity() != 0 { - // We have subpatterns of an empty type, so the constructor is - // empty. - missing_empty.push(ctor); - } else { - missing.push(ctor); - } - } - } - } - } - ConstructorSet::Unlistable => { - // Since we can't list constructors, we take the ones in the column. This might list - // some constructors several times but there's not much we can do. 
- present.extend(seen); - missing.push(NonExhaustive); - } - ConstructorSet::NoConstructors => { - // In a `MaybeInvalid` place even an empty pattern may be reachable. We therefore - // add a dummy empty constructor here, which will be ignored if the place is - // `ValidOnly`. - missing_empty.push(NonExhaustive); - } - } - - // We have now grouped all the constructors into 3 buckets: present, missing, missing_empty. - // In the absence of the `exhaustive_patterns` feature however, we don't count nested empty - // types as empty. Only non-nested `!` or `enum Foo {}` are considered empty. - if !pcx.cx.tcx.features().exhaustive_patterns - && !(pcx.is_top_level && matches!(self, Self::NoConstructors)) - { - // Treat all missing constructors as nonempty. - missing.extend(missing_empty.drain(..)); - } - - SplitConstructorSet { present, missing, missing_empty } - } -} - -/// A value can be decomposed into a constructor applied to some fields. This struct represents -/// those fields, generalized to allow patterns in each field. See also `Constructor`. -/// -/// This is constructed for a constructor using [`Fields::wildcards()`]. The idea is that -/// [`Fields::wildcards()`] constructs a list of fields where all entries are wildcards, and then -/// given a pattern we fill some of the fields with its subpatterns. -/// In the following example `Fields::wildcards` returns `[_, _, _, _]`. Then in -/// `extract_pattern_arguments` we fill some of the entries, and the result is -/// `[Some(0), _, _, _]`. -/// ```compile_fail,E0004 -/// # fn foo() -> [Option; 4] { [None; 4] } -/// let x: [Option; 4] = foo(); -/// match x { -/// [Some(0), ..] => {} -/// } -/// ``` -/// -/// Note that the number of fields of a constructor may not match the fields declared in the -/// original struct/variant. This happens if a private or `non_exhaustive` field is uninhabited, -/// because the code mustn't observe that it is uninhabited. In that case that field is not -/// included in `fields`. For that reason, when you have a `FieldIdx` you must use -/// `index_with_declared_idx`. -#[derive(Debug, Clone, Copy)] -pub(super) struct Fields<'p, 'tcx> { - fields: &'p [DeconstructedPat<'p, 'tcx>], -} - -impl<'p, 'tcx> Fields<'p, 'tcx> { - fn empty() -> Self { - Fields { fields: &[] } - } - - fn singleton(cx: &MatchCheckCtxt<'p, 'tcx>, field: DeconstructedPat<'p, 'tcx>) -> Self { - let field: &_ = cx.pattern_arena.alloc(field); - Fields { fields: std::slice::from_ref(field) } - } - - pub(super) fn from_iter( - cx: &MatchCheckCtxt<'p, 'tcx>, - fields: impl IntoIterator>, - ) -> Self { - let fields: &[_] = cx.pattern_arena.alloc_from_iter(fields); - Fields { fields } - } - - fn wildcards_from_tys( - cx: &MatchCheckCtxt<'p, 'tcx>, - tys: impl IntoIterator>, - ) -> Self { - Fields::from_iter(cx, tys.into_iter().map(|ty| DeconstructedPat::wildcard(ty, DUMMY_SP))) - } - - // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide - // uninhabited fields in order not to reveal the uninhabitedness of the whole variant. - // This lists the fields we keep along with their types. - fn list_variant_nonhidden_fields<'a>( - cx: &'a MatchCheckCtxt<'p, 'tcx>, - ty: Ty<'tcx>, - variant: &'a VariantDef, - ) -> impl Iterator)> + Captures<'a> + Captures<'p> { - let ty::Adt(adt, args) = ty.kind() else { bug!() }; - // Whether we must not match the fields of this variant exhaustively. 
- let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !adt.did().is_local(); - - variant.fields.iter().enumerate().filter_map(move |(i, field)| { - let ty = field.ty(cx.tcx, args); - // `field.ty()` doesn't normalize after substituting. - let ty = cx.tcx.normalize_erasing_regions(cx.param_env, ty); - let is_visible = adt.is_enum() || field.vis.is_accessible_from(cx.module, cx.tcx); - let is_uninhabited = cx.tcx.features().exhaustive_patterns && cx.is_uninhabited(ty); - - if is_uninhabited && (!is_visible || is_non_exhaustive) { - None - } else { - Some((FieldIdx::new(i), ty)) - } - }) - } - - /// Creates a new list of wildcard fields for a given constructor. The result must have a - /// length of `constructor.arity()`. - #[instrument(level = "trace")] - pub(super) fn wildcards(pcx: &PatCtxt<'_, 'p, 'tcx>, constructor: &Constructor<'tcx>) -> Self { - let ret = match constructor { - Single | Variant(_) => match pcx.ty.kind() { - ty::Tuple(fs) => Fields::wildcards_from_tys(pcx.cx, fs.iter()), - ty::Ref(_, rty, _) => Fields::wildcards_from_tys(pcx.cx, once(*rty)), - ty::Adt(adt, args) => { - if adt.is_box() { - // The only legal patterns of type `Box` (outside `std`) are `_` and box - // patterns. If we're here we can assume this is a box pattern. - Fields::wildcards_from_tys(pcx.cx, once(args.type_at(0))) - } else { - let variant = &adt.variant(constructor.variant_index_for_adt(*adt)); - let tys = Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant) - .map(|(_, ty)| ty); - Fields::wildcards_from_tys(pcx.cx, tys) - } - } - _ => bug!("Unexpected type for `Single` constructor: {:?}", pcx), - }, - Slice(slice) => match *pcx.ty.kind() { - ty::Slice(ty) | ty::Array(ty, _) => { - let arity = slice.arity(); - Fields::wildcards_from_tys(pcx.cx, (0..arity).map(|_| ty)) - } - _ => bug!("bad slice pattern {:?} {:?}", constructor, pcx), - }, - Bool(..) - | IntRange(..) - | F32Range(..) - | F64Range(..) - | Str(..) - | Opaque(..) - | NonExhaustive - | Hidden - | Missing { .. } - | Wildcard => Fields::empty(), - Or => { - bug!("called `Fields::wildcards` on an `Or` ctor") - } - }; - debug!(?ret); - ret - } - - /// Returns the list of patterns. - pub(super) fn iter_patterns<'a>( - &'a self, - ) -> impl Iterator> + Captures<'a> { - self.fields.iter() - } -} - -/// Values and patterns can be represented as a constructor applied to some fields. This represents -/// a pattern in this form. -/// This also uses interior mutability to keep track of whether the pattern has been found reachable -/// during analysis. For this reason they cannot be cloned. -/// A `DeconstructedPat` will almost always come from user input; the only exception are some -/// `Wildcard`s introduced during specialization. -pub(crate) struct DeconstructedPat<'p, 'tcx> { - ctor: Constructor<'tcx>, - fields: Fields<'p, 'tcx>, - ty: Ty<'tcx>, - span: Span, - /// Whether removing this arm would change the behavior of the match expression. - useful: Cell, -} - -impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> { - pub(super) fn wildcard(ty: Ty<'tcx>, span: Span) -> Self { - Self::new(Wildcard, Fields::empty(), ty, span) - } - - pub(super) fn new( - ctor: Constructor<'tcx>, - fields: Fields<'p, 'tcx>, - ty: Ty<'tcx>, - span: Span, - ) -> Self { - DeconstructedPat { ctor, fields, ty, span, useful: Cell::new(false) } - } - - /// Note: the input patterns must have been lowered through - /// `super::check_match::MatchVisitor::lower_pattern`. 
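A minimal sketch of the decomposition `from_pat` performs, using toy `Ctor` and `Deconstructed` types rather than the real arena-allocated ones:

```rust
// Sketch only: a pattern is lowered to a constructor applied to already-lowered subpatterns.
#[derive(Debug)]
enum Ctor {
    Wildcard,
    Variant(&'static str),
    Int(u128),
}

#[derive(Debug)]
struct Deconstructed {
    ctor: Ctor,
    fields: Vec<Deconstructed>,
}

fn main() {
    // `Some(0)` lowers to `Variant("Some")` with one field, itself `Int(0)` with no fields;
    // `_` (and bindings without subpatterns) lower to `Wildcard` with no fields.
    let some_zero = Deconstructed {
        ctor: Ctor::Variant("Some"),
        fields: vec![Deconstructed { ctor: Ctor::Int(0), fields: vec![] }],
    };
    let wild = Deconstructed { ctor: Ctor::Wildcard, fields: vec![] };
    println!("{some_zero:?}\n{wild:?}");
}
```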
- pub(crate) fn from_pat(cx: &MatchCheckCtxt<'p, 'tcx>, pat: &Pat<'tcx>) -> Self { - let mkpat = |pat| DeconstructedPat::from_pat(cx, pat); - let ctor; - let fields; - match &pat.kind { - PatKind::AscribeUserType { subpattern, .. } - | PatKind::InlineConstant { subpattern, .. } => return mkpat(subpattern), - PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat), - PatKind::Binding { subpattern: None, .. } | PatKind::Wild => { - ctor = Wildcard; - fields = Fields::empty(); - } - PatKind::Deref { subpattern } => { - ctor = Single; - fields = Fields::singleton(cx, mkpat(subpattern)); - } - PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => { - match pat.ty.kind() { - ty::Tuple(fs) => { - ctor = Single; - let mut wilds: SmallVec<[_; 2]> = - fs.iter().map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect(); - for pat in subpatterns { - wilds[pat.field.index()] = mkpat(&pat.pattern); - } - fields = Fields::from_iter(cx, wilds); - } - ty::Adt(adt, args) if adt.is_box() => { - // The only legal patterns of type `Box` (outside `std`) are `_` and box - // patterns. If we're here we can assume this is a box pattern. - // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_, - // _)` or a box pattern. As a hack to avoid an ICE with the former, we - // ignore other fields than the first one. This will trigger an error later - // anyway. - // See https://github.com/rust-lang/rust/issues/82772 , - // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977 - // The problem is that we can't know from the type whether we'll match - // normally or through box-patterns. We'll have to figure out a proper - // solution when we introduce generalized deref patterns. Also need to - // prevent mixing of those two options. - let pattern = subpatterns.into_iter().find(|pat| pat.field.index() == 0); - let pat = if let Some(pat) = pattern { - mkpat(&pat.pattern) - } else { - DeconstructedPat::wildcard(args.type_at(0), pat.span) - }; - ctor = Single; - fields = Fields::singleton(cx, pat); - } - ty::Adt(adt, _) => { - ctor = match pat.kind { - PatKind::Leaf { .. } => Single, - PatKind::Variant { variant_index, .. } => Variant(variant_index), - _ => bug!(), - }; - let variant = &adt.variant(ctor.variant_index_for_adt(*adt)); - // For each field in the variant, we store the relevant index into `self.fields` if any. 
- let mut field_id_to_id: Vec> = - (0..variant.fields.len()).map(|_| None).collect(); - let tys = Fields::list_variant_nonhidden_fields(cx, pat.ty, variant) - .enumerate() - .map(|(i, (field, ty))| { - field_id_to_id[field.index()] = Some(i); - ty - }); - let mut wilds: SmallVec<[_; 2]> = - tys.map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect(); - for pat in subpatterns { - if let Some(i) = field_id_to_id[pat.field.index()] { - wilds[i] = mkpat(&pat.pattern); - } - } - fields = Fields::from_iter(cx, wilds); - } - _ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty), - } - } - PatKind::Constant { value } => { - match pat.ty.kind() { - ty::Bool => { - ctor = match value.try_eval_bool(cx.tcx, cx.param_env) { - Some(b) => Bool(b), - None => Opaque(OpaqueId::new()), - }; - fields = Fields::empty(); - } - ty::Char | ty::Int(_) | ty::Uint(_) => { - ctor = match value.try_eval_bits(cx.tcx, cx.param_env) { - Some(bits) => IntRange(IntRange::from_bits(cx.tcx, pat.ty, bits)), - None => Opaque(OpaqueId::new()), - }; - fields = Fields::empty(); - } - ty::Float(ty::FloatTy::F32) => { - ctor = match value.try_eval_bits(cx.tcx, cx.param_env) { - Some(bits) => { - use rustc_apfloat::Float; - let value = rustc_apfloat::ieee::Single::from_bits(bits); - F32Range(value, value, RangeEnd::Included) - } - None => Opaque(OpaqueId::new()), - }; - fields = Fields::empty(); - } - ty::Float(ty::FloatTy::F64) => { - ctor = match value.try_eval_bits(cx.tcx, cx.param_env) { - Some(bits) => { - use rustc_apfloat::Float; - let value = rustc_apfloat::ieee::Double::from_bits(bits); - F64Range(value, value, RangeEnd::Included) - } - None => Opaque(OpaqueId::new()), - }; - fields = Fields::empty(); - } - ty::Ref(_, t, _) if t.is_str() => { - // We want a `&str` constant to behave like a `Deref` pattern, to be compatible - // with other `Deref` patterns. This could have been done in `const_to_pat`, - // but that causes issues with the rest of the matching code. - // So here, the constructor for a `"foo"` pattern is `&` (represented by - // `Single`), and has one field. That field has constructor `Str(value)` and no - // fields. - // Note: `t` is `str`, not `&str`. - let subpattern = - DeconstructedPat::new(Str(*value), Fields::empty(), *t, pat.span); - ctor = Single; - fields = Fields::singleton(cx, subpattern) - } - // All constants that can be structurally matched have already been expanded - // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are - // opaque. - _ => { - ctor = Opaque(OpaqueId::new()); - fields = Fields::empty(); - } - } - } - PatKind::Range(box PatRange { lo, hi, end, .. 
}) => { - let ty = pat.ty; - ctor = match ty.kind() { - ty::Char | ty::Int(_) | ty::Uint(_) => { - let lo = - MaybeInfiniteInt::from_pat_range_bdy(*lo, ty, cx.tcx, cx.param_env); - let hi = - MaybeInfiniteInt::from_pat_range_bdy(*hi, ty, cx.tcx, cx.param_env); - IntRange(IntRange::from_range(lo, hi, *end)) - } - ty::Float(fty) => { - use rustc_apfloat::Float; - let lo = lo.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env)); - let hi = hi.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env)); - match fty { - ty::FloatTy::F32 => { - use rustc_apfloat::ieee::Single; - let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY); - let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY); - F32Range(lo, hi, *end) - } - ty::FloatTy::F64 => { - use rustc_apfloat::ieee::Double; - let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY); - let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY); - F64Range(lo, hi, *end) - } - } - } - _ => bug!("invalid type for range pattern: {}", ty), - }; - fields = Fields::empty(); - } - PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => { - let array_len = match pat.ty.kind() { - ty::Array(_, length) => { - Some(length.eval_target_usize(cx.tcx, cx.param_env) as usize) - } - ty::Slice(_) => None, - _ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty), - }; - let kind = if slice.is_some() { - VarLen(prefix.len(), suffix.len()) - } else { - FixedLen(prefix.len() + suffix.len()) - }; - ctor = Slice(Slice::new(array_len, kind)); - fields = - Fields::from_iter(cx, prefix.iter().chain(suffix.iter()).map(|p| mkpat(&*p))); - } - PatKind::Or { .. } => { - ctor = Or; - let pats = expand_or_pat(pat); - fields = Fields::from_iter(cx, pats.into_iter().map(mkpat)); - } - PatKind::Never => { - // FIXME(never_patterns): handle `!` in exhaustiveness. This is a sane default - // in the meantime. - ctor = Wildcard; - fields = Fields::empty(); - } - PatKind::Error(_) => { - ctor = Opaque(OpaqueId::new()); - fields = Fields::empty(); - } - } - DeconstructedPat::new(ctor, fields, pat.ty, pat.span) - } - - pub(super) fn is_or_pat(&self) -> bool { - matches!(self.ctor, Or) - } - /// Expand this (possibly-nested) or-pattern into its alternatives. - pub(super) fn flatten_or_pat(&'p self) -> SmallVec<[&'p Self; 1]> { - if self.is_or_pat() { - self.iter_fields().flat_map(|p| p.flatten_or_pat()).collect() - } else { - smallvec![self] - } - } - - pub(super) fn ctor(&self) -> &Constructor<'tcx> { - &self.ctor - } - pub(super) fn ty(&self) -> Ty<'tcx> { - self.ty - } - pub(super) fn span(&self) -> Span { - self.span - } - - pub(super) fn iter_fields<'a>( - &'a self, - ) -> impl Iterator> + Captures<'a> { - self.fields.iter_patterns() - } - - /// Specialize this pattern with a constructor. - /// `other_ctor` can be different from `self.ctor`, but must be covered by it. - pub(super) fn specialize<'a>( - &'a self, - pcx: &PatCtxt<'_, 'p, 'tcx>, - other_ctor: &Constructor<'tcx>, - ) -> SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]> { - match (&self.ctor, other_ctor) { - (Wildcard, _) => { - // We return a wildcard for each field of `other_ctor`. - Fields::wildcards(pcx, other_ctor).iter_patterns().collect() - } - (Slice(self_slice), Slice(other_slice)) - if self_slice.arity() != other_slice.arity() => - { - // The only tricky case: two slices of different arity. Since `self_slice` covers - // `other_slice`, `self_slice` must be `VarLen`, i.e. of the form - // `[prefix, .., suffix]`. 
Moreover `other_slice` is guaranteed to have a larger - // arity. So we fill the middle part with enough wildcards to reach the length of - // the new, larger slice. - match self_slice.kind { - FixedLen(_) => bug!("{:?} doesn't cover {:?}", self_slice, other_slice), - VarLen(prefix, suffix) => { - let (ty::Slice(inner_ty) | ty::Array(inner_ty, _)) = *self.ty.kind() else { - bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty); - }; - let prefix = &self.fields.fields[..prefix]; - let suffix = &self.fields.fields[self_slice.arity() - suffix..]; - let wildcard: &_ = pcx - .cx - .pattern_arena - .alloc(DeconstructedPat::wildcard(inner_ty, DUMMY_SP)); - let extra_wildcards = other_slice.arity() - self_slice.arity(); - let extra_wildcards = (0..extra_wildcards).map(|_| wildcard); - prefix.iter().chain(extra_wildcards).chain(suffix).collect() - } - } - } - _ => self.fields.iter_patterns().collect(), - } - } - - /// We keep track for each pattern if it was ever useful during the analysis. This is used - /// with `redundant_spans` to report redundant subpatterns arising from or patterns. - pub(super) fn set_useful(&self) { - self.useful.set(true) - } - pub(super) fn is_useful(&self) -> bool { - if self.useful.get() { - true - } else if self.is_or_pat() && self.iter_fields().any(|f| f.is_useful()) { - // We always expand or patterns in the matrix, so we will never see the actual - // or-pattern (the one with constructor `Or`) in the column. As such, it will not be - // marked as useful itself, only its children will. We recover this information here. - self.set_useful(); - true - } else { - false - } - } - - /// Report the spans of subpatterns that were not useful, if any. - pub(super) fn redundant_spans(&self) -> Vec { - let mut spans = Vec::new(); - self.collect_redundant_spans(&mut spans); - spans - } - fn collect_redundant_spans(&self, spans: &mut Vec) { - // We don't look at subpatterns if we already reported the whole pattern as redundant. - if !self.is_useful() { - spans.push(self.span); - } else { - for p in self.iter_fields() { - p.collect_redundant_spans(spans); - } - } - } -} - -/// This is mostly copied from the `Pat` impl. This is best effort and not good enough for a -/// `Display` impl. -impl<'p, 'tcx> fmt::Debug for DeconstructedPat<'p, 'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // Printing lists is a chore. - let mut first = true; - let mut start_or_continue = |s| { - if first { - first = false; - "" - } else { - s - } - }; - let mut start_or_comma = || start_or_continue(", "); - - match &self.ctor { - Single | Variant(_) => match self.ty.kind() { - ty::Adt(def, _) if def.is_box() => { - // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside - // of `std`). So this branch is only reachable when the feature is enabled and - // the pattern is a box pattern. - let subpattern = self.iter_fields().next().unwrap(); - write!(f, "box {subpattern:?}") - } - ty::Adt(..) | ty::Tuple(..) => { - let variant = match self.ty.kind() { - ty::Adt(adt, _) => Some(adt.variant(self.ctor.variant_index_for_adt(*adt))), - ty::Tuple(_) => None, - _ => unreachable!(), - }; - - if let Some(variant) = variant { - write!(f, "{}", variant.name)?; - } - - // Without `cx`, we can't know which field corresponds to which, so we can't - // get the names of the fields. Instead we just display everything as a tuple - // struct, which should be good enough. 
- write!(f, "(")?; - for p in self.iter_fields() { - write!(f, "{}", start_or_comma())?; - write!(f, "{p:?}")?; - } - write!(f, ")") - } - // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should - // be careful to detect strings here. However a string literal pattern will never - // be reported as a non-exhaustiveness witness, so we can ignore this issue. - ty::Ref(_, _, mutbl) => { - let subpattern = self.iter_fields().next().unwrap(); - write!(f, "&{}{:?}", mutbl.prefix_str(), subpattern) - } - _ => write!(f, "_"), - }, - Slice(slice) => { - let mut subpatterns = self.fields.iter_patterns(); - write!(f, "[")?; - match slice.kind { - FixedLen(_) => { - for p in subpatterns { - write!(f, "{}{:?}", start_or_comma(), p)?; - } - } - VarLen(prefix_len, _) => { - for p in subpatterns.by_ref().take(prefix_len) { - write!(f, "{}{:?}", start_or_comma(), p)?; - } - write!(f, "{}", start_or_comma())?; - write!(f, "..")?; - for p in subpatterns { - write!(f, "{}{:?}", start_or_comma(), p)?; - } - } - } - write!(f, "]") - } - Bool(b) => write!(f, "{b}"), - // Best-effort, will render signed ranges incorrectly - IntRange(range) => write!(f, "{range:?}"), - F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"), - F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"), - Str(value) => write!(f, "{value}"), - Opaque(..) => write!(f, ""), - Or => { - for pat in self.iter_fields() { - write!(f, "{}{:?}", start_or_continue(" | "), pat)?; - } - Ok(()) - } - Wildcard | Missing { .. } | NonExhaustive | Hidden => write!(f, "_ : {:?}", self.ty), - } - } -} - -/// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics -/// purposes. As such they don't use interning and can be cloned. -#[derive(Debug, Clone)] -pub(crate) struct WitnessPat<'tcx> { - ctor: Constructor<'tcx>, - pub(crate) fields: Vec>, - ty: Ty<'tcx>, -} - -impl<'tcx> WitnessPat<'tcx> { - pub(super) fn new(ctor: Constructor<'tcx>, fields: Vec, ty: Ty<'tcx>) -> Self { - Self { ctor, fields, ty } - } - pub(super) fn wildcard(ty: Ty<'tcx>) -> Self { - Self::new(Wildcard, Vec::new(), ty) - } - - /// Construct a pattern that matches everything that starts with this constructor. - /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern - /// `Some(_)`. - pub(super) fn wild_from_ctor(pcx: &PatCtxt<'_, '_, 'tcx>, ctor: Constructor<'tcx>) -> Self { - // Reuse `Fields::wildcards` to get the types. - let fields = Fields::wildcards(pcx, &ctor) - .iter_patterns() - .map(|deco_pat| Self::wildcard(deco_pat.ty())) - .collect(); - Self::new(ctor, fields, pcx.ty) - } - - pub(super) fn ctor(&self) -> &Constructor<'tcx> { - &self.ctor - } - pub(super) fn ty(&self) -> Ty<'tcx> { - self.ty - } - - /// Convert back to a `thir::Pat` for diagnostic purposes. This panics for patterns that don't - /// appear in diagnostics, like float ranges. - pub(crate) fn to_diagnostic_pat(&self, cx: &MatchCheckCtxt<'_, 'tcx>) -> Pat<'tcx> { - let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild); - let mut subpatterns = self.iter_fields().map(|p| Box::new(p.to_diagnostic_pat(cx))); - let kind = match &self.ctor { - Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) }, - IntRange(range) => return range.to_diagnostic_pat(self.ty, cx.tcx), - Single | Variant(_) => match self.ty.kind() { - ty::Tuple(..) 
=> PatKind::Leaf { - subpatterns: subpatterns - .enumerate() - .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern }) - .collect(), - }, - ty::Adt(adt_def, _) if adt_def.is_box() => { - // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside - // of `std`). So this branch is only reachable when the feature is enabled and - // the pattern is a box pattern. - PatKind::Deref { subpattern: subpatterns.next().unwrap() } - } - ty::Adt(adt_def, args) => { - let variant_index = self.ctor.variant_index_for_adt(*adt_def); - let variant = &adt_def.variant(variant_index); - let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty, variant) - .zip(subpatterns) - .map(|((field, _ty), pattern)| FieldPat { field, pattern }) - .collect(); - - if adt_def.is_enum() { - PatKind::Variant { adt_def: *adt_def, args, variant_index, subpatterns } - } else { - PatKind::Leaf { subpatterns } - } - } - // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should - // be careful to reconstruct the correct constant pattern here. However a string - // literal pattern will never be reported as a non-exhaustiveness witness, so we - // ignore this issue. - ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, - _ => bug!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty), - }, - Slice(slice) => { - match slice.kind { - FixedLen(_) => PatKind::Slice { - prefix: subpatterns.collect(), - slice: None, - suffix: Box::new([]), - }, - VarLen(prefix, _) => { - let mut subpatterns = subpatterns.peekable(); - let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect(); - if slice.array_len.is_some() { - // Improves diagnostics a bit: if the type is a known-size array, instead - // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`. - // This is incorrect if the size is not known, since `[_, ..]` captures - // arrays of lengths `>= 1` whereas `[..]` captures any length. - while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) { - prefix.pop(); - } - while subpatterns.peek().is_some() - && is_wildcard(subpatterns.peek().unwrap()) - { - subpatterns.next(); - } - } - let suffix: Box<[_]> = subpatterns.collect(); - let wild = Pat::wildcard_from_ty(self.ty); - PatKind::Slice { - prefix: prefix.into_boxed_slice(), - slice: Some(Box::new(wild)), - suffix, - } - } - } - } - &Str(value) => PatKind::Constant { value }, - Wildcard | NonExhaustive | Hidden => PatKind::Wild, - Missing { .. } => bug!( - "trying to convert a `Missing` constructor into a `Pat`; this is probably a bug, - `Missing` should have been processed in `apply_constructors`" - ), - F32Range(..) | F64Range(..) | Opaque(..) 
| Or => { - bug!("can't convert to pattern: {:?}", self) - } - }; - - Pat { ty: self.ty, span: DUMMY_SP, kind } - } - - pub(super) fn iter_fields<'a>(&'a self) -> impl Iterator> { - self.fields.iter() - } -} diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs index eb548ad29ebcd..af0dab4ebc7f4 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs @@ -2,11 +2,8 @@ mod check_match; mod const_to_pat; -pub(crate) mod deconstruct_pat; -mod usefulness; pub(crate) use self::check_match::check_match; -pub(crate) use self::usefulness::MatchCheckCtxt; use crate::errors::*; use crate::thir::util::UserAnnotatedTyHelpers; diff --git a/compiler/rustc_pattern_analysis/Cargo.toml b/compiler/rustc_pattern_analysis/Cargo.toml new file mode 100644 index 0000000000000..0639944a45c9b --- /dev/null +++ b/compiler/rustc_pattern_analysis/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "rustc_pattern_analysis" +version = "0.0.0" +edition = "2021" + +[dependencies] +# tidy-alphabetical-start +rustc_apfloat = "0.2.0" +rustc_arena = { path = "../rustc_arena" } +rustc_data_structures = { path = "../rustc_data_structures" } +rustc_errors = { path = "../rustc_errors" } +rustc_fluent_macro = { path = "../rustc_fluent_macro" } +rustc_hir = { path = "../rustc_hir" } +rustc_index = { path = "../rustc_index" } +rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } +rustc_session = { path = "../rustc_session" } +rustc_span = { path = "../rustc_span" } +rustc_target = { path = "../rustc_target" } +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +tracing = "0.1" +# tidy-alphabetical-end diff --git a/compiler/rustc_pattern_analysis/messages.ftl b/compiler/rustc_pattern_analysis/messages.ftl new file mode 100644 index 0000000000000..827928f97d7cb --- /dev/null +++ b/compiler/rustc_pattern_analysis/messages.ftl @@ -0,0 +1,19 @@ +pattern_analysis_non_exhaustive_omitted_pattern = some variants are not matched explicitly + .help = ensure that all variants are matched explicitly by adding the suggested match arms + .note = the matched value is of type `{$scrut_ty}` and the `non_exhaustive_omitted_patterns` attribute was found + +pattern_analysis_non_exhaustive_omitted_pattern_lint_on_arm = the lint level must be set on the whole match + .help = it no longer has any effect to set the lint level on an individual match arm + .label = remove this attribute + .suggestion = set the lint level on the whole match + +pattern_analysis_overlapping_range_endpoints = multiple patterns overlap on their endpoints + .label = ... with this range + .note = you likely meant to write mutually exclusive ranges + +pattern_analysis_uncovered = {$count -> + [1] pattern `{$witness_1}` + [2] patterns `{$witness_1}` and `{$witness_2}` + [3] patterns `{$witness_1}`, `{$witness_2}` and `{$witness_3}` + *[other] patterns `{$witness_1}`, `{$witness_2}`, `{$witness_3}` and {$remainder} more + } not covered diff --git a/compiler/rustc_pattern_analysis/src/constructor.rs b/compiler/rustc_pattern_analysis/src/constructor.rs new file mode 100644 index 0000000000000..716ccdd4dcd00 --- /dev/null +++ b/compiler/rustc_pattern_analysis/src/constructor.rs @@ -0,0 +1,987 @@ +//! As explained in [`crate::usefulness`], values and patterns are made from constructors applied to +//! fields. This file defines a `Constructor` enum and various operations to manipulate them. +//! +//! 
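As a preview of the constructor inclusion relation described in the next paragraph, a minimal standalone sketch with plain half-open `u32` bounds instead of this crate's `IntRange` (the `is_subrange` helper below is illustrative):

```rust
// Sketch only: a range constructor is covered by another iff it is a subrange of it.
fn is_subrange(a: (u32, u32), b: (u32, u32)) -> bool {
    b.0 <= a.0 && a.1 <= b.1
}

fn main() {
    assert!(is_subrange((2, 6), (0, 10)));   // `2..=5` is covered by `0..=9`
    assert!(!is_subrange((5, 16), (0, 10))); // `5..=15` overlaps `0..=9` but is not covered by it
}
```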
There are two important bits of core logic in this file: constructor inclusion and constructor +//! splitting. Constructor inclusion, i.e. whether a constructor is included in/covered by another, +//! is straightforward and defined in [`Constructor::is_covered_by`]. +//! +//! Constructor splitting is mentioned in [`crate::usefulness`] but not detailed. We describe it +//! precisely here. +//! +//! +//! +//! # Constructor grouping and splitting +//! +//! As explained in the corresponding section in [`crate::usefulness`], to make usefulness tractable +//! we need to group together constructors that have the same effect when they are used to +//! specialize the matrix. +//! +//! Example: +//! ```compile_fail,E0004 +//! match (0, false) { +//! (0 ..=100, true) => {} +//! (50..=150, false) => {} +//! (0 ..=200, _) => {} +//! } +//! ``` +//! +//! In this example we can restrict specialization to 5 cases: `0..50`, `50..=100`, `101..=150`, +//! `151..=200` and `200..`. +//! +//! In [`crate::usefulness`], we had said that `specialize` only takes value-only constructors. We +//! now relax this restriction: we allow `specialize` to take constructors like `0..50` as long as +//! we're careful to only do that with constructors that make sense. For example, `specialize(0..50, +//! (0..=100, true))` is sensible, but `specialize(50..=200, (0..=100, true))` is not. +//! +//! Constructor splitting looks at the constructors in the first column of the matrix and constructs +//! such a sensible set of constructors. Formally, we want to find a smallest disjoint set of +//! constructors: +//! - Whose union covers the whole type, and +//! - That have no non-trivial intersection with any of the constructors in the column (i.e. they're +//! each either disjoint with or covered by any given column constructor). +//! +//! We compute this in two steps: first [`crate::cx::MatchCheckCtxt::ctors_for_ty`] determines the +//! set of all possible constructors for the type. Then [`ConstructorSet::split`] looks at the +//! column of constructors and splits the set into groups accordingly. The precise invariants of +//! [`ConstructorSet::split`] is described in [`SplitConstructorSet`]. +//! +//! Constructor splitting has two interesting special cases: integer range splitting (see +//! [`IntRange::split`]) and slice splitting (see [`Slice::split`]). +//! +//! +//! +//! # The `Missing` constructor +//! +//! We detail a special case of constructor splitting that is a bit subtle. Take the following: +//! +//! ``` +//! enum Direction { North, South, East, West } +//! # let wind = (Direction::North, 0u8); +//! match wind { +//! (Direction::North, 50..) => {} +//! (_, _) => {} +//! } +//! ``` +//! +//! Here we expect constructor splitting to output two cases: `North`, and "everything else". This +//! "everything else" is represented by [`Constructor::Missing`]. Unlike other constructors, it's a +//! bit contextual: to know the exact list of constructors it represents we have to look at the +//! column. In practice however we don't need to, because by construction it only matches rows that +//! have wildcards. This is how this constructor is special: the only constructor that covers it is +//! `Wildcard`. +//! +//! The only place where we care about which constructors `Missing` represents is in diagnostics +//! (see `crate::usefulness::WitnessMatrix::apply_constructor`). +//! +//! We choose whether to specialize with `Missing` in +//! `crate::usefulness::compute_exhaustiveness_and_usefulness`. +//! +//! +//! +//! 
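The splitting of the first column in the example near the top of this comment (`0..=100`, `50..=150`, `0..=200`) can be sketched with the same boundary-counting approach that `IntRange::split` uses. The sketch below assumes plain half-open `u32` bounds instead of `MaybeInfiniteInt` and splits within `0..251` for illustration; the real code works over the whole type:

```rust
// Sketch only: split `lo..hi` against a column of half-open ranges so that every output piece
// is either fully inside or fully outside each column range, tracking whether it was seen.
fn split(lo: u32, hi: u32, column: &[(u32, u32)]) -> Vec<(bool, u32, u32)> {
    // Each column range contributes +1 at its start and -1 just past its end.
    let mut boundaries: Vec<(u32, i32)> = column
        .iter()
        .filter(|&&(l, h)| l < hi && lo < h) // keep only ranges that intersect `lo..hi`
        .flat_map(|&(l, h)| [(l.max(lo), 1), (h.min(hi), -1)])
        .collect();
    boundaries.sort_unstable();

    let (mut prev, mut count) = (lo, 0);
    let mut out = Vec::new();
    for (bdy, delta) in boundaries.into_iter().chain([(hi, 0)]) {
        if prev != bdy {
            // `count > 0` means some column range covers this whole piece.
            out.push((count > 0, prev, bdy));
        }
        prev = bdy;
        count += delta;
    }
    out
}

fn main() {
    let pieces = split(0, 251, &[(0, 101), (50, 151), (0, 201)]);
    for (seen, l, h) in pieces {
        // Prints 0..50, 50..101, 101..151, 151..201 (seen) and 201..251 (not seen),
        // i.e. the half-open form of the cases listed above.
        println!("{l}..{h} seen={seen}");
    }
}
```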
## Empty types, empty constructors, and the `exhaustive_patterns` feature +//! +//! An empty type is a type that has no valid value, like `!`, `enum Void {}`, or `Result<!, !>`. +//! They require careful handling. +//! +//! First, for soundness reasons related to the possible existence of invalid values, by default we +//! don't treat empty types as empty. We force them to be matched with wildcards. Except if the +//! `exhaustive_patterns` feature is turned on, in which case we do treat them as empty. And also +//! except if the type has no constructors (like `enum Void {}` but not like `Result<!, !>`), we +//! specifically allow `match void {}` to be exhaustive. There are additionally considerations of +//! place validity that are handled in `crate::usefulness`. Yes this is a bit tricky. +//! +//! The second thing is that regardless of the above, it is always allowed to use all the +//! constructors of a type. For example, all of the following are ok: +//! +//! ```rust,ignore(example) +//! # #![feature(never_type)] +//! # #![feature(exhaustive_patterns)] +//! fn foo(x: Option<!>) { +//! match x { +//! None => {} +//! Some(_) => {} +//! } +//! } +//! fn bar(x: &[!]) -> u32 { +//! match x { +//! [] => 1, +//! [_] => 2, +//! [_, _] => 3, +//! } +//! } +//! ``` +//! +//! Moreover, take the following: +//! +//! ```rust +//! # #![feature(never_type)] +//! # #![feature(exhaustive_patterns)] +//! # let x = None::<!>; +//! match x { +//! None => {} +//! } +//! ``` +//! +//! On a normal type, we would identify `Some` as missing and tell the user. If `x: Option<!>` +//! however (and `exhaustive_patterns` is on), it's ok to omit `Some`. When listing the constructors +//! of a type, we must therefore track which can be omitted. +//! +//! Let's call "empty" a constructor that matches no valid value for the type, like `Some` for the +//! type `Option<!>`. What this all means is that `ConstructorSet` must know which constructors are +//! empty. The difference between empty and nonempty constructors is that empty constructors need +//! not be present for the match to be exhaustive. +//! +//! A final remark: empty constructors of arity 0 break specialization, so we must avoid them. The +//! reason is that if we specialize by them, nothing remains to witness the emptiness; the rest of +//! the algorithm can't distinguish them from a nonempty constructor. The only known case where this +//! could happen is the `[..]` pattern on `[!; N]` with `N > 0`, so we must take care not to emit it. +//! +//! This is all handled by [`crate::cx::MatchCheckCtxt::ctors_for_ty`] and +//! [`ConstructorSet::split`]. The invariants of [`SplitConstructorSet`] are also of interest. +//! +//! +//! +//! ## Opaque patterns +//! +//! Some patterns, such as constants that are not allowed to be matched structurally, cannot be +//! inspected, which we handle with `Constructor::Opaque`. Since we know nothing of these patterns, +//! we assume they never cover each other. In order to respect the invariants of +//! [`SplitConstructorSet`], we give each `Opaque` constructor a unique id so we can recognize it.
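To make the rules above concrete, here is a small standalone example (the `Void` enum and function names are illustrative, not part of this crate) of what is accepted without the `exhaustive_patterns` feature:

```rust
enum Void {}

// `Void` has no constructors at all, so an empty match is exhaustive even on stable.
fn absurd(v: Void) -> u32 {
    match v {}
}

// `Result<Void, Void>` does have constructors (`Ok` and `Err`); under the default rules
// described above they are not treated as empty, so the match still needs arms.
fn unwrap_either(r: Result<Void, Void>) -> u32 {
    match r {
        Ok(v) | Err(v) => absurd(v),
    }
}

fn main() {}
```

With `exhaustive_patterns` enabled, `Ok` and `Err` would be classified as empty constructors and could be omitted, which is exactly the distinction `ConstructorSet` has to track.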
+ +use std::cmp::{self, max, min, Ordering}; +use std::fmt; +use std::iter::once; + +use smallvec::SmallVec; + +use rustc_apfloat::ieee::{DoubleS, IeeeFloat, SingleS}; +use rustc_data_structures::fx::FxHashSet; +use rustc_hir::RangeEnd; +use rustc_index::IndexVec; +use rustc_middle::mir::Const; +use rustc_target::abi::VariantIdx; + +use self::Constructor::*; +use self::MaybeInfiniteInt::*; +use self::SliceKind::*; + +use crate::usefulness::PatCtxt; + +/// Whether we have seen a constructor in the column or not. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +enum Presence { + Unseen, + Seen, +} + +/// A possibly infinite integer. Values are encoded such that the ordering on `u128` matches the +/// natural order on the original type. For example, `-128i8` is encoded as `0` and `127i8` as +/// `255`. See `signed_bias` for details. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum MaybeInfiniteInt { + NegInfinity, + /// Encoded value. DO NOT CONSTRUCT BY HAND; use `new_finite`. + #[non_exhaustive] + Finite(u128), + /// The integer after `u128::MAX`. We need it to represent `x..=u128::MAX` as an exclusive range. + JustAfterMax, + PosInfinity, +} + +impl MaybeInfiniteInt { + pub fn new_finite_uint(bits: u128) -> Self { + Finite(bits) + } + pub fn new_finite_int(bits: u128, size: u64) -> Self { + // Perform a shift if the underlying types are signed, which makes the interval arithmetic + // type-independent. + let bias = 1u128 << (size - 1); + Finite(bits ^ bias) + } + + pub fn as_finite_uint(self) -> Option { + match self { + Finite(bits) => Some(bits), + _ => None, + } + } + pub fn as_finite_int(self, size: u64) -> Option { + // We decode the shift. + match self { + Finite(bits) => { + let bias = 1u128 << (size - 1); + Some(bits ^ bias) + } + _ => None, + } + } + + /// Note: this will not turn a finite value into an infinite one or vice-versa. + pub fn minus_one(self) -> Self { + match self { + Finite(n) => match n.checked_sub(1) { + Some(m) => Finite(m), + None => bug!(), + }, + JustAfterMax => Finite(u128::MAX), + x => x, + } + } + /// Note: this will not turn a finite value into an infinite one or vice-versa. + pub fn plus_one(self) -> Self { + match self { + Finite(n) => match n.checked_add(1) { + Some(m) => Finite(m), + None => JustAfterMax, + }, + JustAfterMax => bug!(), + x => x, + } + } +} + +/// An exclusive interval, used for precise integer exhaustiveness checking. `IntRange`s always +/// store a contiguous range. +/// +/// `IntRange` is never used to encode an empty range or a "range" that wraps around the (offset) +/// space: i.e., `range.lo < range.hi`. +#[derive(Clone, Copy, PartialEq, Eq)] +pub struct IntRange { + pub lo: MaybeInfiniteInt, // Must not be `PosInfinity`. + pub hi: MaybeInfiniteInt, // Must not be `NegInfinity`. +} + +impl IntRange { + /// Best effort; will not know that e.g. `255u8..` is a singleton. + pub(crate) fn is_singleton(&self) -> bool { + // Since `lo` and `hi` can't be the same `Infinity` and `plus_one` never changes from finite + // to infinite, this correctly only detects ranges that contain exacly one `Finite(x)`. + self.lo.plus_one() == self.hi + } + + #[inline] + pub fn from_singleton(x: MaybeInfiniteInt) -> IntRange { + IntRange { lo: x, hi: x.plus_one() } + } + + #[inline] + pub fn from_range(lo: MaybeInfiniteInt, mut hi: MaybeInfiniteInt, end: RangeEnd) -> IntRange { + if end == RangeEnd::Included { + hi = hi.plus_one(); + } + if lo >= hi { + // This should have been caught earlier by E0030. 
+ bug!("malformed range pattern: {lo:?}..{hi:?}"); + } + IntRange { lo, hi } + } + + fn is_subrange(&self, other: &Self) -> bool { + other.lo <= self.lo && self.hi <= other.hi + } + + fn intersection(&self, other: &Self) -> Option { + if self.lo < other.hi && other.lo < self.hi { + Some(IntRange { lo: max(self.lo, other.lo), hi: min(self.hi, other.hi) }) + } else { + None + } + } + + /// Partition a range of integers into disjoint subranges. This does constructor splitting for + /// integer ranges as explained at the top of the file. + /// + /// This returns an output that covers `self`. The output is split so that the only + /// intersections between an output range and a column range are inclusions. No output range + /// straddles the boundary of one of the inputs. + /// + /// Additionally, we track for each output range whether it is covered by one of the column ranges or not. + /// + /// The following input: + /// ```text + /// (--------------------------) // `self` + /// (------) (----------) (-) + /// (------) (--------) + /// ``` + /// is first intersected with `self`: + /// ```text + /// (--------------------------) // `self` + /// (----) (----------) (-) + /// (------) (--------) + /// ``` + /// and then iterated over as follows: + /// ```text + /// (-(--)-(-)-(------)-)--(-)- + /// ``` + /// where each sequence of dashes is an output range, and dashes outside parentheses are marked + /// as `Presence::Missing`. + /// + /// ## `isize`/`usize` + /// + /// Whereas a wildcard of type `i32` stands for the range `i32::MIN..=i32::MAX`, a `usize` + /// wildcard stands for `0..PosInfinity` and a `isize` wildcard stands for + /// `NegInfinity..PosInfinity`. In other words, as far as `IntRange` is concerned, there are + /// values before `isize::MIN` and after `usize::MAX`/`isize::MAX`. + /// This is to avoid e.g. `0..(u32::MAX as usize)` from being exhaustive on one architecture and + /// not others. This was decided in . + /// + /// These infinities affect splitting subtly: it is possible to get `NegInfinity..0` and + /// `usize::MAX+1..PosInfinity` in the output. Diagnostics must be careful to handle these + /// fictitious ranges sensibly. + fn split( + &self, + column_ranges: impl Iterator, + ) -> impl Iterator { + // The boundaries of ranges in `column_ranges` intersected with `self`. + // We do parenthesis matching for input ranges. A boundary counts as +1 if it starts + // a range and -1 if it ends it. When the count is > 0 between two boundaries, we + // are within an input range. + let mut boundaries: Vec<(MaybeInfiniteInt, isize)> = column_ranges + .filter_map(|r| self.intersection(&r)) + .flat_map(|r| [(r.lo, 1), (r.hi, -1)]) + .collect(); + // We sort by boundary, and for each boundary we sort the "closing parentheses" first. The + // order of +1/-1 for a same boundary value is actually irrelevant, because we only look at + // the accumulated count between distinct boundary values. + boundaries.sort_unstable(); + + // Accumulate parenthesis counts. + let mut paren_counter = 0isize; + // Gather pairs of adjacent boundaries. + let mut prev_bdy = self.lo; + boundaries + .into_iter() + // End with the end of the range. The count is ignored. + .chain(once((self.hi, 0))) + // List pairs of adjacent boundaries and the count between them. + .map(move |(bdy, delta)| { + // `delta` affects the count as we cross `bdy`, so the relevant count between + // `prev_bdy` and `bdy` is untouched by `delta`. 
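A simplified, standalone version of the boundary-counting scheme described above, over plain half-open `u32` ranges (the real implementation works on `MaybeInfiniteInt` and returns `Presence`; this sketch just returns a covered/uncovered flag and is not part of the patch):

```rust
use std::ops::Range;

/// Split `this` into pieces that never straddle a boundary of the column ranges.
/// Between two consecutive sorted boundaries, a positive "parenthesis count"
/// means the piece is covered by at least one column range.
fn split(this: Range<u32>, column: &[Range<u32>]) -> Vec<(bool, Range<u32>)> {
    let mut boundaries: Vec<(u32, i32)> = column
        .iter()
        // Clip each column range to `this` and drop empty intersections.
        .filter_map(|r| {
            let lo = r.start.max(this.start);
            let hi = r.end.min(this.end);
            (lo < hi).then_some((lo, hi))
        })
        .flat_map(|(lo, hi)| [(lo, 1), (hi, -1)])
        .collect();
    boundaries.sort_unstable();

    let mut out = Vec::new();
    let mut count = 0i32;
    let mut prev = this.start;
    for (bdy, delta) in boundaries.into_iter().chain([(this.end, 0)]) {
        if prev != bdy {
            out.push((count > 0, prev..bdy));
        }
        prev = bdy;
        count += delta;
    }
    out
}

fn main() {
    // Split 0..10 by the column ranges 2..5 and 4..8.
    assert_eq!(
        split(0..10, &[2..5, 4..8]),
        vec![
            (false, 0..2), // not covered by any column range
            (true, 2..4),
            (true, 4..5),
            (true, 5..8),
            (false, 8..10),
        ]
    );
}
```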
+ let ret = (prev_bdy, paren_counter, bdy); + prev_bdy = bdy; + paren_counter += delta; + ret + }) + // Skip empty ranges. + .filter(|&(prev_bdy, _, bdy)| prev_bdy != bdy) + // Convert back to ranges. + .map(move |(prev_bdy, paren_count, bdy)| { + use Presence::*; + let presence = if paren_count > 0 { Seen } else { Unseen }; + let range = IntRange { lo: prev_bdy, hi: bdy }; + (presence, range) + }) + } +} + +/// Note: this will render signed ranges incorrectly. To render properly, convert to a pattern +/// first. +impl fmt::Debug for IntRange { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Finite(lo) = self.lo { + write!(f, "{lo}")?; + } + write!(f, "{}", RangeEnd::Excluded)?; + if let Finite(hi) = self.hi { + write!(f, "{hi}")?; + } + Ok(()) + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum SliceKind { + /// Patterns of length `n` (`[x, y]`). + FixedLen(usize), + /// Patterns using the `..` notation (`[x, .., y]`). + /// Captures any array constructor of `length >= i + j`. + /// In the case where `array_len` is `Some(_)`, + /// this indicates that we only care about the first `i` and the last `j` values of the array, + /// and everything in between is a wildcard `_`. + VarLen(usize, usize), +} + +impl SliceKind { + fn arity(self) -> usize { + match self { + FixedLen(length) => length, + VarLen(prefix, suffix) => prefix + suffix, + } + } + + /// Whether this pattern includes patterns of length `other_len`. + fn covers_length(self, other_len: usize) -> bool { + match self { + FixedLen(len) => len == other_len, + VarLen(prefix, suffix) => prefix + suffix <= other_len, + } + } +} + +/// A constructor for array and slice patterns. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct Slice { + /// `None` if the matched value is a slice, `Some(n)` if it is an array of size `n`. + pub(crate) array_len: Option, + /// The kind of pattern it is: fixed-length `[x, y]` or variable length `[x, .., y]`. + pub(crate) kind: SliceKind, +} + +impl Slice { + pub fn new(array_len: Option, kind: SliceKind) -> Self { + let kind = match (array_len, kind) { + // If the middle `..` has length 0, we effectively have a fixed-length pattern. + (Some(len), VarLen(prefix, suffix)) if prefix + suffix == len => FixedLen(len), + (Some(len), VarLen(prefix, suffix)) if prefix + suffix > len => bug!( + "Slice pattern of length {} longer than its array length {len}", + prefix + suffix + ), + _ => kind, + }; + Slice { array_len, kind } + } + + pub(crate) fn arity(self) -> usize { + self.kind.arity() + } + + /// See `Constructor::is_covered_by` + fn is_covered_by(self, other: Self) -> bool { + other.kind.covers_length(self.arity()) + } + + /// This computes constructor splitting for variable-length slices, as explained at the top of + /// the file. + /// + /// A slice pattern `[x, .., y]` behaves like the infinite or-pattern `[x, y] | [x, _, y] | [x, + /// _, _, y] | etc`. The corresponding value constructors are fixed-length array constructors of + /// corresponding lengths. We obviously can't list this infinitude of constructors. + /// Thankfully, it turns out that for each finite set of slice patterns, all sufficiently large + /// array lengths are equivalent. + /// + /// Let's look at an example, where we are trying to split the last pattern: + /// ``` + /// # fn foo(x: &[bool]) { + /// match x { + /// [true, true, ..] => {} + /// [.., false, false] => {} + /// [..] 
=> {} + /// } + /// # } + /// ``` + /// Here are the results of specialization for the first few lengths: + /// ``` + /// # fn foo(x: &[bool]) { match x { + /// // length 0 + /// [] => {} + /// // length 1 + /// [_] => {} + /// // length 2 + /// [true, true] => {} + /// [false, false] => {} + /// [_, _] => {} + /// // length 3 + /// [true, true, _ ] => {} + /// [_, false, false] => {} + /// [_, _, _ ] => {} + /// // length 4 + /// [true, true, _, _ ] => {} + /// [_, _, false, false] => {} + /// [_, _, _, _ ] => {} + /// // length 5 + /// [true, true, _, _, _ ] => {} + /// [_, _, _, false, false] => {} + /// [_, _, _, _, _ ] => {} + /// # _ => {} + /// # }} + /// ``` + /// + /// We see that above length 4, we are simply inserting columns full of wildcards in the middle. + /// This means that specialization and witness computation with slices of length `l >= 4` will + /// give equivalent results regardless of `l`. This applies to any set of slice patterns: there + /// will be a length `L` above which all lengths behave the same. This is exactly what we need + /// for constructor splitting. + /// + /// A variable-length slice pattern covers all lengths from its arity up to infinity. As we just + /// saw, we can split this in two: lengths below `L` are treated individually with a + /// fixed-length slice each; lengths above `L` are grouped into a single variable-length slice + /// constructor. + /// + /// For each variable-length slice pattern `p` with a prefix of length `plâ‚š` and suffix of + /// length `slâ‚š`, only the first `plâ‚š` and the last `slâ‚š` elements are examined. Therefore, as + /// long as `L` is positive (to avoid concerns about empty types), all elements after the + /// maximum prefix length and before the maximum suffix length are not examined by any + /// variable-length pattern, and therefore can be ignored. This gives us a way to compute `L`. + /// + /// Additionally, if fixed-length patterns exist, we must pick an `L` large enough to miss them, + /// so we can pick `L = max(max(FIXED_LEN)+1, max(PREFIX_LEN) + max(SUFFIX_LEN))`. + /// `max_slice` below will be made to have this arity `L`. + /// + /// If `self` is fixed-length, it is returned as-is. + /// + /// Additionally, we track for each output slice whether it is covered by one of the column slices or not. + fn split( + self, + column_slices: impl Iterator, + ) -> impl Iterator { + // Range of lengths below `L`. + let smaller_lengths; + let arity = self.arity(); + let mut max_slice = self.kind; + // Tracks the smallest variable-length slice we've seen. Any slice arity above it is + // therefore `Presence::Seen` in the column. + let mut min_var_len = usize::MAX; + // Tracks the fixed-length slices we've seen, to mark them as `Presence::Seen`. + let mut seen_fixed_lens = FxHashSet::default(); + match &mut max_slice { + VarLen(max_prefix_len, max_suffix_len) => { + // A length larger than any fixed-length slice encountered. + // We start at 1 in case the subtype is empty because in that case the zero-length + // slice must be treated separately from the rest. + let mut fixed_len_upper_bound = 1; + // We grow `max_slice` to be larger than all slices encountered, as described above. + // `L` is `max_slice.arity()`. For diagnostics, we keep the prefix and suffix + // lengths separate. 
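The threshold length `L` that the loop below computes can be summarized in a standalone sketch (the names `Shape` and `threshold` are illustrative): `L = max(max(FIXED_LEN) + 1, max(PREFIX_LEN) + max(SUFFIX_LEN))`, evaluated here on the `[true, true, ..]` / `[.., false, false]` / `[..]` example from the doc comment above.

```rust
#[derive(Clone, Copy)]
#[allow(dead_code)]
enum Shape {
    /// A fixed-length slice pattern such as `[a, b, c]`.
    Fixed(usize),
    /// A variable-length slice pattern `[p0, .., s0, s1]`: (prefix, suffix) lengths.
    Var(usize, usize),
}

fn threshold(column: &[Shape]) -> usize {
    // Start at 1, mirroring `fixed_len_upper_bound`, so that the zero-length
    // slice is always treated separately.
    let mut max_fixed_plus_one = 1;
    let mut max_prefix = 0;
    let mut max_suffix = 0;
    for shape in column.iter().copied() {
        match shape {
            Shape::Fixed(len) => max_fixed_plus_one = max_fixed_plus_one.max(len + 1),
            Shape::Var(prefix, suffix) => {
                max_prefix = max_prefix.max(prefix);
                max_suffix = max_suffix.max(suffix);
            }
        }
    }
    max_fixed_plus_one.max(max_prefix + max_suffix)
}

fn main() {
    // `[true, true, ..]`, `[.., false, false]` and `[..]` from the example above.
    let column = [Shape::Var(2, 0), Shape::Var(0, 2), Shape::Var(0, 0)];
    // Lengths 0..4 are split off as individual fixed-length slices; 4.. is one group.
    assert_eq!(threshold(&column), 4);
}
```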
+ for slice in column_slices { + match slice.kind { + FixedLen(len) => { + fixed_len_upper_bound = cmp::max(fixed_len_upper_bound, len + 1); + seen_fixed_lens.insert(len); + } + VarLen(prefix, suffix) => { + *max_prefix_len = cmp::max(*max_prefix_len, prefix); + *max_suffix_len = cmp::max(*max_suffix_len, suffix); + min_var_len = cmp::min(min_var_len, prefix + suffix); + } + } + } + // If `fixed_len_upper_bound >= L`, we set `L` to `fixed_len_upper_bound`. + if let Some(delta) = + fixed_len_upper_bound.checked_sub(*max_prefix_len + *max_suffix_len) + { + *max_prefix_len += delta + } + + // We cap the arity of `max_slice` at the array size. + match self.array_len { + Some(len) if max_slice.arity() >= len => max_slice = FixedLen(len), + _ => {} + } + + smaller_lengths = match self.array_len { + // The only admissible fixed-length slice is one of the array size. Whether `max_slice` + // is fixed-length or variable-length, it will be the only relevant slice to output + // here. + Some(_) => 0..0, // empty range + // We need to cover all arities in the range `(arity..infinity)`. We split that + // range into two: lengths smaller than `max_slice.arity()` are treated + // independently as fixed-lengths slices, and lengths above are captured by + // `max_slice`. + None => self.arity()..max_slice.arity(), + }; + } + FixedLen(_) => { + // No need to split here. We only track presence. + for slice in column_slices { + match slice.kind { + FixedLen(len) => { + if len == arity { + seen_fixed_lens.insert(len); + } + } + VarLen(prefix, suffix) => { + min_var_len = cmp::min(min_var_len, prefix + suffix); + } + } + } + smaller_lengths = 0..0; + } + }; + + smaller_lengths.map(FixedLen).chain(once(max_slice)).map(move |kind| { + let arity = kind.arity(); + let seen = if min_var_len <= arity || seen_fixed_lens.contains(&arity) { + Presence::Seen + } else { + Presence::Unseen + }; + (seen, Slice::new(self.array_len, kind)) + }) + } +} + +/// A globally unique id to distinguish `Opaque` patterns. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct OpaqueId(u32); + +impl OpaqueId { + pub fn new() -> Self { + use std::sync::atomic::{AtomicU32, Ordering}; + static OPAQUE_ID: AtomicU32 = AtomicU32::new(0); + OpaqueId(OPAQUE_ID.fetch_add(1, Ordering::SeqCst)) + } +} + +/// A value can be decomposed into a constructor applied to some fields. This struct represents +/// the constructor. See also `Fields`. +/// +/// `pat_constructor` retrieves the constructor corresponding to a pattern. +/// `specialize_constructor` returns the list of fields corresponding to a pattern, given a +/// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and +/// `Fields`. +#[derive(Clone, Debug, PartialEq)] +pub enum Constructor<'tcx> { + /// The constructor for patterns that have a single constructor, like tuples, struct patterns, + /// and references. Fixed-length arrays are treated separately with `Slice`. + Single, + /// Enum variants. + Variant(VariantIdx), + /// Booleans + Bool(bool), + /// Ranges of integer literal values (`2`, `2..=5` or `2..5`). + IntRange(IntRange), + /// Ranges of floating-point literal values (`2.0..=5.2`). + F32Range(IeeeFloat, IeeeFloat, RangeEnd), + F64Range(IeeeFloat, IeeeFloat, RangeEnd), + /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately. + Str(Const<'tcx>), + /// Array and slice patterns. + Slice(Slice), + /// Constants that must not be matched structurally. 
They are treated as black boxes for the + /// purposes of exhaustiveness: we must not inspect them, and they don't count towards making a + /// match exhaustive. + /// Carries an id that must be unique within a match. We need this to ensure the invariants of + /// [`SplitConstructorSet`]. + Opaque(OpaqueId), + /// Or-pattern. + Or, + /// Wildcard pattern. + Wildcard, + /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used + /// for those types for which we cannot list constructors explicitly, like `f64` and `str`. + NonExhaustive, + /// Fake extra constructor for variants that should not be mentioned in diagnostics. + /// We use this for variants behind an unstable gate as well as + /// `#[doc(hidden)]` ones. + Hidden, + /// Fake extra constructor for constructors that are not seen in the matrix, as explained at the + /// top of the file. + Missing, +} + +impl<'tcx> Constructor<'tcx> { + pub(crate) fn is_non_exhaustive(&self) -> bool { + matches!(self, NonExhaustive) + } + + pub(crate) fn as_variant(&self) -> Option { + match self { + Variant(i) => Some(*i), + _ => None, + } + } + fn as_bool(&self) -> Option { + match self { + Bool(b) => Some(*b), + _ => None, + } + } + pub(crate) fn as_int_range(&self) -> Option<&IntRange> { + match self { + IntRange(range) => Some(range), + _ => None, + } + } + fn as_slice(&self) -> Option { + match self { + Slice(slice) => Some(*slice), + _ => None, + } + } + + /// The number of fields for this constructor. This must be kept in sync with + /// `Fields::wildcards`. + pub(crate) fn arity(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> usize { + pcx.cx.ctor_arity(self, pcx.ty) + } + + /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`. + /// For the simple cases, this is simply checking for equality. For the "grouped" constructors, + /// this checks for inclusion. + // We inline because this has a single call site in `Matrix::specialize_constructor`. + #[inline] + pub(crate) fn is_covered_by<'p>(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool { + match (self, other) { + (Wildcard, _) => { + span_bug!( + pcx.cx.scrut_span, + "Constructor splitting should not have returned `Wildcard`" + ) + } + // Wildcards cover anything + (_, Wildcard) => true, + // Only a wildcard pattern can match these special constructors. + (Missing { .. } | NonExhaustive | Hidden, _) => false, + + (Single, Single) => true, + (Variant(self_id), Variant(other_id)) => self_id == other_id, + (Bool(self_b), Bool(other_b)) => self_b == other_b, + + (IntRange(self_range), IntRange(other_range)) => self_range.is_subrange(other_range), + (F32Range(self_from, self_to, self_end), F32Range(other_from, other_to, other_end)) => { + self_from.ge(other_from) + && match self_to.partial_cmp(other_to) { + Some(Ordering::Less) => true, + Some(Ordering::Equal) => other_end == self_end, + _ => false, + } + } + (F64Range(self_from, self_to, self_end), F64Range(other_from, other_to, other_end)) => { + self_from.ge(other_from) + && match self_to.partial_cmp(other_to) { + Some(Ordering::Less) => true, + Some(Ordering::Equal) => other_end == self_end, + _ => false, + } + } + (Str(self_val), Str(other_val)) => { + // FIXME Once valtrees are available we can directly use the bytes + // in the `Str` variant of the valtree for the comparison here. 
+ self_val == other_val + } + (Slice(self_slice), Slice(other_slice)) => self_slice.is_covered_by(*other_slice), + + // Opaque constructors don't interact with anything unless they come from the + // syntactically identical pattern. + (Opaque(self_id), Opaque(other_id)) => self_id == other_id, + (Opaque(..), _) | (_, Opaque(..)) => false, + + _ => span_bug!( + pcx.cx.scrut_span, + "trying to compare incompatible constructors {:?} and {:?}", + self, + other + ), + } + } +} + +#[derive(Debug, Clone, Copy)] +pub enum VariantVisibility { + /// Variant that doesn't fit the other cases, i.e. most variants. + Visible, + /// Variant behind an unstable gate or with the `#[doc(hidden)]` attribute. It will not be + /// mentioned in diagnostics unless the user mentioned it first. + Hidden, + /// Variant that matches no value. E.g. `Some::>` if the `exhaustive_patterns` feature + /// is enabled. Like `Hidden`, it will not be mentioned in diagnostics unless the user mentioned + /// it first. + Empty, +} + +/// Describes the set of all constructors for a type. For details, in particular about the emptiness +/// of constructors, see the top of the file. +/// +/// In terms of division of responsibility, [`ConstructorSet::split`] handles all of the +/// `exhaustive_patterns` feature. +#[derive(Debug)] +pub enum ConstructorSet { + /// The type has a single constructor, e.g. `&T` or a struct. `empty` tracks whether the + /// constructor is empty. + Single { empty: bool }, + /// This type has the following list of constructors. If `variants` is empty and + /// `non_exhaustive` is false, don't use this; use `NoConstructors` instead. + Variants { variants: IndexVec, non_exhaustive: bool }, + /// Booleans. + Bool, + /// The type is spanned by integer values. The range or ranges give the set of allowed values. + /// The second range is only useful for `char`. + Integers { range_1: IntRange, range_2: Option }, + /// The type is matched by slices. `array_len` is the compile-time length of the array, if + /// known. If `subtype_is_empty`, all constructors are empty except possibly the zero-length + /// slice `[]`. + Slice { array_len: Option, subtype_is_empty: bool }, + /// The constructors cannot be listed, and the type cannot be matched exhaustively. E.g. `str`, + /// floats. + Unlistable, + /// The type has no constructors (not even empty ones). This is `!` and empty enums. + NoConstructors, +} + +/// Describes the result of analyzing the constructors in a column of a match. +/// +/// `present` is morally the set of constructors present in the column, and `missing` is the set of +/// constructors that exist in the type but are not present in the column. +/// +/// More formally, if we discard wildcards from the column, this respects the following constraints: +/// 1. the union of `present`, `missing` and `missing_empty` covers all the constructors of the type +/// 2. each constructor in `present` is covered by something in the column +/// 3. no constructor in `missing` or `missing_empty` is covered by anything in the column +/// 4. each constructor in the column is equal to the union of one or more constructors in `present` +/// 5. `missing` does not contain empty constructors (see discussion about emptiness at the top of +/// the file); +/// 6. `missing_empty` contains only empty constructors +/// 7. constructors in `present`, `missing` and `missing_empty` are split for the column; in other +/// words, they are either fully included in or fully disjoint from each constructor in the +/// column. 
In yet other words, there are no non-trivial intersections like between `0..10` and +/// `5..15`. +/// +/// We must be particularly careful with weird constructors like `Opaque`: they're not formally part +/// of the `ConstructorSet` for the type, yet if we forgot to include them in `present` we would be +/// ignoring any row with `Opaque`s in the algorithm. Hence the importance of point 4. +#[derive(Debug)] +pub(crate) struct SplitConstructorSet<'tcx> { + pub(crate) present: SmallVec<[Constructor<'tcx>; 1]>, + pub(crate) missing: Vec>, + pub(crate) missing_empty: Vec>, +} + +impl ConstructorSet { + /// This analyzes a column of constructors to 1/ determine which constructors of the type (if + /// any) are missing; 2/ split constructors to handle non-trivial intersections e.g. on ranges + /// or slices. This can get subtle; see [`SplitConstructorSet`] for details of this operation + /// and its invariants. + #[instrument(level = "debug", skip(self, pcx, ctors), ret)] + pub(crate) fn split<'a, 'tcx>( + &self, + pcx: &PatCtxt<'_, '_, 'tcx>, + ctors: impl Iterator> + Clone, + ) -> SplitConstructorSet<'tcx> + where + 'tcx: 'a, + { + let mut present: SmallVec<[_; 1]> = SmallVec::new(); + // Empty constructors found missing. + let mut missing_empty = Vec::new(); + // Nonempty constructors found missing. + let mut missing = Vec::new(); + // Constructors in `ctors`, except wildcards and opaques. + let mut seen = Vec::new(); + for ctor in ctors.cloned() { + match ctor { + Opaque(..) => present.push(ctor), + Wildcard => {} // discard wildcards + _ => seen.push(ctor), + } + } + + match self { + ConstructorSet::Single { empty } => { + if !seen.is_empty() { + present.push(Single); + } else if *empty { + missing_empty.push(Single); + } else { + missing.push(Single); + } + } + ConstructorSet::Variants { variants, non_exhaustive } => { + let seen_set: FxHashSet<_> = seen.iter().map(|c| c.as_variant().unwrap()).collect(); + let mut skipped_a_hidden_variant = false; + + for (idx, visibility) in variants.iter_enumerated() { + let ctor = Variant(idx); + if seen_set.contains(&idx) { + present.push(ctor); + } else { + // We only put visible variants directly into `missing`. 
+ match visibility { + VariantVisibility::Visible => missing.push(ctor), + VariantVisibility::Hidden => skipped_a_hidden_variant = true, + VariantVisibility::Empty => missing_empty.push(ctor), + } + } + } + + if skipped_a_hidden_variant { + missing.push(Hidden); + } + if *non_exhaustive { + missing.push(NonExhaustive); + } + } + ConstructorSet::Bool => { + let mut seen_false = false; + let mut seen_true = false; + for b in seen.iter().map(|ctor| ctor.as_bool().unwrap()) { + if b { + seen_true = true; + } else { + seen_false = true; + } + } + if seen_false { + present.push(Bool(false)); + } else { + missing.push(Bool(false)); + } + if seen_true { + present.push(Bool(true)); + } else { + missing.push(Bool(true)); + } + } + ConstructorSet::Integers { range_1, range_2 } => { + let seen_ranges: Vec<_> = + seen.iter().map(|ctor| ctor.as_int_range().unwrap().clone()).collect(); + for (seen, splitted_range) in range_1.split(seen_ranges.iter().cloned()) { + match seen { + Presence::Unseen => missing.push(IntRange(splitted_range)), + Presence::Seen => present.push(IntRange(splitted_range)), + } + } + if let Some(range_2) = range_2 { + for (seen, splitted_range) in range_2.split(seen_ranges.into_iter()) { + match seen { + Presence::Unseen => missing.push(IntRange(splitted_range)), + Presence::Seen => present.push(IntRange(splitted_range)), + } + } + } + } + ConstructorSet::Slice { array_len, subtype_is_empty } => { + let seen_slices = seen.iter().map(|c| c.as_slice().unwrap()); + let base_slice = Slice::new(*array_len, VarLen(0, 0)); + for (seen, splitted_slice) in base_slice.split(seen_slices) { + let ctor = Slice(splitted_slice); + match seen { + Presence::Seen => present.push(ctor), + Presence::Unseen => { + if *subtype_is_empty && splitted_slice.arity() != 0 { + // We have subpatterns of an empty type, so the constructor is + // empty. + missing_empty.push(ctor); + } else { + missing.push(ctor); + } + } + } + } + } + ConstructorSet::Unlistable => { + // Since we can't list constructors, we take the ones in the column. This might list + // some constructors several times but there's not much we can do. + present.extend(seen); + missing.push(NonExhaustive); + } + ConstructorSet::NoConstructors => { + // In a `MaybeInvalid` place even an empty pattern may be reachable. We therefore + // add a dummy empty constructor here, which will be ignored if the place is + // `ValidOnly`. + missing_empty.push(NonExhaustive); + } + } + + // We have now grouped all the constructors into 3 buckets: present, missing, missing_empty. + // In the absence of the `exhaustive_patterns` feature however, we don't count nested empty + // types as empty. Only non-nested `!` or `enum Foo {}` are considered empty. + if !pcx.cx.tcx.features().exhaustive_patterns + && !(pcx.is_top_level && matches!(self, Self::NoConstructors)) + { + // Treat all missing constructors as nonempty. 
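For contrast, a stable-Rust sketch of the rule applied just below (function names are illustrative): without `exhaustive_patterns`, a nested empty type still requires its constructors to be covered, while the top-level no-constructors case does not.

```rust
use std::convert::Infallible;

enum Void {}

// `Err(Infallible)` has no valid value, but without `exhaustive_patterns` the
// nested empty type is not treated as empty: both `Ok` and `Err` must be covered.
fn unwrap_always_ok(r: Result<u32, Infallible>) -> u32 {
    match r {
        Ok(n) => n,
        Err(never) => match never {}, // an arm is still required here
    }
}

// The top-level "no constructors" case is the exception: an empty enum may be
// matched with zero arms on stable Rust.
fn absurd(v: Void) -> u32 {
    match v {}
}

fn main() {
    assert_eq!(unwrap_always_ok(Ok(1)), 1);
}
```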
+ missing.extend(missing_empty.drain(..)); + } + + SplitConstructorSet { present, missing, missing_empty } + } +} diff --git a/compiler/rustc_pattern_analysis/src/cx.rs b/compiler/rustc_pattern_analysis/src/cx.rs new file mode 100644 index 0000000000000..8a4f39a1f4abb --- /dev/null +++ b/compiler/rustc_pattern_analysis/src/cx.rs @@ -0,0 +1,856 @@ +use std::fmt; +use std::iter::once; + +use rustc_arena::TypedArena; +use rustc_data_structures::captures::Captures; +use rustc_hir::def_id::DefId; +use rustc_hir::{HirId, RangeEnd}; +use rustc_index::Idx; +use rustc_index::IndexVec; +use rustc_middle::middle::stability::EvalResult; +use rustc_middle::mir; +use rustc_middle::mir::interpret::Scalar; +use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange, PatRangeBoundary}; +use rustc_middle::ty::layout::IntegerExt; +use rustc_middle::ty::{self, Ty, TyCtxt, VariantDef}; +use rustc_span::{Span, DUMMY_SP}; +use rustc_target::abi::{FieldIdx, Integer, VariantIdx, FIRST_VARIANT}; +use smallvec::SmallVec; + +use crate::constructor::{ + Constructor, ConstructorSet, IntRange, MaybeInfiniteInt, OpaqueId, Slice, SliceKind, + VariantVisibility, +}; +use crate::pat::{DeconstructedPat, WitnessPat}; + +use Constructor::*; + +pub struct MatchCheckCtxt<'p, 'tcx> { + pub tcx: TyCtxt<'tcx>, + /// The module in which the match occurs. This is necessary for + /// checking inhabited-ness of types because whether a type is (visibly) + /// inhabited can depend on whether it was defined in the current module or + /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty + /// outside its module and should not be matchable with an empty match statement. + pub module: DefId, + pub param_env: ty::ParamEnv<'tcx>, + pub pattern_arena: &'p TypedArena>, + /// Lint level at the match. + pub match_lint_level: HirId, + /// The span of the whole match, if applicable. + pub whole_match_span: Option, + /// Span of the scrutinee. + pub scrut_span: Span, + /// Only produce `NON_EXHAUSTIVE_OMITTED_PATTERNS` lint on refutable patterns. + pub refutable: bool, + /// Whether the data at the scrutinee is known to be valid. This is false if the scrutinee comes + /// from a union field, a pointer deref, or a reference deref (pending opsem decisions). + pub known_valid_scrutinee: bool, +} + +impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> { + pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool { + !ty.is_inhabited_from(self.tcx, self.module, self.param_env) + } + + /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`. + pub fn is_foreign_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool { + match ty.kind() { + ty::Adt(def, ..) => { + def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did().is_local() + } + _ => false, + } + } + + pub(crate) fn alloc_wildcard_slice( + &self, + tys: impl IntoIterator>, + ) -> &'p [DeconstructedPat<'p, 'tcx>] { + self.pattern_arena + .alloc_from_iter(tys.into_iter().map(|ty| DeconstructedPat::wildcard(ty, DUMMY_SP))) + } + + // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide + // uninhabited fields in order not to reveal the uninhabitedness of the whole variant. + // This lists the fields we keep along with their types. 
+ pub(crate) fn list_variant_nonhidden_fields<'a>( + &'a self, + ty: Ty<'tcx>, + variant: &'a VariantDef, + ) -> impl Iterator)> + Captures<'p> + Captures<'a> { + let cx = self; + let ty::Adt(adt, args) = ty.kind() else { bug!() }; + // Whether we must not match the fields of this variant exhaustively. + let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !adt.did().is_local(); + + variant.fields.iter().enumerate().filter_map(move |(i, field)| { + let ty = field.ty(cx.tcx, args); + // `field.ty()` doesn't normalize after substituting. + let ty = cx.tcx.normalize_erasing_regions(cx.param_env, ty); + let is_visible = adt.is_enum() || field.vis.is_accessible_from(cx.module, cx.tcx); + let is_uninhabited = cx.tcx.features().exhaustive_patterns && cx.is_uninhabited(ty); + + if is_uninhabited && (!is_visible || is_non_exhaustive) { + None + } else { + Some((FieldIdx::new(i), ty)) + } + }) + } + + pub(crate) fn variant_index_for_adt( + ctor: &Constructor<'tcx>, + adt: ty::AdtDef<'tcx>, + ) -> VariantIdx { + match *ctor { + Variant(idx) => idx, + Single => { + assert!(!adt.is_enum()); + FIRST_VARIANT + } + _ => bug!("bad constructor {:?} for adt {:?}", ctor, adt), + } + } + + /// Creates a new list of wildcard fields for a given constructor. The result must have a length + /// of `ctor.arity()`. + #[instrument(level = "trace", skip(self))] + pub(crate) fn ctor_wildcard_fields( + &self, + ctor: &Constructor<'tcx>, + ty: Ty<'tcx>, + ) -> &'p [DeconstructedPat<'p, 'tcx>] { + let cx = self; + match ctor { + Single | Variant(_) => match ty.kind() { + ty::Tuple(fs) => cx.alloc_wildcard_slice(fs.iter()), + ty::Ref(_, rty, _) => cx.alloc_wildcard_slice(once(*rty)), + ty::Adt(adt, args) => { + if adt.is_box() { + // The only legal patterns of type `Box` (outside `std`) are `_` and box + // patterns. If we're here we can assume this is a box pattern. + cx.alloc_wildcard_slice(once(args.type_at(0))) + } else { + let variant = + &adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt)); + let tys = cx.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty); + cx.alloc_wildcard_slice(tys) + } + } + _ => bug!("Unexpected type for `Single` constructor: {:?}", ty), + }, + Slice(slice) => match *ty.kind() { + ty::Slice(ty) | ty::Array(ty, _) => { + let arity = slice.arity(); + cx.alloc_wildcard_slice((0..arity).map(|_| ty)) + } + _ => bug!("bad slice pattern {:?} {:?}", ctor, ty), + }, + Bool(..) + | IntRange(..) + | F32Range(..) + | F64Range(..) + | Str(..) + | Opaque(..) + | NonExhaustive + | Hidden + | Missing { .. } + | Wildcard => &[], + Or => { + bug!("called `Fields::wildcards` on an `Or` ctor") + } + } + } + + /// The number of fields for this constructor. This must be kept in sync with + /// `Fields::wildcards`. + pub(crate) fn ctor_arity(&self, ctor: &Constructor<'tcx>, ty: Ty<'tcx>) -> usize { + match ctor { + Single | Variant(_) => match ty.kind() { + ty::Tuple(fs) => fs.len(), + ty::Ref(..) => 1, + ty::Adt(adt, ..) => { + if adt.is_box() { + // The only legal patterns of type `Box` (outside `std`) are `_` and box + // patterns. If we're here we can assume this is a box pattern. + 1 + } else { + let variant = + &adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt)); + self.list_variant_nonhidden_fields(ty, variant).count() + } + } + _ => bug!("Unexpected type for `Single` constructor: {:?}", ty), + }, + Slice(slice) => slice.arity(), + Bool(..) + | IntRange(..) + | F32Range(..) + | F64Range(..) + | Str(..) + | Opaque(..) + | NonExhaustive + | Hidden + | Missing { .. 
} + | Wildcard => 0, + Or => bug!("The `Or` constructor doesn't have a fixed arity"), + } + } + + /// Creates a set that represents all the constructors of `ty`. + /// + /// See [`crate::constructor`] for considerations of emptiness. + #[instrument(level = "debug", skip(self), ret)] + pub fn ctors_for_ty(&self, ty: Ty<'tcx>) -> ConstructorSet { + let cx = self; + let make_uint_range = |start, end| { + IntRange::from_range( + MaybeInfiniteInt::new_finite_uint(start), + MaybeInfiniteInt::new_finite_uint(end), + RangeEnd::Included, + ) + }; + // This determines the set of all possible constructors for the type `ty`. For numbers, + // arrays and slices we use ranges and variable-length slices when appropriate. + match ty.kind() { + ty::Bool => ConstructorSet::Bool, + ty::Char => { + // The valid Unicode Scalar Value ranges. + ConstructorSet::Integers { + range_1: make_uint_range('\u{0000}' as u128, '\u{D7FF}' as u128), + range_2: Some(make_uint_range('\u{E000}' as u128, '\u{10FFFF}' as u128)), + } + } + &ty::Int(ity) => { + let range = if ty.is_ptr_sized_integral() { + // The min/max values of `isize` are not allowed to be observed. + IntRange { + lo: MaybeInfiniteInt::NegInfinity, + hi: MaybeInfiniteInt::PosInfinity, + } + } else { + let size = Integer::from_int_ty(&cx.tcx, ity).size().bits(); + let min = 1u128 << (size - 1); + let max = min - 1; + let min = MaybeInfiniteInt::new_finite_int(min, size); + let max = MaybeInfiniteInt::new_finite_int(max, size); + IntRange::from_range(min, max, RangeEnd::Included) + }; + ConstructorSet::Integers { range_1: range, range_2: None } + } + &ty::Uint(uty) => { + let range = if ty.is_ptr_sized_integral() { + // The max value of `usize` is not allowed to be observed. + let lo = MaybeInfiniteInt::new_finite_uint(0); + IntRange { lo, hi: MaybeInfiniteInt::PosInfinity } + } else { + let size = Integer::from_uint_ty(&cx.tcx, uty).size(); + let max = size.truncate(u128::MAX); + make_uint_range(0, max) + }; + ConstructorSet::Integers { range_1: range, range_2: None } + } + ty::Slice(sub_ty) => ConstructorSet::Slice { + array_len: None, + subtype_is_empty: cx.is_uninhabited(*sub_ty), + }, + ty::Array(sub_ty, len) => { + // We treat arrays of a constant but unknown length like slices. + ConstructorSet::Slice { + array_len: len.try_eval_target_usize(cx.tcx, cx.param_env).map(|l| l as usize), + subtype_is_empty: cx.is_uninhabited(*sub_ty), + } + } + ty::Adt(def, args) if def.is_enum() => { + let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty); + if def.variants().is_empty() && !is_declared_nonexhaustive { + ConstructorSet::NoConstructors + } else { + let mut variants = + IndexVec::from_elem(VariantVisibility::Visible, def.variants()); + for (idx, v) in def.variants().iter_enumerated() { + let variant_def_id = def.variant(idx).def_id; + // Visibly uninhabited variants. + let is_inhabited = v + .inhabited_predicate(cx.tcx, *def) + .instantiate(cx.tcx, args) + .apply(cx.tcx, cx.param_env, cx.module); + // Variants that depend on a disabled unstable feature. + let is_unstable = matches!( + cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None), + EvalResult::Deny { .. } + ); + // Foreign `#[doc(hidden)]` variants. 
+ let is_doc_hidden = + cx.tcx.is_doc_hidden(variant_def_id) && !variant_def_id.is_local(); + let visibility = if !is_inhabited { + // FIXME: handle empty+hidden + VariantVisibility::Empty + } else if is_unstable || is_doc_hidden { + VariantVisibility::Hidden + } else { + VariantVisibility::Visible + }; + variants[idx] = visibility; + } + + ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive } + } + } + ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => { + ConstructorSet::Single { empty: cx.is_uninhabited(ty) } + } + ty::Never => ConstructorSet::NoConstructors, + // This type is one for which we cannot list constructors, like `str` or `f64`. + // FIXME(Nadrieril): which of these are actually allowed? + ty::Float(_) + | ty::Str + | ty::Foreign(_) + | ty::RawPtr(_) + | ty::FnDef(_, _) + | ty::FnPtr(_) + | ty::Dynamic(_, _, _) + | ty::Closure(_, _) + | ty::Coroutine(_, _, _) + | ty::Alias(_, _) + | ty::Param(_) + | ty::Error(_) => ConstructorSet::Unlistable, + ty::CoroutineWitness(_, _) | ty::Bound(_, _) | ty::Placeholder(_) | ty::Infer(_) => { + bug!("Encountered unexpected type in `ConstructorSet::for_ty`: {ty:?}") + } + } + } + + pub(crate) fn lower_pat_range_bdy( + &self, + bdy: PatRangeBoundary<'tcx>, + ty: Ty<'tcx>, + ) -> MaybeInfiniteInt { + match bdy { + PatRangeBoundary::NegInfinity => MaybeInfiniteInt::NegInfinity, + PatRangeBoundary::Finite(value) => { + let bits = value.eval_bits(self.tcx, self.param_env); + match *ty.kind() { + ty::Int(ity) => { + let size = Integer::from_int_ty(&self.tcx, ity).size().bits(); + MaybeInfiniteInt::new_finite_int(bits, size) + } + _ => MaybeInfiniteInt::new_finite_uint(bits), + } + } + PatRangeBoundary::PosInfinity => MaybeInfiniteInt::PosInfinity, + } + } + + /// Note: the input patterns must have been lowered through + /// `rustc_mir_build::thir::pattern::check_match::MatchVisitor::lower_pattern`. + pub fn lower_pat(&self, pat: &Pat<'tcx>) -> DeconstructedPat<'p, 'tcx> { + let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat)); + let cx = self; + let ctor; + let fields: &[_]; + match &pat.kind { + PatKind::AscribeUserType { subpattern, .. } + | PatKind::InlineConstant { subpattern, .. } => return self.lower_pat(subpattern), + PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat), + PatKind::Binding { subpattern: None, .. } | PatKind::Wild => { + ctor = Wildcard; + fields = &[]; + } + PatKind::Deref { subpattern } => { + ctor = Single; + fields = singleton(self.lower_pat(subpattern)); + } + PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => { + match pat.ty.kind() { + ty::Tuple(fs) => { + ctor = Single; + let mut wilds: SmallVec<[_; 2]> = + fs.iter().map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect(); + for pat in subpatterns { + wilds[pat.field.index()] = self.lower_pat(&pat.pattern); + } + fields = cx.pattern_arena.alloc_from_iter(wilds); + } + ty::Adt(adt, args) if adt.is_box() => { + // The only legal patterns of type `Box` (outside `std`) are `_` and box + // patterns. If we're here we can assume this is a box pattern. + // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_, + // _)` or a box pattern. As a hack to avoid an ICE with the former, we + // ignore other fields than the first one. This will trigger an error later + // anyway. 
+ // See https://github.com/rust-lang/rust/issues/82772 , + // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977 + // The problem is that we can't know from the type whether we'll match + // normally or through box-patterns. We'll have to figure out a proper + // solution when we introduce generalized deref patterns. Also need to + // prevent mixing of those two options. + let pattern = subpatterns.into_iter().find(|pat| pat.field.index() == 0); + let pat = if let Some(pat) = pattern { + self.lower_pat(&pat.pattern) + } else { + DeconstructedPat::wildcard(args.type_at(0), pat.span) + }; + ctor = Single; + fields = singleton(pat); + } + ty::Adt(adt, _) => { + ctor = match pat.kind { + PatKind::Leaf { .. } => Single, + PatKind::Variant { variant_index, .. } => Variant(variant_index), + _ => bug!(), + }; + let variant = + &adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt)); + // For each field in the variant, we store the relevant index into `self.fields` if any. + let mut field_id_to_id: Vec> = + (0..variant.fields.len()).map(|_| None).collect(); + let tys = cx + .list_variant_nonhidden_fields(pat.ty, variant) + .enumerate() + .map(|(i, (field, ty))| { + field_id_to_id[field.index()] = Some(i); + ty + }); + let mut wilds: SmallVec<[_; 2]> = + tys.map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect(); + for pat in subpatterns { + if let Some(i) = field_id_to_id[pat.field.index()] { + wilds[i] = self.lower_pat(&pat.pattern); + } + } + fields = cx.pattern_arena.alloc_from_iter(wilds); + } + _ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty), + } + } + PatKind::Constant { value } => { + match pat.ty.kind() { + ty::Bool => { + ctor = match value.try_eval_bool(cx.tcx, cx.param_env) { + Some(b) => Bool(b), + None => Opaque(OpaqueId::new()), + }; + fields = &[]; + } + ty::Char | ty::Int(_) | ty::Uint(_) => { + ctor = match value.try_eval_bits(cx.tcx, cx.param_env) { + Some(bits) => { + let x = match *pat.ty.kind() { + ty::Int(ity) => { + let size = Integer::from_int_ty(&cx.tcx, ity).size().bits(); + MaybeInfiniteInt::new_finite_int(bits, size) + } + _ => MaybeInfiniteInt::new_finite_uint(bits), + }; + IntRange(IntRange::from_singleton(x)) + } + None => Opaque(OpaqueId::new()), + }; + fields = &[]; + } + ty::Float(ty::FloatTy::F32) => { + ctor = match value.try_eval_bits(cx.tcx, cx.param_env) { + Some(bits) => { + use rustc_apfloat::Float; + let value = rustc_apfloat::ieee::Single::from_bits(bits); + F32Range(value, value, RangeEnd::Included) + } + None => Opaque(OpaqueId::new()), + }; + fields = &[]; + } + ty::Float(ty::FloatTy::F64) => { + ctor = match value.try_eval_bits(cx.tcx, cx.param_env) { + Some(bits) => { + use rustc_apfloat::Float; + let value = rustc_apfloat::ieee::Double::from_bits(bits); + F64Range(value, value, RangeEnd::Included) + } + None => Opaque(OpaqueId::new()), + }; + fields = &[]; + } + ty::Ref(_, t, _) if t.is_str() => { + // We want a `&str` constant to behave like a `Deref` pattern, to be compatible + // with other `Deref` patterns. This could have been done in `const_to_pat`, + // but that causes issues with the rest of the matching code. + // So here, the constructor for a `"foo"` pattern is `&` (represented by + // `Single`), and has one field. That field has constructor `Str(value)` and no + // fields. + // Note: `t` is `str`, not `&str`. 
+ let subpattern = DeconstructedPat::new(Str(*value), &[], *t, pat.span); + ctor = Single; + fields = singleton(subpattern) + } + // All constants that can be structurally matched have already been expanded + // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are + // opaque. + _ => { + ctor = Opaque(OpaqueId::new()); + fields = &[]; + } + } + } + PatKind::Range(patrange) => { + let PatRange { lo, hi, end, .. } = patrange.as_ref(); + let ty = pat.ty; + ctor = match ty.kind() { + ty::Char | ty::Int(_) | ty::Uint(_) => { + let lo = cx.lower_pat_range_bdy(*lo, ty); + let hi = cx.lower_pat_range_bdy(*hi, ty); + IntRange(IntRange::from_range(lo, hi, *end)) + } + ty::Float(fty) => { + use rustc_apfloat::Float; + let lo = lo.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env)); + let hi = hi.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env)); + match fty { + ty::FloatTy::F32 => { + use rustc_apfloat::ieee::Single; + let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY); + let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY); + F32Range(lo, hi, *end) + } + ty::FloatTy::F64 => { + use rustc_apfloat::ieee::Double; + let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY); + let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY); + F64Range(lo, hi, *end) + } + } + } + _ => bug!("invalid type for range pattern: {}", ty), + }; + fields = &[]; + } + PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => { + let array_len = match pat.ty.kind() { + ty::Array(_, length) => { + Some(length.eval_target_usize(cx.tcx, cx.param_env) as usize) + } + ty::Slice(_) => None, + _ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty), + }; + let kind = if slice.is_some() { + SliceKind::VarLen(prefix.len(), suffix.len()) + } else { + SliceKind::FixedLen(prefix.len() + suffix.len()) + }; + ctor = Slice(Slice::new(array_len, kind)); + fields = cx.pattern_arena.alloc_from_iter( + prefix.iter().chain(suffix.iter()).map(|p| self.lower_pat(&*p)), + ) + } + PatKind::Or { .. } => { + ctor = Or; + let pats = expand_or_pat(pat); + fields = + cx.pattern_arena.alloc_from_iter(pats.into_iter().map(|p| self.lower_pat(p))) + } + PatKind::Never => { + // FIXME(never_patterns): handle `!` in exhaustiveness. This is a sane default + // in the meantime. + ctor = Wildcard; + fields = &[]; + } + PatKind::Error(_) => { + ctor = Opaque(OpaqueId::new()); + fields = &[]; + } + } + DeconstructedPat::new(ctor, fields, pat.ty, pat.span) + } + + /// Convert back to a `thir::PatRangeBoundary` for diagnostic purposes. + /// Note: it is possible to get `isize/usize::MAX+1` here, as explained in the doc for + /// [`IntRange::split`]. This cannot be represented as a `Const`, so we represent it with + /// `PosInfinity`. + pub(crate) fn hoist_pat_range_bdy( + &self, + miint: MaybeInfiniteInt, + ty: Ty<'tcx>, + ) -> PatRangeBoundary<'tcx> { + use MaybeInfiniteInt::*; + let tcx = self.tcx; + match miint { + NegInfinity => PatRangeBoundary::NegInfinity, + Finite(_) => { + let size = ty.primitive_size(tcx); + let bits = match *ty.kind() { + ty::Int(_) => miint.as_finite_int(size.bits()).unwrap(), + _ => miint.as_finite_uint().unwrap(), + }; + match Scalar::try_from_uint(bits, size) { + Some(scalar) => { + let value = mir::Const::from_scalar(tcx, scalar, ty); + PatRangeBoundary::Finite(value) + } + // The value doesn't fit. Since `x >= 0` and 0 always encodes the minimum value + // for a type, the problem isn't that the value is too small. 
So it must be too + // large. + None => PatRangeBoundary::PosInfinity, + } + } + JustAfterMax | PosInfinity => PatRangeBoundary::PosInfinity, + } + } + + /// Whether the range denotes the fictitious values before `isize::MIN` or after + /// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist). + pub fn is_range_beyond_boundaries(&self, range: &IntRange, ty: Ty<'tcx>) -> bool { + ty.is_ptr_sized_integral() && { + // The two invalid ranges are `NegInfinity..isize::MIN` (represented as + // `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `hoist_pat_range_bdy` + // converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `range.lo` + // otherwise. + let lo = self.hoist_pat_range_bdy(range.lo, ty); + matches!(lo, PatRangeBoundary::PosInfinity) + || matches!(range.hi, MaybeInfiniteInt::Finite(0)) + } + } + + /// Convert back to a `thir::Pat` for diagnostic purposes. + pub(crate) fn hoist_pat_range(&self, range: &IntRange, ty: Ty<'tcx>) -> Pat<'tcx> { + use MaybeInfiniteInt::*; + let cx = self; + let kind = if matches!((range.lo, range.hi), (NegInfinity, PosInfinity)) { + PatKind::Wild + } else if range.is_singleton() { + let lo = cx.hoist_pat_range_bdy(range.lo, ty); + let value = lo.as_finite().unwrap(); + PatKind::Constant { value } + } else { + // We convert to an inclusive range for diagnostics. + let mut end = RangeEnd::Included; + let mut lo = cx.hoist_pat_range_bdy(range.lo, ty); + if matches!(lo, PatRangeBoundary::PosInfinity) { + // The only reason to get `PosInfinity` here is the special case where + // `hoist_pat_range_bdy` found `{u,i}size::MAX+1`. So the range denotes the + // fictitious values after `{u,i}size::MAX` (see [`IntRange::split`] for why we do + // this). We show this to the user as `usize::MAX..` which is slightly incorrect but + // probably clear enough. + let c = ty.numeric_max_val(cx.tcx).unwrap(); + let value = mir::Const::from_ty_const(c, cx.tcx); + lo = PatRangeBoundary::Finite(value); + } + let hi = if matches!(range.hi, Finite(0)) { + // The range encodes `..ty::MIN`, so we can't convert it to an inclusive range. + end = RangeEnd::Excluded; + range.hi + } else { + range.hi.minus_one() + }; + let hi = cx.hoist_pat_range_bdy(hi, ty); + PatKind::Range(Box::new(PatRange { lo, hi, end, ty })) + }; + + Pat { ty, span: DUMMY_SP, kind } + } + /// Convert back to a `thir::Pat` for diagnostic purposes. This panics for patterns that don't + /// appear in diagnostics, like float ranges. + pub fn hoist_witness_pat(&self, pat: &WitnessPat<'tcx>) -> Pat<'tcx> { + let cx = self; + let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild); + let mut subpatterns = pat.iter_fields().map(|p| Box::new(cx.hoist_witness_pat(p))); + let kind = match pat.ctor() { + Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) }, + IntRange(range) => return self.hoist_pat_range(range, pat.ty()), + Single | Variant(_) => match pat.ty().kind() { + ty::Tuple(..) => PatKind::Leaf { + subpatterns: subpatterns + .enumerate() + .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern }) + .collect(), + }, + ty::Adt(adt_def, _) if adt_def.is_box() => { + // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside + // of `std`). So this branch is only reachable when the feature is enabled and + // the pattern is a box pattern. 
+ PatKind::Deref { subpattern: subpatterns.next().unwrap() } + } + ty::Adt(adt_def, args) => { + let variant_index = + MatchCheckCtxt::variant_index_for_adt(&pat.ctor(), *adt_def); + let variant = &adt_def.variant(variant_index); + let subpatterns = cx + .list_variant_nonhidden_fields(pat.ty(), variant) + .zip(subpatterns) + .map(|((field, _ty), pattern)| FieldPat { field, pattern }) + .collect(); + + if adt_def.is_enum() { + PatKind::Variant { adt_def: *adt_def, args, variant_index, subpatterns } + } else { + PatKind::Leaf { subpatterns } + } + } + // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should + // be careful to reconstruct the correct constant pattern here. However a string + // literal pattern will never be reported as a non-exhaustiveness witness, so we + // ignore this issue. + ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, + _ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), pat.ty()), + }, + Slice(slice) => { + match slice.kind { + SliceKind::FixedLen(_) => PatKind::Slice { + prefix: subpatterns.collect(), + slice: None, + suffix: Box::new([]), + }, + SliceKind::VarLen(prefix, _) => { + let mut subpatterns = subpatterns.peekable(); + let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect(); + if slice.array_len.is_some() { + // Improves diagnostics a bit: if the type is a known-size array, instead + // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`. + // This is incorrect if the size is not known, since `[_, ..]` captures + // arrays of lengths `>= 1` whereas `[..]` captures any length. + while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) { + prefix.pop(); + } + while subpatterns.peek().is_some() + && is_wildcard(subpatterns.peek().unwrap()) + { + subpatterns.next(); + } + } + let suffix: Box<[_]> = subpatterns.collect(); + let wild = Pat::wildcard_from_ty(pat.ty()); + PatKind::Slice { + prefix: prefix.into_boxed_slice(), + slice: Some(Box::new(wild)), + suffix, + } + } + } + } + &Str(value) => PatKind::Constant { value }, + Wildcard | NonExhaustive | Hidden => PatKind::Wild, + Missing { .. } => bug!( + "trying to convert a `Missing` constructor into a `Pat`; this is probably a bug, + `Missing` should have been processed in `apply_constructors`" + ), + F32Range(..) | F64Range(..) | Opaque(..) | Or => { + bug!("can't convert to pattern: {:?}", pat) + } + }; + + Pat { ty: pat.ty(), span: DUMMY_SP, kind } + } + + /// Best-effort `Debug` implementation. + pub(crate) fn debug_pat( + f: &mut fmt::Formatter<'_>, + pat: &DeconstructedPat<'p, 'tcx>, + ) -> fmt::Result { + let mut first = true; + let mut start_or_continue = |s| { + if first { + first = false; + "" + } else { + s + } + }; + let mut start_or_comma = || start_or_continue(", "); + + match pat.ctor() { + Single | Variant(_) => match pat.ty().kind() { + ty::Adt(def, _) if def.is_box() => { + // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside + // of `std`). So this branch is only reachable when the feature is enabled and + // the pattern is a box pattern. + let subpattern = pat.iter_fields().next().unwrap(); + write!(f, "box {subpattern:?}") + } + ty::Adt(..) | ty::Tuple(..) 
=> { + let variant = match pat.ty().kind() { + ty::Adt(adt, _) => Some( + adt.variant(MatchCheckCtxt::variant_index_for_adt(pat.ctor(), *adt)), + ), + ty::Tuple(_) => None, + _ => unreachable!(), + }; + + if let Some(variant) = variant { + write!(f, "{}", variant.name)?; + } + + // Without `cx`, we can't know which field corresponds to which, so we can't + // get the names of the fields. Instead we just display everything as a tuple + // struct, which should be good enough. + write!(f, "(")?; + for p in pat.iter_fields() { + write!(f, "{}", start_or_comma())?; + write!(f, "{p:?}")?; + } + write!(f, ")") + } + // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should + // be careful to detect strings here. However a string literal pattern will never + // be reported as a non-exhaustiveness witness, so we can ignore this issue. + ty::Ref(_, _, mutbl) => { + let subpattern = pat.iter_fields().next().unwrap(); + write!(f, "&{}{:?}", mutbl.prefix_str(), subpattern) + } + _ => write!(f, "_"), + }, + Slice(slice) => { + let mut subpatterns = pat.iter_fields(); + write!(f, "[")?; + match slice.kind { + SliceKind::FixedLen(_) => { + for p in subpatterns { + write!(f, "{}{:?}", start_or_comma(), p)?; + } + } + SliceKind::VarLen(prefix_len, _) => { + for p in subpatterns.by_ref().take(prefix_len) { + write!(f, "{}{:?}", start_or_comma(), p)?; + } + write!(f, "{}", start_or_comma())?; + write!(f, "..")?; + for p in subpatterns { + write!(f, "{}{:?}", start_or_comma(), p)?; + } + } + } + write!(f, "]") + } + Bool(b) => write!(f, "{b}"), + // Best-effort, will render signed ranges incorrectly + IntRange(range) => write!(f, "{range:?}"), + F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"), + F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"), + Str(value) => write!(f, "{value}"), + Opaque(..) => write!(f, ""), + Or => { + for pat in pat.iter_fields() { + write!(f, "{}{:?}", start_or_continue(" | "), pat)?; + } + Ok(()) + } + Wildcard | Missing { .. } | NonExhaustive | Hidden => write!(f, "_ : {:?}", pat.ty()), + } + } +} + +/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns. +fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> { + fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) { + if let PatKind::Or { pats } = &pat.kind { + for pat in pats.iter() { + expand(pat, vec); + } + } else { + vec.push(pat) + } + } + + let mut pats = Vec::new(); + expand(pat, &mut pats); + pats +} diff --git a/compiler/rustc_pattern_analysis/src/errors.rs b/compiler/rustc_pattern_analysis/src/errors.rs new file mode 100644 index 0000000000000..0efa8a0ec0845 --- /dev/null +++ b/compiler/rustc_pattern_analysis/src/errors.rs @@ -0,0 +1,95 @@ +use crate::{cx::MatchCheckCtxt, pat::WitnessPat}; + +use rustc_errors::{AddToDiagnostic, Diagnostic, SubdiagnosticMessage}; +use rustc_macros::{LintDiagnostic, Subdiagnostic}; +use rustc_middle::thir::Pat; +use rustc_middle::ty::Ty; +use rustc_span::Span; + +#[derive(Subdiagnostic)] +#[label(pattern_analysis_uncovered)] +pub struct Uncovered<'tcx> { + #[primary_span] + span: Span, + count: usize, + witness_1: Pat<'tcx>, + witness_2: Pat<'tcx>, + witness_3: Pat<'tcx>, + remainder: usize, +} + +impl<'tcx> Uncovered<'tcx> { + pub fn new<'p>( + span: Span, + cx: &MatchCheckCtxt<'p, 'tcx>, + witnesses: Vec>, + ) -> Self { + let witness_1 = cx.hoist_witness_pat(witnesses.get(0).unwrap()); + Self { + span, + count: witnesses.len(), + // Substitute dummy values if witnesses is smaller than 3. 
These will never be read. + witness_2: witnesses + .get(1) + .map(|w| cx.hoist_witness_pat(w)) + .unwrap_or_else(|| witness_1.clone()), + witness_3: witnesses + .get(2) + .map(|w| cx.hoist_witness_pat(w)) + .unwrap_or_else(|| witness_1.clone()), + witness_1, + remainder: witnesses.len().saturating_sub(3), + } + } +} + +#[derive(LintDiagnostic)] +#[diag(pattern_analysis_overlapping_range_endpoints)] +#[note] +pub struct OverlappingRangeEndpoints<'tcx> { + #[label] + pub range: Span, + #[subdiagnostic] + pub overlap: Vec>, +} + +pub struct Overlap<'tcx> { + pub span: Span, + pub range: Pat<'tcx>, +} + +impl<'tcx> AddToDiagnostic for Overlap<'tcx> { + fn add_to_diagnostic_with(self, diag: &mut Diagnostic, _: F) + where + F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage, + { + let Overlap { span, range } = self; + + // FIXME(mejrs) unfortunately `#[derive(LintDiagnostic)]` + // does not support `#[subdiagnostic(eager)]`... + let message = format!("this range overlaps on `{range}`..."); + diag.span_label(span, message); + } +} + +#[derive(LintDiagnostic)] +#[diag(pattern_analysis_non_exhaustive_omitted_pattern)] +#[help] +#[note] +pub(crate) struct NonExhaustiveOmittedPattern<'tcx> { + pub scrut_ty: Ty<'tcx>, + #[subdiagnostic] + pub uncovered: Uncovered<'tcx>, +} + +#[derive(LintDiagnostic)] +#[diag(pattern_analysis_non_exhaustive_omitted_pattern_lint_on_arm)] +#[help] +pub(crate) struct NonExhaustiveOmittedPatternLintOnArm { + #[label] + pub lint_span: Span, + #[suggestion(code = "#[{lint_level}({lint_name})]\n", applicability = "maybe-incorrect")] + pub suggest_lint_on_match: Option, + pub lint_level: &'static str, + pub lint_name: &'static str, +} diff --git a/compiler/rustc_pattern_analysis/src/lib.rs b/compiler/rustc_pattern_analysis/src/lib.rs new file mode 100644 index 0000000000000..07730aa49d3f7 --- /dev/null +++ b/compiler/rustc_pattern_analysis/src/lib.rs @@ -0,0 +1,56 @@ +//! Analysis of patterns, notably match exhaustiveness checking. + +pub mod constructor; +pub mod cx; +pub mod errors; +pub(crate) mod lints; +pub mod pat; +pub mod usefulness; + +#[macro_use] +extern crate tracing; +#[macro_use] +extern crate rustc_middle; + +rustc_fluent_macro::fluent_messages! { "../messages.ftl" } + +use lints::PatternColumn; +use rustc_hir::HirId; +use rustc_middle::ty::Ty; +use usefulness::{compute_match_usefulness, UsefulnessReport}; + +use crate::cx::MatchCheckCtxt; +use crate::lints::{lint_nonexhaustive_missing_variants, lint_overlapping_range_endpoints}; +use crate::pat::DeconstructedPat; + +/// The arm of a match expression. +#[derive(Clone, Copy, Debug)] +pub struct MatchArm<'p, 'tcx> { + /// The pattern must have been lowered through `check_match::MatchVisitor::lower_pattern`. + pub pat: &'p DeconstructedPat<'p, 'tcx>, + pub hir_id: HirId, + pub has_guard: bool, +} + +/// The entrypoint for this crate. Computes whether a match is exhaustive and which of its arms are +/// useful, and runs some lints. +pub fn analyze_match<'p, 'tcx>( + cx: &MatchCheckCtxt<'p, 'tcx>, + arms: &[MatchArm<'p, 'tcx>], + scrut_ty: Ty<'tcx>, +) -> UsefulnessReport<'p, 'tcx> { + let pat_column = PatternColumn::new(arms); + + let report = compute_match_usefulness(cx, arms, scrut_ty); + + // Lint on ranges that overlap on their endpoints, which is likely a mistake. + lint_overlapping_range_endpoints(cx, &pat_column); + + // Run the non_exhaustive_omitted_patterns lint. Only run on refutable patterns to avoid hitting + // `if let`s. 
Only run if the match is exhaustive otherwise the error is redundant. + if cx.refutable && report.non_exhaustiveness_witnesses.is_empty() { + lint_nonexhaustive_missing_variants(cx, arms, &pat_column, scrut_ty) + } + + report +} diff --git a/compiler/rustc_pattern_analysis/src/lints.rs b/compiler/rustc_pattern_analysis/src/lints.rs new file mode 100644 index 0000000000000..8ab559c9e7a41 --- /dev/null +++ b/compiler/rustc_pattern_analysis/src/lints.rs @@ -0,0 +1,290 @@ +use smallvec::SmallVec; + +use rustc_data_structures::captures::Captures; +use rustc_middle::ty::{self, Ty}; +use rustc_session::lint; +use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS; +use rustc_span::Span; + +use crate::constructor::{Constructor, IntRange, MaybeInfiniteInt, SplitConstructorSet}; +use crate::cx::MatchCheckCtxt; +use crate::errors::{ + NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Overlap, + OverlappingRangeEndpoints, Uncovered, +}; +use crate::pat::{DeconstructedPat, WitnessPat}; +use crate::usefulness::PatCtxt; +use crate::MatchArm; + +/// A column of patterns in the matrix, where a column is the intuitive notion of "subpatterns that +/// inspect the same subvalue/place". +/// This is used to traverse patterns column-by-column for lints. Despite similarities with the +/// algorithm in [`crate::usefulness`], this does a different traversal. Notably this is linear in +/// the depth of patterns, whereas `compute_exhaustiveness_and_usefulness` is worst-case exponential +/// (exhaustiveness is NP-complete). The core difference is that we treat sub-columns separately. +/// +/// This must not contain an or-pattern. `specialize` takes care to expand them. +/// +/// This is not used in the main algorithm; only in lints. +#[derive(Debug)] +pub(crate) struct PatternColumn<'p, 'tcx> { + patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>, +} + +impl<'p, 'tcx> PatternColumn<'p, 'tcx> { + pub(crate) fn new(arms: &[MatchArm<'p, 'tcx>]) -> Self { + let mut patterns = Vec::with_capacity(arms.len()); + for arm in arms { + if arm.pat.is_or_pat() { + patterns.extend(arm.pat.flatten_or_pat()) + } else { + patterns.push(arm.pat) + } + } + Self { patterns } + } + + fn is_empty(&self) -> bool { + self.patterns.is_empty() + } + fn head_ty(&self) -> Option> { + if self.patterns.len() == 0 { + return None; + } + // If the type is opaque and it is revealed anywhere in the column, we take the revealed + // version. Otherwise we could encounter constructors for the revealed type and crash. + let is_opaque = |ty: Ty<'tcx>| matches!(ty.kind(), ty::Alias(ty::Opaque, ..)); + let first_ty = self.patterns[0].ty(); + if is_opaque(first_ty) { + for pat in &self.patterns { + let ty = pat.ty(); + if !is_opaque(ty) { + return Some(ty); + } + } + } + Some(first_ty) + } + + /// Do constructor splitting on the constructors of the column. + fn analyze_ctors(&self, pcx: &PatCtxt<'_, 'p, 'tcx>) -> SplitConstructorSet<'tcx> { + let column_ctors = self.patterns.iter().map(|p| p.ctor()); + pcx.cx.ctors_for_ty(pcx.ty).split(pcx, column_ctors) + } + + fn iter<'a>(&'a self) -> impl Iterator> + Captures<'a> { + self.patterns.iter().copied() + } + + /// Does specialization: given a constructor, this takes the patterns from the column that match + /// the constructor, and outputs their fields. + /// This returns one column per field of the constructor. They usually all have the same length + /// (the number of patterns in `self` that matched `ctor`), except that we expand or-patterns + /// which may change the lengths. 
+ fn specialize(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Vec { + let arity = ctor.arity(pcx); + if arity == 0 { + return Vec::new(); + } + + // We specialize the column by `ctor`. This gives us `arity`-many columns of patterns. These + // columns may have different lengths in the presence of or-patterns (this is why we can't + // reuse `Matrix`). + let mut specialized_columns: Vec<_> = + (0..arity).map(|_| Self { patterns: Vec::new() }).collect(); + let relevant_patterns = + self.patterns.iter().filter(|pat| ctor.is_covered_by(pcx, pat.ctor())); + for pat in relevant_patterns { + let specialized = pat.specialize(pcx, ctor); + for (subpat, column) in specialized.iter().zip(&mut specialized_columns) { + if subpat.is_or_pat() { + column.patterns.extend(subpat.flatten_or_pat()) + } else { + column.patterns.push(subpat) + } + } + } + + assert!( + !specialized_columns[0].is_empty(), + "ctor {ctor:?} was listed as present but isn't; + there is an inconsistency between `Constructor::is_covered_by` and `ConstructorSet::split`" + ); + specialized_columns + } +} + +/// Traverse the patterns to collect any variants of a non_exhaustive enum that fail to be mentioned +/// in a given column. +#[instrument(level = "debug", skip(cx), ret)] +fn collect_nonexhaustive_missing_variants<'p, 'tcx>( + cx: &MatchCheckCtxt<'p, 'tcx>, + column: &PatternColumn<'p, 'tcx>, +) -> Vec> { + let Some(ty) = column.head_ty() else { + return Vec::new(); + }; + let pcx = &PatCtxt::new_dummy(cx, ty); + + let set = column.analyze_ctors(pcx); + if set.present.is_empty() { + // We can't consistently handle the case where no constructors are present (since this would + // require digging deep through any type in case there's a non_exhaustive enum somewhere), + // so for consistency we refuse to handle the top-level case, where we could handle it. + return vec![]; + } + + let mut witnesses = Vec::new(); + if cx.is_foreign_non_exhaustive_enum(ty) { + witnesses.extend( + set.missing + .into_iter() + // This will list missing visible variants. + .filter(|c| !matches!(c, Constructor::Hidden | Constructor::NonExhaustive)) + .map(|missing_ctor| WitnessPat::wild_from_ctor(pcx, missing_ctor)), + ) + } + + // Recurse into the fields. + for ctor in set.present { + let specialized_columns = column.specialize(pcx, &ctor); + let wild_pat = WitnessPat::wild_from_ctor(pcx, ctor); + for (i, col_i) in specialized_columns.iter().enumerate() { + // Compute witnesses for each column. + let wits_for_col_i = collect_nonexhaustive_missing_variants(cx, col_i); + // For each witness, we build a new pattern in the shape of `ctor(_, _, wit, _, _)`, + // adding enough wildcards to match `arity`. + for wit in wits_for_col_i { + let mut pat = wild_pat.clone(); + pat.fields[i] = wit; + witnesses.push(pat); + } + } + } + witnesses +} + +pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>( + cx: &MatchCheckCtxt<'p, 'tcx>, + arms: &[MatchArm<'p, 'tcx>], + pat_column: &PatternColumn<'p, 'tcx>, + scrut_ty: Ty<'tcx>, +) { + if !matches!( + cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, cx.match_lint_level).0, + rustc_session::lint::Level::Allow + ) { + let witnesses = collect_nonexhaustive_missing_variants(cx, pat_column); + if !witnesses.is_empty() { + // Report that a match of a `non_exhaustive` enum marked with `non_exhaustive_omitted_patterns` + // is not exhaustive enough. + // + // NB: The partner lint for structs lives in `compiler/rustc_hir_analysis/src/check/pat.rs`. 
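For orientation, here is an illustrative sketch (not part of this patch) of the user-facing code the `non_exhaustive_omitted_patterns` lint targets. `std::io::ErrorKind` stands in for the foreign `#[non_exhaustive]` enum, the lint is feature-gated so this needs a nightly toolchain, and the exact diagnostic wording may differ:

```rust
// Enabling the feature-gated lint on the function makes rustc point out the
// variants that the `_` arm silently covers, even though the match is still
// exhaustive thanks to that wildcard.
#![feature(non_exhaustive_omitted_patterns_lint)]

use std::io::ErrorKind;

#[warn(non_exhaustive_omitted_patterns)]
fn describe(kind: ErrorKind) -> &'static str {
    match kind {
        ErrorKind::NotFound => "not found",
        ErrorKind::PermissionDenied => "permission denied",
        // warning: some variants are not matched explicitly (e.g. `ErrorKind::Interrupted`)
        _ => "other",
    }
}

fn main() {
    println!("{}", describe(ErrorKind::NotFound));
}
```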
+ cx.tcx.emit_spanned_lint( + NON_EXHAUSTIVE_OMITTED_PATTERNS, + cx.match_lint_level, + cx.scrut_span, + NonExhaustiveOmittedPattern { + scrut_ty, + uncovered: Uncovered::new(cx.scrut_span, cx, witnesses), + }, + ); + } + } else { + // We used to allow putting the `#[allow(non_exhaustive_omitted_patterns)]` on a match + // arm. This no longer makes sense so we warn users, to avoid silently breaking their + // usage of the lint. + for arm in arms { + let (lint_level, lint_level_source) = + cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.hir_id); + if !matches!(lint_level, rustc_session::lint::Level::Allow) { + let decorator = NonExhaustiveOmittedPatternLintOnArm { + lint_span: lint_level_source.span(), + suggest_lint_on_match: cx.whole_match_span.map(|span| span.shrink_to_lo()), + lint_level: lint_level.as_str(), + lint_name: "non_exhaustive_omitted_patterns", + }; + + use rustc_errors::DecorateLint; + let mut err = cx.tcx.sess.struct_span_warn(arm.pat.span(), ""); + err.set_primary_message(decorator.msg()); + decorator.decorate_lint(&mut err); + err.emit(); + } + } + } +} + +/// Traverse the patterns to warn the user about ranges that overlap on their endpoints. +#[instrument(level = "debug", skip(cx))] +pub(crate) fn lint_overlapping_range_endpoints<'p, 'tcx>( + cx: &MatchCheckCtxt<'p, 'tcx>, + column: &PatternColumn<'p, 'tcx>, +) { + let Some(ty) = column.head_ty() else { + return; + }; + let pcx = &PatCtxt::new_dummy(cx, ty); + + let set = column.analyze_ctors(pcx); + + if matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_)) { + let emit_lint = |overlap: &IntRange, this_span: Span, overlapped_spans: &[Span]| { + let overlap_as_pat = cx.hoist_pat_range(overlap, ty); + let overlaps: Vec<_> = overlapped_spans + .iter() + .copied() + .map(|span| Overlap { range: overlap_as_pat.clone(), span }) + .collect(); + cx.tcx.emit_spanned_lint( + lint::builtin::OVERLAPPING_RANGE_ENDPOINTS, + cx.match_lint_level, + this_span, + OverlappingRangeEndpoints { overlap: overlaps, range: this_span }, + ); + }; + + // If two ranges overlapped, the split set will contain their intersection as a singleton. + let split_int_ranges = set.present.iter().filter_map(|c| c.as_int_range()); + for overlap_range in split_int_ranges.clone() { + if overlap_range.is_singleton() { + let overlap: MaybeInfiniteInt = overlap_range.lo; + // Ranges that look like `lo..=overlap`. + let mut prefixes: SmallVec<[_; 1]> = Default::default(); + // Ranges that look like `overlap..=hi`. + let mut suffixes: SmallVec<[_; 1]> = Default::default(); + // Iterate on patterns that contained `overlap`. + for pat in column.iter() { + let this_span = pat.span(); + let Constructor::IntRange(this_range) = pat.ctor() else { continue }; + if this_range.is_singleton() { + // Don't lint when one of the ranges is a singleton. + continue; + } + if this_range.lo == overlap { + // `this_range` looks like `overlap..=this_range.hi`; it overlaps with any + // ranges that look like `lo..=overlap`. + if !prefixes.is_empty() { + emit_lint(overlap_range, this_span, &prefixes); + } + suffixes.push(this_span) + } else if this_range.hi == overlap.plus_one() { + // `this_range` looks like `this_range.lo..=overlap`; it overlaps with any + // ranges that look like `overlap..=hi`. + if !suffixes.is_empty() { + emit_lint(overlap_range, this_span, &suffixes); + } + prefixes.push(this_span) + } + } + } + } + } else { + // Recurse into the fields. 
+ for ctor in set.present { + for col in column.specialize(pcx, &ctor) { + lint_overlapping_range_endpoints(cx, &col); + } + } + } +} diff --git a/compiler/rustc_pattern_analysis/src/pat.rs b/compiler/rustc_pattern_analysis/src/pat.rs new file mode 100644 index 0000000000000..404651124ad34 --- /dev/null +++ b/compiler/rustc_pattern_analysis/src/pat.rs @@ -0,0 +1,205 @@ +//! As explained in [`crate::usefulness`], values and patterns are made from constructors applied to +//! fields. This file defines types that represent patterns in this way. +use std::cell::Cell; +use std::fmt; + +use smallvec::{smallvec, SmallVec}; + +use rustc_data_structures::captures::Captures; +use rustc_middle::ty::{self, Ty}; +use rustc_span::{Span, DUMMY_SP}; + +use self::Constructor::*; +use self::SliceKind::*; + +use crate::constructor::{Constructor, SliceKind}; +use crate::cx::MatchCheckCtxt; +use crate::usefulness::PatCtxt; + +/// Values and patterns can be represented as a constructor applied to some fields. This represents +/// a pattern in this form. +/// This also uses interior mutability to keep track of whether the pattern has been found reachable +/// during analysis. For this reason they cannot be cloned. +/// A `DeconstructedPat` will almost always come from user input; the only exception are some +/// `Wildcard`s introduced during specialization. +/// +/// Note that the number of fields may not match the fields declared in the original struct/variant. +/// This happens if a private or `non_exhaustive` field is uninhabited, because the code mustn't +/// observe that it is uninhabited. In that case that field is not included in `fields`. Care must +/// be taken when converting to/from `thir::Pat`. +pub struct DeconstructedPat<'p, 'tcx> { + ctor: Constructor<'tcx>, + fields: &'p [DeconstructedPat<'p, 'tcx>], + ty: Ty<'tcx>, + span: Span, + /// Whether removing this arm would change the behavior of the match expression. + useful: Cell, +} + +impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> { + pub fn wildcard(ty: Ty<'tcx>, span: Span) -> Self { + Self::new(Wildcard, &[], ty, span) + } + + pub fn new( + ctor: Constructor<'tcx>, + fields: &'p [DeconstructedPat<'p, 'tcx>], + ty: Ty<'tcx>, + span: Span, + ) -> Self { + DeconstructedPat { ctor, fields, ty, span, useful: Cell::new(false) } + } + + pub(crate) fn is_or_pat(&self) -> bool { + matches!(self.ctor, Or) + } + /// Expand this (possibly-nested) or-pattern into its alternatives. + pub(crate) fn flatten_or_pat(&'p self) -> SmallVec<[&'p Self; 1]> { + if self.is_or_pat() { + self.iter_fields().flat_map(|p| p.flatten_or_pat()).collect() + } else { + smallvec![self] + } + } + + pub fn ctor(&self) -> &Constructor<'tcx> { + &self.ctor + } + pub fn ty(&self) -> Ty<'tcx> { + self.ty + } + pub fn span(&self) -> Span { + self.span + } + + pub fn iter_fields<'a>( + &'a self, + ) -> impl Iterator> + Captures<'a> { + self.fields.iter() + } + + /// Specialize this pattern with a constructor. + /// `other_ctor` can be different from `self.ctor`, but must be covered by it. + pub(crate) fn specialize<'a>( + &'a self, + pcx: &PatCtxt<'_, 'p, 'tcx>, + other_ctor: &Constructor<'tcx>, + ) -> SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]> { + match (&self.ctor, other_ctor) { + (Wildcard, _) => { + // We return a wildcard for each field of `other_ctor`. + pcx.cx.ctor_wildcard_fields(other_ctor, pcx.ty).iter().collect() + } + (Slice(self_slice), Slice(other_slice)) + if self_slice.arity() != other_slice.arity() => + { + // The only tricky case: two slices of different arity. 
Since `self_slice` covers + // `other_slice`, `self_slice` must be `VarLen`, i.e. of the form + // `[prefix, .., suffix]`. Moreover `other_slice` is guaranteed to have a larger + // arity. So we fill the middle part with enough wildcards to reach the length of + // the new, larger slice. + match self_slice.kind { + FixedLen(_) => bug!("{:?} doesn't cover {:?}", self_slice, other_slice), + VarLen(prefix, suffix) => { + let (ty::Slice(inner_ty) | ty::Array(inner_ty, _)) = *self.ty.kind() else { + bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty); + }; + let prefix = &self.fields[..prefix]; + let suffix = &self.fields[self_slice.arity() - suffix..]; + let wildcard: &_ = pcx + .cx + .pattern_arena + .alloc(DeconstructedPat::wildcard(inner_ty, DUMMY_SP)); + let extra_wildcards = other_slice.arity() - self_slice.arity(); + let extra_wildcards = (0..extra_wildcards).map(|_| wildcard); + prefix.iter().chain(extra_wildcards).chain(suffix).collect() + } + } + } + _ => self.fields.iter().collect(), + } + } + + /// We keep track for each pattern if it was ever useful during the analysis. This is used + /// with `redundant_spans` to report redundant subpatterns arising from or patterns. + pub(crate) fn set_useful(&self) { + self.useful.set(true) + } + pub(crate) fn is_useful(&self) -> bool { + if self.useful.get() { + true + } else if self.is_or_pat() && self.iter_fields().any(|f| f.is_useful()) { + // We always expand or patterns in the matrix, so we will never see the actual + // or-pattern (the one with constructor `Or`) in the column. As such, it will not be + // marked as useful itself, only its children will. We recover this information here. + self.set_useful(); + true + } else { + false + } + } + + /// Report the spans of subpatterns that were not useful, if any. + pub(crate) fn redundant_spans(&self) -> Vec { + let mut spans = Vec::new(); + self.collect_redundant_spans(&mut spans); + spans + } + fn collect_redundant_spans(&self, spans: &mut Vec) { + // We don't look at subpatterns if we already reported the whole pattern as redundant. + if !self.is_useful() { + spans.push(self.span); + } else { + for p in self.iter_fields() { + p.collect_redundant_spans(spans); + } + } + } +} + +/// This is mostly copied from the `Pat` impl. This is best effort and not good enough for a +/// `Display` impl. +impl<'p, 'tcx> fmt::Debug for DeconstructedPat<'p, 'tcx> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + MatchCheckCtxt::debug_pat(f, self) + } +} + +/// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics +/// purposes. As such they don't use interning and can be cloned. +#[derive(Debug, Clone)] +pub struct WitnessPat<'tcx> { + ctor: Constructor<'tcx>, + pub(crate) fields: Vec>, + ty: Ty<'tcx>, +} + +impl<'tcx> WitnessPat<'tcx> { + pub(crate) fn new(ctor: Constructor<'tcx>, fields: Vec, ty: Ty<'tcx>) -> Self { + Self { ctor, fields, ty } + } + pub(crate) fn wildcard(ty: Ty<'tcx>) -> Self { + Self::new(Wildcard, Vec::new(), ty) + } + + /// Construct a pattern that matches everything that starts with this constructor. + /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern + /// `Some(_)`. 
+ pub(crate) fn wild_from_ctor(pcx: &PatCtxt<'_, '_, 'tcx>, ctor: Constructor<'tcx>) -> Self { + let field_tys = + pcx.cx.ctor_wildcard_fields(&ctor, pcx.ty).iter().map(|deco_pat| deco_pat.ty()); + let fields = field_tys.map(|ty| Self::wildcard(ty)).collect(); + Self::new(ctor, fields, pcx.ty) + } + + pub fn ctor(&self) -> &Constructor<'tcx> { + &self.ctor + } + pub fn ty(&self) -> Ty<'tcx> { + self.ty + } + + pub fn iter_fields<'a>(&'a self) -> impl Iterator> { + self.fields.iter() + } +} diff --git a/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs b/compiler/rustc_pattern_analysis/src/usefulness.rs similarity index 75% rename from compiler/rustc_mir_build/src/thir/pattern/usefulness.rs rename to compiler/rustc_pattern_analysis/src/usefulness.rs index 637cc38be2c7c..f268a551547f3 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs +++ b/compiler/rustc_pattern_analysis/src/usefulness.rs @@ -97,8 +97,9 @@ //! - `matches!([v0], [p0, .., p1]) := false` (incompatible lengths) //! - `matches!([v0, v1, v2], [p0, .., p1]) := matches!(v0, p0) && matches!(v2, p1)` //! -//! Constructors, fields and relevant operations are defined in the [`super::deconstruct_pat`] -//! module. The question of whether a constructor is matched by another one is answered by +//! Constructors and relevant operations are defined in the [`crate::constructor`] module. A +//! representation of patterns that uses constructors is available in [`crate::pat`]. The question +//! of whether a constructor is matched by another one is answered by //! [`Constructor::is_covered_by`]. //! //! Note 1: variable bindings (like the `x` in `Some(x)`) match anything, so we treat them as wildcards. @@ -241,8 +242,8 @@ //! Therefore `usefulness(tp_1, tp_2, tq)` returns the single witness-tuple `[Variant2(Some(true), 0)]`. //! //! -//! Computing the set of constructors for a type is done in [`ConstructorSet::for_ty`]. See the -//! following sections for more accurate versions of the algorithm and corresponding links. +//! Computing the set of constructors for a type is done in [`MatchCheckCtxt::ctors_for_ty`]. See +//! the following sections for more accurate versions of the algorithm and corresponding links. //! //! //! @@ -295,7 +296,7 @@ //! the same reasoning, we only need to try two cases: `North`, and "everything else". //! //! We call _constructor splitting_ the operation that computes such a minimal set of cases to try. -//! This is done in [`ConstructorSet::split`] and explained in [`super::deconstruct_pat`]. +//! This is done in [`ConstructorSet::split`] and explained in [`crate::constructor`]. //! //! //! @@ -551,82 +552,33 @@ //! I (Nadrieril) prefer to put new tests in `ui/pattern/usefulness` unless there's a specific //! reason not to, for example if they crucially depend on a particular feature like `or_patterns`. 
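As a concrete reference point for the two questions this module answers (is the match exhaustive, and is each arm useful), here is a small illustrative example in plain, stable Rust; the middle arm is the one that comes back as redundant and is surfaced through the `unreachable_patterns` lint:

```rust
// The match is exhaustive, so no missing-pattern error is emitted, but the
// second arm matches nothing that `Some(_)` does not already cover, so it is
// reported as unreachable (i.e. not useful).
fn main() {
    let x: Option<bool> = Some(true);
    match x {
        Some(_) => println!("some"),
        Some(true) => println!("true"), // warning: unreachable pattern
        None => println!("none"),
    }
}
```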
-use self::ValidityConstraint::*; -use super::deconstruct_pat::{ - Constructor, ConstructorSet, DeconstructedPat, IntRange, MaybeInfiniteInt, SplitConstructorSet, - WitnessPat, -}; -use crate::errors::{ - NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Overlap, - OverlappingRangeEndpoints, Uncovered, -}; - -use rustc_data_structures::captures::Captures; - -use rustc_arena::TypedArena; -use rustc_data_structures::stack::ensure_sufficient_stack; -use rustc_hir::def_id::DefId; -use rustc_hir::HirId; -use rustc_middle::ty::{self, Ty, TyCtxt}; -use rustc_session::lint; -use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS; -use rustc_span::{Span, DUMMY_SP}; - use smallvec::{smallvec, SmallVec}; use std::fmt; -pub(crate) struct MatchCheckCtxt<'p, 'tcx> { - pub(crate) tcx: TyCtxt<'tcx>, - /// The module in which the match occurs. This is necessary for - /// checking inhabited-ness of types because whether a type is (visibly) - /// inhabited can depend on whether it was defined in the current module or - /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty - /// outside its module and should not be matchable with an empty match statement. - pub(crate) module: DefId, - pub(crate) param_env: ty::ParamEnv<'tcx>, - pub(crate) pattern_arena: &'p TypedArena>, - /// Lint level at the match. - pub(crate) match_lint_level: HirId, - /// The span of the whole match, if applicable. - pub(crate) whole_match_span: Option, - /// Span of the scrutinee. - pub(crate) scrut_span: Span, - /// Only produce `NON_EXHAUSTIVE_OMITTED_PATTERNS` lint on refutable patterns. - pub(crate) refutable: bool, - /// Whether the data at the scrutinee is known to be valid. This is false if the scrutinee comes - /// from a union field, a pointer deref, or a reference deref (pending opsem decisions). - pub(crate) known_valid_scrutinee: bool, -} +use rustc_data_structures::{captures::Captures, stack::ensure_sufficient_stack}; +use rustc_middle::ty::{self, Ty}; +use rustc_span::{Span, DUMMY_SP}; -impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> { - pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool { - !ty.is_inhabited_from(self.tcx, self.module, self.param_env) - } +use crate::constructor::{Constructor, ConstructorSet}; +use crate::cx::MatchCheckCtxt; +use crate::pat::{DeconstructedPat, WitnessPat}; +use crate::MatchArm; - /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`. - pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool { - match ty.kind() { - ty::Adt(def, ..) => { - def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did().is_local() - } - _ => false, - } - } -} +use self::ValidityConstraint::*; #[derive(Copy, Clone)] -pub(super) struct PatCtxt<'a, 'p, 'tcx> { - pub(super) cx: &'a MatchCheckCtxt<'p, 'tcx>, +pub(crate) struct PatCtxt<'a, 'p, 'tcx> { + pub(crate) cx: &'a MatchCheckCtxt<'p, 'tcx>, /// Type of the current column under investigation. - pub(super) ty: Ty<'tcx>, + pub(crate) ty: Ty<'tcx>, /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a /// subpattern. - pub(super) is_top_level: bool, + pub(crate) is_top_level: bool, } impl<'a, 'p, 'tcx> PatCtxt<'a, 'p, 'tcx> { /// A `PatCtxt` when code other than `is_useful` needs one. 
- fn new_dummy(cx: &'a MatchCheckCtxt<'p, 'tcx>, ty: Ty<'tcx>) -> Self { + pub(crate) fn new_dummy(cx: &'a MatchCheckCtxt<'p, 'tcx>, ty: Ty<'tcx>) -> Self { PatCtxt { cx, ty, is_top_level: false } } } @@ -643,7 +595,7 @@ impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> { /// - in the matrix, track whether a given place (aka column) is known to contain a valid value or /// not. #[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub(super) enum ValidityConstraint { +enum ValidityConstraint { ValidOnly, MaybeInvalid, /// Option for backwards compatibility: the place is not known to be valid but we allow omitting @@ -652,7 +604,7 @@ pub(super) enum ValidityConstraint { } impl ValidityConstraint { - pub(super) fn from_bool(is_valid_only: bool) -> Self { + fn from_bool(is_valid_only: bool) -> Self { if is_valid_only { ValidOnly } else { MaybeInvalid } } @@ -664,10 +616,10 @@ impl ValidityConstraint { } } - pub(super) fn is_known_valid(self) -> bool { + fn is_known_valid(self) -> bool { matches!(self, ValidOnly) } - pub(super) fn allows_omitting_empty_arms(self) -> bool { + fn allows_omitting_empty_arms(self) -> bool { matches!(self, ValidOnly | MaybeInvalidButAllowOmittingArms) } @@ -677,11 +629,7 @@ impl ValidityConstraint { /// /// Pending further opsem decisions, the current behavior is: validity is preserved, except /// inside `&` and union fields where validity is reset to `MaybeInvalid`. - pub(super) fn specialize<'tcx>( - self, - pcx: &PatCtxt<'_, '_, 'tcx>, - ctor: &Constructor<'tcx>, - ) -> Self { + fn specialize<'tcx>(self, pcx: &PatCtxt<'_, '_, 'tcx>, ctor: &Constructor<'tcx>) -> Self { // We preserve validity except when we go inside a reference or a union field. if matches!(ctor, Constructor::Single) && (matches!(pcx.ty.kind(), ty::Ref(..)) @@ -1072,7 +1020,7 @@ impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> { /// /// See the top of the file for more detailed explanations and examples. #[derive(Debug, Clone)] -pub(crate) struct WitnessStack<'tcx>(Vec>); +struct WitnessStack<'tcx>(Vec>); impl<'tcx> WitnessStack<'tcx> { /// Asserts that the witness contains a single pattern, and returns it. @@ -1119,7 +1067,7 @@ impl<'tcx> WitnessStack<'tcx> { /// Just as the `Matrix` starts with a single column, by the end of the algorithm, this has a single /// column, which contains the patterns that are missing for the match to be exhaustive. #[derive(Debug, Clone)] -pub struct WitnessMatrix<'tcx>(Vec>); +struct WitnessMatrix<'tcx>(Vec>); impl<'tcx> WitnessMatrix<'tcx> { /// New matrix with no witnesses. @@ -1246,7 +1194,9 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>( // Analyze the constructors present in this column. let ctors = matrix.heads().map(|p| p.ctor()); - let split_set = ConstructorSet::for_ty(cx, ty).split(pcx, ctors); + let ctors_for_ty = &cx.ctors_for_ty(ty); + let is_integers = matches!(ctors_for_ty, ConstructorSet::Integers { .. }); // For diagnostics. + let split_set = ctors_for_ty.split(pcx, ctors); let all_missing = split_set.present.is_empty(); // Build the set of constructors we will specialize with. It must cover the whole type. @@ -1261,7 +1211,7 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>( } // Decide what constructors to report. - let always_report_all = is_top_level && !IntRange::is_integral(pcx.ty); + let always_report_all = is_top_level && !is_integers; // Whether we should report "Enum::A and Enum::C are missing" or "_ is missing". let report_individual_missing_ctors = always_report_all || !all_missing; // Which constructors are considered missing. 
We ensure that `!missing_ctors.is_empty() => @@ -1318,233 +1268,9 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>( ret } -/// A column of patterns in the matrix, where a column is the intuitive notion of "subpatterns that -/// inspect the same subvalue/place". -/// This is used to traverse patterns column-by-column for lints. Despite similarities with -/// [`compute_exhaustiveness_and_usefulness`], this does a different traversal. Notably this is -/// linear in the depth of patterns, whereas `compute_exhaustiveness_and_usefulness` is worst-case -/// exponential (exhaustiveness is NP-complete). The core difference is that we treat sub-columns -/// separately. -/// -/// This must not contain an or-pattern. `specialize` takes care to expand them. -/// -/// This is not used in the main algorithm; only in lints. -#[derive(Debug)] -struct PatternColumn<'p, 'tcx> { - patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>, -} - -impl<'p, 'tcx> PatternColumn<'p, 'tcx> { - fn new(patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>) -> Self { - Self { patterns } - } - - fn is_empty(&self) -> bool { - self.patterns.is_empty() - } - fn head_ty(&self) -> Option> { - if self.patterns.len() == 0 { - return None; - } - // If the type is opaque and it is revealed anywhere in the column, we take the revealed - // version. Otherwise we could encounter constructors for the revealed type and crash. - let is_opaque = |ty: Ty<'tcx>| matches!(ty.kind(), ty::Alias(ty::Opaque, ..)); - let first_ty = self.patterns[0].ty(); - if is_opaque(first_ty) { - for pat in &self.patterns { - let ty = pat.ty(); - if !is_opaque(ty) { - return Some(ty); - } - } - } - Some(first_ty) - } - - /// Do constructor splitting on the constructors of the column. - fn analyze_ctors(&self, pcx: &PatCtxt<'_, 'p, 'tcx>) -> SplitConstructorSet<'tcx> { - let column_ctors = self.patterns.iter().map(|p| p.ctor()); - ConstructorSet::for_ty(pcx.cx, pcx.ty).split(pcx, column_ctors) - } - - fn iter<'a>(&'a self) -> impl Iterator> + Captures<'a> { - self.patterns.iter().copied() - } - - /// Does specialization: given a constructor, this takes the patterns from the column that match - /// the constructor, and outputs their fields. - /// This returns one column per field of the constructor. They usually all have the same length - /// (the number of patterns in `self` that matched `ctor`), except that we expand or-patterns - /// which may change the lengths. - fn specialize(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Vec { - let arity = ctor.arity(pcx); - if arity == 0 { - return Vec::new(); - } - - // We specialize the column by `ctor`. This gives us `arity`-many columns of patterns. These - // columns may have different lengths in the presence of or-patterns (this is why we can't - // reuse `Matrix`). 
- let mut specialized_columns: Vec<_> = - (0..arity).map(|_| Self { patterns: Vec::new() }).collect(); - let relevant_patterns = - self.patterns.iter().filter(|pat| ctor.is_covered_by(pcx, pat.ctor())); - for pat in relevant_patterns { - let specialized = pat.specialize(pcx, ctor); - for (subpat, column) in specialized.iter().zip(&mut specialized_columns) { - if subpat.is_or_pat() { - column.patterns.extend(subpat.flatten_or_pat()) - } else { - column.patterns.push(subpat) - } - } - } - - assert!( - !specialized_columns[0].is_empty(), - "ctor {ctor:?} was listed as present but isn't; - there is an inconsistency between `Constructor::is_covered_by` and `ConstructorSet::split`" - ); - specialized_columns - } -} - -/// Traverse the patterns to collect any variants of a non_exhaustive enum that fail to be mentioned -/// in a given column. -#[instrument(level = "debug", skip(cx), ret)] -fn collect_nonexhaustive_missing_variants<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - column: &PatternColumn<'p, 'tcx>, -) -> Vec> { - let Some(ty) = column.head_ty() else { - return Vec::new(); - }; - let pcx = &PatCtxt::new_dummy(cx, ty); - - let set = column.analyze_ctors(pcx); - if set.present.is_empty() { - // We can't consistently handle the case where no constructors are present (since this would - // require digging deep through any type in case there's a non_exhaustive enum somewhere), - // so for consistency we refuse to handle the top-level case, where we could handle it. - return vec![]; - } - - let mut witnesses = Vec::new(); - if cx.is_foreign_non_exhaustive_enum(ty) { - witnesses.extend( - set.missing - .into_iter() - // This will list missing visible variants. - .filter(|c| !matches!(c, Constructor::Hidden | Constructor::NonExhaustive)) - .map(|missing_ctor| WitnessPat::wild_from_ctor(pcx, missing_ctor)), - ) - } - - // Recurse into the fields. - for ctor in set.present { - let specialized_columns = column.specialize(pcx, &ctor); - let wild_pat = WitnessPat::wild_from_ctor(pcx, ctor); - for (i, col_i) in specialized_columns.iter().enumerate() { - // Compute witnesses for each column. - let wits_for_col_i = collect_nonexhaustive_missing_variants(cx, col_i); - // For each witness, we build a new pattern in the shape of `ctor(_, _, wit, _, _)`, - // adding enough wildcards to match `arity`. - for wit in wits_for_col_i { - let mut pat = wild_pat.clone(); - pat.fields[i] = wit; - witnesses.push(pat); - } - } - } - witnesses -} - -/// Traverse the patterns to warn the user about ranges that overlap on their endpoints. -#[instrument(level = "debug", skip(cx))] -fn lint_overlapping_range_endpoints<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - column: &PatternColumn<'p, 'tcx>, -) { - let Some(ty) = column.head_ty() else { - return; - }; - let pcx = &PatCtxt::new_dummy(cx, ty); - - let set = column.analyze_ctors(pcx); - - if IntRange::is_integral(ty) { - let emit_lint = |overlap: &IntRange, this_span: Span, overlapped_spans: &[Span]| { - let overlap_as_pat = overlap.to_diagnostic_pat(ty, cx.tcx); - let overlaps: Vec<_> = overlapped_spans - .iter() - .copied() - .map(|span| Overlap { range: overlap_as_pat.clone(), span }) - .collect(); - cx.tcx.emit_spanned_lint( - lint::builtin::OVERLAPPING_RANGE_ENDPOINTS, - cx.match_lint_level, - this_span, - OverlappingRangeEndpoints { overlap: overlaps, range: this_span }, - ); - }; - - // If two ranges overlapped, the split set will contain their intersection as a singleton. 
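Concretely, the kind of code this endpoint check is after looks like the following illustrative example (not part of the patch): both ranges contain `5`, so after splitting, the intersection `5` shows up as a singleton and the lint fires on the second arm.

```rust
// `0..=5` and `5..=10` overlap exactly on the endpoint `5`, which is usually
// an off-by-one mistake: `5` is matched by the first arm, never the second.
fn bucket(n: u8) -> &'static str {
    match n {
        0..=5 => "low",
        5..=10 => "mid", // warning: multiple patterns overlap on their endpoints
        _ => "high",
    }
}

fn main() {
    assert_eq!(bucket(5), "low");
}
```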
- let split_int_ranges = set.present.iter().filter_map(|c| c.as_int_range()); - for overlap_range in split_int_ranges.clone() { - if overlap_range.is_singleton() { - let overlap: MaybeInfiniteInt = overlap_range.lo; - // Ranges that look like `lo..=overlap`. - let mut prefixes: SmallVec<[_; 1]> = Default::default(); - // Ranges that look like `overlap..=hi`. - let mut suffixes: SmallVec<[_; 1]> = Default::default(); - // Iterate on patterns that contained `overlap`. - for pat in column.iter() { - let this_span = pat.span(); - let Constructor::IntRange(this_range) = pat.ctor() else { continue }; - if this_range.is_singleton() { - // Don't lint when one of the ranges is a singleton. - continue; - } - if this_range.lo == overlap { - // `this_range` looks like `overlap..=this_range.hi`; it overlaps with any - // ranges that look like `lo..=overlap`. - if !prefixes.is_empty() { - emit_lint(overlap_range, this_span, &prefixes); - } - suffixes.push(this_span) - } else if this_range.hi == overlap.plus_one() { - // `this_range` looks like `this_range.lo..=overlap`; it overlaps with any - // ranges that look like `overlap..=hi`. - if !suffixes.is_empty() { - emit_lint(overlap_range, this_span, &suffixes); - } - prefixes.push(this_span) - } - } - } - } - } else { - // Recurse into the fields. - for ctor in set.present { - for col in column.specialize(pcx, &ctor) { - lint_overlapping_range_endpoints(cx, &col); - } - } - } -} - -/// The arm of a match expression. -#[derive(Clone, Copy, Debug)] -pub(crate) struct MatchArm<'p, 'tcx> { - /// The pattern must have been lowered through `check_match::MatchVisitor::lower_pattern`. - pub(crate) pat: &'p DeconstructedPat<'p, 'tcx>, - pub(crate) hir_id: HirId, - pub(crate) has_guard: bool, -} - /// Indicates whether or not a given arm is useful. #[derive(Clone, Debug)] -pub(crate) enum Usefulness { +pub enum Usefulness { /// The arm is useful. This additionally carries a set of or-pattern branches that have been /// found to be redundant despite the overall arm being useful. Used only in the presence of /// or-patterns, otherwise it stays empty. @@ -1555,16 +1281,15 @@ pub(crate) enum Usefulness { } /// The output of checking a match for exhaustiveness and arm usefulness. -pub(crate) struct UsefulnessReport<'p, 'tcx> { +pub struct UsefulnessReport<'p, 'tcx> { /// For each arm of the input, whether that arm is useful after the arms above it. - pub(crate) arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Usefulness)>, + pub arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Usefulness)>, /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of /// exhaustiveness. - pub(crate) non_exhaustiveness_witnesses: Vec>, + pub non_exhaustiveness_witnesses: Vec>, } -/// The entrypoint for this file. Computes whether a match is exhaustive and which of its arms are -/// useful. +/// Computes whether a match is exhaustive and which of its arms are useful. #[instrument(skip(cx, arms), level = "debug")] pub(crate) fn compute_match_usefulness<'p, 'tcx>( cx: &MatchCheckCtxt<'p, 'tcx>, @@ -1590,59 +1315,5 @@ pub(crate) fn compute_match_usefulness<'p, 'tcx>( (arm, usefulness) }) .collect(); - let report = UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses }; - - let pat_column = PatternColumn::new(matrix.heads().collect()); - // Lint on ranges that overlap on their endpoints, which is likely a mistake. - lint_overlapping_range_endpoints(cx, &pat_column); - - // Run the non_exhaustive_omitted_patterns lint. 
Only run on refutable patterns to avoid hitting - // `if let`s. Only run if the match is exhaustive otherwise the error is redundant. - if cx.refutable && report.non_exhaustiveness_witnesses.is_empty() { - if !matches!( - cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, cx.match_lint_level).0, - rustc_session::lint::Level::Allow - ) { - let witnesses = collect_nonexhaustive_missing_variants(cx, &pat_column); - if !witnesses.is_empty() { - // Report that a match of a `non_exhaustive` enum marked with `non_exhaustive_omitted_patterns` - // is not exhaustive enough. - // - // NB: The partner lint for structs lives in `compiler/rustc_hir_analysis/src/check/pat.rs`. - cx.tcx.emit_spanned_lint( - NON_EXHAUSTIVE_OMITTED_PATTERNS, - cx.match_lint_level, - cx.scrut_span, - NonExhaustiveOmittedPattern { - scrut_ty, - uncovered: Uncovered::new(cx.scrut_span, cx, witnesses), - }, - ); - } - } else { - // We used to allow putting the `#[allow(non_exhaustive_omitted_patterns)]` on a match - // arm. This no longer makes sense so we warn users, to avoid silently breaking their - // usage of the lint. - for arm in arms { - let (lint_level, lint_level_source) = - cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.hir_id); - if !matches!(lint_level, rustc_session::lint::Level::Allow) { - let decorator = NonExhaustiveOmittedPatternLintOnArm { - lint_span: lint_level_source.span(), - suggest_lint_on_match: cx.whole_match_span.map(|span| span.shrink_to_lo()), - lint_level: lint_level.as_str(), - lint_name: "non_exhaustive_omitted_patterns", - }; - - use rustc_errors::DecorateLint; - let mut err = cx.tcx.sess.struct_span_warn(arm.pat.span(), ""); - err.set_primary_message(decorator.msg()); - decorator.decorate_lint(&mut err); - err.emit(); - } - } - } - } - - report + UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses } } diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs index 65901eedb2148..213b320ed1a66 100644 --- a/compiler/rustc_resolve/src/build_reduced_graph.rs +++ b/compiler/rustc_resolve/src/build_reduced_graph.rs @@ -210,6 +210,11 @@ impl<'a, 'tcx> AsMut> for BuildReducedGraphVisitor<'a, '_, 't } impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { + fn res(&self, def_id: impl Into) -> Res { + let def_id = def_id.into(); + Res::Def(self.r.tcx.def_kind(def_id), def_id) + } + fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility { self.try_resolve_visibility(vis, true).unwrap_or_else(|err| { self.r.report_vis_error(err); @@ -628,6 +633,8 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { let vis = self.resolve_visibility(&item.vis); let local_def_id = self.r.local_def_id(item.id); let def_id = local_def_id.to_def_id(); + let def_kind = self.r.tcx.def_kind(def_id); + let res = Res::Def(def_kind, def_id); self.r.visibilities.insert(local_def_id, vis); @@ -659,7 +666,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { ItemKind::Mod(..) => { let module = self.r.new_module( Some(parent), - ModuleKind::Def(DefKind::Mod, def_id, ident.name), + ModuleKind::Def(def_kind, def_id, ident.name), expansion.to_expn_id(), item.span, parent.no_implicit_prelude @@ -672,16 +679,13 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { } // These items live in the value namespace. - ItemKind::Static(box ast::StaticItem { mutability, .. }) => { - let res = Res::Def(DefKind::Static(mutability), def_id); + ItemKind::Static(..) 
=> { self.r.define(parent, ident, ValueNS, (res, vis, sp, expansion)); } ItemKind::Const(..) => { - let res = Res::Def(DefKind::Const, def_id); self.r.define(parent, ident, ValueNS, (res, vis, sp, expansion)); } ItemKind::Fn(..) => { - let res = Res::Def(DefKind::Fn, def_id); self.r.define(parent, ident, ValueNS, (res, vis, sp, expansion)); // Functions introducing procedural macros reserve a slot @@ -691,14 +695,13 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { // These items live in the type namespace. ItemKind::TyAlias(..) => { - let res = Res::Def(DefKind::TyAlias, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); } ItemKind::Enum(_, _) => { let module = self.r.new_module( Some(parent), - ModuleKind::Def(DefKind::Enum, def_id, ident.name), + ModuleKind::Def(def_kind, def_id, ident.name), expansion.to_expn_id(), item.span, parent.no_implicit_prelude, @@ -708,14 +711,12 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { } ItemKind::TraitAlias(..) => { - let res = Res::Def(DefKind::TraitAlias, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); } // These items live in both the type and value namespaces. ItemKind::Struct(ref vdata, _) => { // Define a name in the type namespace. - let res = Res::Def(DefKind::Struct, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); // Record field names for error reporting. @@ -724,7 +725,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { // If this is a tuple or unit struct, define a name // in the value namespace as well. - if let Some((ctor_kind, ctor_node_id)) = CtorKind::from_ast(vdata) { + if let Some(ctor_node_id) = vdata.ctor_node_id() { // If the structure is marked as non_exhaustive then lower the visibility // to within the crate. let mut ctor_vis = if vis.is_public() @@ -750,8 +751,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { ret_fields.push(field_vis.to_def_id()); } let ctor_def_id = self.r.local_def_id(ctor_node_id); - let ctor_res = - Res::Def(DefKind::Ctor(CtorOf::Struct, ctor_kind), ctor_def_id.to_def_id()); + let ctor_res = self.res(ctor_def_id); self.r.define(parent, ident, ValueNS, (ctor_res, ctor_vis, sp, expansion)); self.r.visibilities.insert(ctor_def_id, ctor_vis); // We need the field visibility spans also for the constructor for E0603. @@ -764,7 +764,6 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { } ItemKind::Union(ref vdata, _) => { - let res = Res::Def(DefKind::Union, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); // Record field names for error reporting. @@ -776,7 +775,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { // Add all the items within to a new module. let module = self.r.new_module( Some(parent), - ModuleKind::Def(DefKind::Trait, def_id, ident.name), + ModuleKind::Def(def_kind, def_id, ident.name), expansion.to_expn_id(), item.span, parent.no_implicit_prelude, @@ -888,17 +887,16 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { fn build_reduced_graph_for_foreign_item(&mut self, item: &ForeignItem) { let local_def_id = self.r.local_def_id(item.id); let def_id = local_def_id.to_def_id(); - let (def_kind, ns) = match item.kind { - ForeignItemKind::Fn(..) => (DefKind::Fn, ValueNS), - ForeignItemKind::Static(_, mt, _) => (DefKind::Static(mt), ValueNS), - ForeignItemKind::TyAlias(..) => (DefKind::ForeignTy, TypeNS), - ForeignItemKind::MacCall(_) => unreachable!(), + let ns = match item.kind { + ForeignItemKind::Fn(..) 
=> ValueNS, + ForeignItemKind::Static(..) => ValueNS, + ForeignItemKind::TyAlias(..) => TypeNS, + ForeignItemKind::MacCall(..) => unreachable!(), }; let parent = self.parent_scope.module; let expansion = self.parent_scope.expansion; let vis = self.resolve_visibility(&item.vis); - let res = Res::Def(def_kind, def_id); - self.r.define(parent, item.ident, ns, (res, vis, item.span, expansion)); + self.r.define(parent, item.ident, ns, (self.res(def_id), vis, item.span, expansion)); self.r.visibilities.insert(local_def_id, vis); } @@ -1180,24 +1178,21 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { let parent_scope = self.parent_scope; let expansion = parent_scope.expansion; let def_id = self.r.local_def_id(item.id); - let (macro_kind, ident, span, macro_rules) = match &item.kind { - ItemKind::MacroDef(def) => { - let macro_kind = self.r.macro_map[&def_id.to_def_id()].ext.macro_kind(); - (macro_kind, item.ident, item.span, def.macro_rules) - } + let (res, ident, span, macro_rules) = match &item.kind { + ItemKind::MacroDef(def) => (self.res(def_id), item.ident, item.span, def.macro_rules), ItemKind::Fn(..) => match self.proc_macro_stub(item) { Some((macro_kind, ident, span)) => { + let res = Res::Def(DefKind::Macro(macro_kind), def_id.to_def_id()); let macro_data = MacroData::new(self.r.dummy_ext(macro_kind)); self.r.macro_map.insert(def_id.to_def_id(), macro_data); self.r.proc_macro_stubs.insert(def_id); - (macro_kind, ident, span, false) + (res, ident, span, false) } None => return parent_scope.macro_rules, }, _ => unreachable!(), }; - let res = Res::Def(DefKind::Macro(macro_kind), def_id.to_def_id()); self.r.local_macro_def_scopes.insert(def_id, parent_scope.module); if macro_rules { @@ -1363,22 +1358,21 @@ impl<'a, 'b, 'tcx> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b, 'tcx> { } if ctxt == AssocCtxt::Trait { - let (def_kind, ns) = match item.kind { - AssocItemKind::Const(..) => (DefKind::AssocConst, ValueNS), + let ns = match item.kind { + AssocItemKind::Const(..) => ValueNS, AssocItemKind::Fn(box Fn { ref sig, .. }) => { if sig.decl.has_self() { self.r.has_self.insert(local_def_id); } - (DefKind::AssocFn, ValueNS) + ValueNS } - AssocItemKind::Type(..) => (DefKind::AssocTy, TypeNS), + AssocItemKind::Type(..) => TypeNS, AssocItemKind::MacCall(_) => bug!(), // handled above }; let parent = self.parent_scope.module; let expansion = self.parent_scope.expansion; - let res = Res::Def(def_kind, def_id); - self.r.define(parent, item.ident, ns, (res, vis, item.span, expansion)); + self.r.define(parent, item.ident, ns, (self.res(def_id), vis, item.span, expansion)); } visit::walk_assoc_item(self, item, ctxt); @@ -1457,9 +1451,8 @@ impl<'a, 'b, 'tcx> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b, 'tcx> { // Define a name in the type namespace. let def_id = self.r.local_def_id(variant.id); - let res = Res::Def(DefKind::Variant, def_id.to_def_id()); let vis = self.resolve_visibility(&variant.vis); - self.r.define(parent, ident, TypeNS, (res, vis, variant.span, expn_id)); + self.r.define(parent, ident, TypeNS, (self.res(def_id), vis, variant.span, expn_id)); self.r.visibilities.insert(def_id, vis); // If the variant is marked as non_exhaustive then lower the visibility to within the crate. @@ -1471,10 +1464,9 @@ impl<'a, 'b, 'tcx> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b, 'tcx> { }; // Define a constructor name in the value namespace. 
-        if let Some((ctor_kind, ctor_node_id)) = CtorKind::from_ast(&variant.data) {
+        if let Some(ctor_node_id) = variant.data.ctor_node_id() {
             let ctor_def_id = self.r.local_def_id(ctor_node_id);
-            let ctor_res =
-                Res::Def(DefKind::Ctor(CtorOf::Variant, ctor_kind), ctor_def_id.to_def_id());
+            let ctor_res = self.res(ctor_def_id);
             self.r.define(parent, ident, ValueNS, (ctor_res, ctor_vis, variant.span, expn_id));
             self.r.visibilities.insert(ctor_def_id, ctor_vis);
         }
     }
diff --git a/src/bootstrap/src/bin/main.rs b/src/bootstrap/src/bin/main.rs
index b3c427721ea3b..32f6a3f80d064 100644
--- a/src/bootstrap/src/bin/main.rs
+++ b/src/bootstrap/src/bin/main.rs
@@ -10,8 +10,9 @@ use std::io::Write;
 #[cfg(all(any(unix, windows), not(target_os = "solaris")))]
 use std::process;
 use std::{
-    env, fs,
-    io::{self, IsTerminal},
+    env,
+    fs::{self, OpenOptions},
+    io::{self, BufRead, BufReader, IsTerminal},
 };
 
 use bootstrap::{
@@ -79,6 +80,9 @@ fn main() {
     }
 
     let pre_commit = config.src.join(".git").join("hooks").join("pre-commit");
+    let dump_bootstrap_shims = config.dump_bootstrap_shims;
+    let out_dir = config.out.clone();
+
     Build::new(config).build();
 
     if suggest_setup {
@@ -107,6 +111,29 @@ fn main() {
     if suggest_setup || changelog_suggestion.is_some() {
         println!("NOTE: this message was printed twice to make it more likely to be seen");
     }
+
+    if dump_bootstrap_shims {
+        let dump_dir = out_dir.join("bootstrap-shims-dump");
+        assert!(dump_dir.exists());
+
+        for entry in walkdir::WalkDir::new(&dump_dir) {
+            let entry = t!(entry);
+
+            if !entry.file_type().is_file() {
+                continue;
+            }
+
+            let file = t!(fs::File::open(&entry.path()));
+
+            // To ensure deterministic results we must sort the dump lines.
+            // This is necessary because the order of rustc invocations differs
+            // almost all the time.
+            let mut lines: Vec<String> = t!(BufReader::new(&file).lines().collect());
+            lines.sort_by_key(|t| t.to_lowercase());
+            let mut file = t!(OpenOptions::new().write(true).truncate(true).open(&entry.path()));
+            t!(file.write_all(lines.join("\n").as_bytes()));
+        }
+    }
 }
 
 fn check_version(config: &Config) -> Option<String> {
diff --git a/src/bootstrap/src/bin/rustc.rs b/src/bootstrap/src/bin/rustc.rs
index af66cb3ffd7c2..18136635c42a8 100644
--- a/src/bootstrap/src/bin/rustc.rs
+++ b/src/bootstrap/src/bin/rustc.rs
@@ -32,9 +32,7 @@ fn main() {
     let args = env::args_os().skip(1).collect::<Vec<_>>();
     let arg = |name| args.windows(2).find(|args| args[0] == name).and_then(|args| args[1].to_str());
 
-    // We don't use the stage in this shim, but let's parse it to make sure that we're invoked
-    // by bootstrap, or that we provide a helpful error message if not.
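Read in isolation, the post-processing applied to each dump file above amounts to the following sketch (a plain free function under the same assumptions, with bootstrap's `t!` error handling replaced by `io::Result`):

```rust
// Sort the recorded command lines case-insensitively so the dump file does not
// depend on the order in which the shims happened to be invoked, then rewrite
// the file in place.
use std::fs::{File, OpenOptions};
use std::io::{self, BufRead, BufReader, Write};
use std::path::Path;

fn normalize_dump(path: &Path) -> io::Result<()> {
    let mut lines: Vec<String> =
        BufReader::new(File::open(path)?).lines().collect::<io::Result<_>>()?;
    lines.sort_by_key(|line| line.to_lowercase());
    let mut file = OpenOptions::new().write(true).truncate(true).open(path)?;
    file.write_all(lines.join("\n").as_bytes())
}
```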
- bin_helpers::parse_rustc_stage(); + let stage = bin_helpers::parse_rustc_stage(); let verbose = bin_helpers::parse_rustc_verbose(); // Detect whether or not we're a build script depending on whether --target @@ -214,6 +212,8 @@ fn main() { } } + bin_helpers::maybe_dump(format!("stage{stage}-rustc"), &cmd); + let start = Instant::now(); let (child, status) = { let errmsg = format!("\nFailed to run:\n{cmd:?}\n-------------"); diff --git a/src/bootstrap/src/bin/rustdoc.rs b/src/bootstrap/src/bin/rustdoc.rs index 90bd822699c1f..b4d1415189cfa 100644 --- a/src/bootstrap/src/bin/rustdoc.rs +++ b/src/bootstrap/src/bin/rustdoc.rs @@ -62,6 +62,8 @@ fn main() { cmd.arg("-Zunstable-options"); cmd.arg("--check-cfg=cfg(bootstrap)"); + bin_helpers::maybe_dump(format!("stage{stage}-rustdoc"), &cmd); + if verbose > 1 { eprintln!( "rustdoc command: {:?}={:?} {:?}", diff --git a/src/bootstrap/src/core/build_steps/clean.rs b/src/bootstrap/src/core/build_steps/clean.rs index a3fb330ddfb6f..6372db96afb37 100644 --- a/src/bootstrap/src/core/build_steps/clean.rs +++ b/src/bootstrap/src/core/build_steps/clean.rs @@ -146,6 +146,7 @@ fn clean_default(build: &Build) { rm_rf(&build.out.join("tmp")); rm_rf(&build.out.join("dist")); rm_rf(&build.out.join("bootstrap").join(".last-warned-change-id")); + rm_rf(&build.out.join("bootstrap-shims-dump")); rm_rf(&build.out.join("rustfmt.stamp")); for host in &build.hosts { diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index 2356565190bdc..054c3ab14c08d 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -18,12 +18,12 @@ use crate::core::build_steps::tool::{self, SourceType}; use crate::core::build_steps::{check, clean, compile, dist, doc, install, run, setup, test}; use crate::core::config::flags::{Color, Subcommand}; use crate::core::config::{DryRun, SplitDebuginfo, TargetSelection}; +use crate::prepare_behaviour_dump_dir; use crate::utils::cache::{Cache, Interned, INTERNER}; use crate::utils::helpers::{self, add_dylib_path, add_link_lib_path, exe, linker_args}; use crate::utils::helpers::{libdir, linker_flags, output, t, LldThreads}; -use crate::Crate; use crate::EXTRA_CHECK_CFGS; -use crate::{Build, CLang, DocTests, GitRepo, Mode}; +use crate::{Build, CLang, Crate, DocTests, GitRepo, Mode}; pub use crate::Compiler; @@ -1788,6 +1788,16 @@ impl<'a> Builder<'a> { // Enable usage of unstable features cargo.env("RUSTC_BOOTSTRAP", "1"); + + if self.config.dump_bootstrap_shims { + prepare_behaviour_dump_dir(&self.build); + + cargo + .env("DUMP_BOOTSTRAP_SHIMS", self.build.out.join("bootstrap-shims-dump")) + .env("BUILD_OUT", &self.build.out) + .env("CARGO_HOME", t!(home::cargo_home())); + }; + self.add_rust_test_threads(&mut cargo); // Almost all of the crates that we compile as part of the bootstrap may diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 5a9e78f19d6e8..c3db5641ea47e 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -193,6 +193,7 @@ pub struct Config { pub cmd: Subcommand, pub incremental: bool, pub dry_run: DryRun, + pub dump_bootstrap_shims: bool, /// Arguments appearing after `--` to be forwarded to tools, /// e.g. `--fix-broken` or test arguments. 
pub free_args: Vec, @@ -1210,6 +1211,7 @@ impl Config { config.cmd = flags.cmd; config.incremental = flags.incremental; config.dry_run = if flags.dry_run { DryRun::UserSelected } else { DryRun::Disabled }; + config.dump_bootstrap_shims = flags.dump_bootstrap_shims; config.keep_stage = flags.keep_stage; config.keep_stage_std = flags.keep_stage_std; config.color = flags.color; diff --git a/src/bootstrap/src/core/config/flags.rs b/src/bootstrap/src/core/config/flags.rs index 27d0425df548a..0b13726081b12 100644 --- a/src/bootstrap/src/core/config/flags.rs +++ b/src/bootstrap/src/core/config/flags.rs @@ -85,6 +85,9 @@ pub struct Flags { #[arg(global(true), long)] /// dry run; don't build anything pub dry_run: bool, + /// Indicates whether to dump the work done from bootstrap shims + #[arg(global(true), long)] + pub dump_bootstrap_shims: bool, #[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")] /// stage to build (indicates compiler to use/test, e.g., stage 0 uses the /// bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.) diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index fd0b61eae8d7c..18f6dc5337694 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -1871,3 +1871,22 @@ pub fn generate_smart_stamp_hash(dir: &Path, additional_input: &str) -> String { hex::encode(hasher.finalize().as_slice()) } + +/// Ensures that the behavior dump directory is properly initialized. +pub fn prepare_behaviour_dump_dir(build: &Build) { + static INITIALIZED: OnceLock = OnceLock::new(); + + let dump_path = build.out.join("bootstrap-shims-dump"); + + let initialized = INITIALIZED.get().unwrap_or_else(|| &false); + if !initialized { + // clear old dumps + if dump_path.exists() { + t!(fs::remove_dir_all(&dump_path)); + } + + t!(fs::create_dir_all(&dump_path)); + + t!(INITIALIZED.set(true)); + } +} diff --git a/src/bootstrap/src/utils/bin_helpers.rs b/src/bootstrap/src/utils/bin_helpers.rs index c90fd2805e283..9c4e039ea69dd 100644 --- a/src/bootstrap/src/utils/bin_helpers.rs +++ b/src/bootstrap/src/utils/bin_helpers.rs @@ -2,14 +2,18 @@ //! dependency on the bootstrap library. This reduces the binary size and //! improves compilation time by reducing the linking time. +use std::env; +use std::fs::OpenOptions; +use std::io::Write; +use std::process::Command; +use std::str::FromStr; + /// Parses the value of the "RUSTC_VERBOSE" environment variable and returns it as a `usize`. /// If it was not defined, returns 0 by default. /// /// Panics if "RUSTC_VERBOSE" is defined with the value that is not an unsigned integer. pub(crate) fn parse_rustc_verbose() -> usize { - use std::str::FromStr; - - match std::env::var("RUSTC_VERBOSE") { + match env::var("RUSTC_VERBOSE") { Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"), Err(_) => 0, } @@ -19,10 +23,29 @@ pub(crate) fn parse_rustc_verbose() -> usize { /// /// If "RUSTC_STAGE" was not set, the program will be terminated with 101. pub(crate) fn parse_rustc_stage() -> String { - std::env::var("RUSTC_STAGE").unwrap_or_else(|_| { + env::var("RUSTC_STAGE").unwrap_or_else(|_| { // Don't panic here; it's reasonable to try and run these shims directly. Give a helpful error instead. eprintln!("rustc shim: FATAL: RUSTC_STAGE was not set"); eprintln!("rustc shim: NOTE: use `x.py build -vvv` to see all environment variables set by bootstrap"); std::process::exit(101); }) } + +/// Writes the command invocation to a file if `DUMP_BOOTSTRAP_SHIMS` is set during bootstrap. 
+/// +/// Before writing it, replaces user-specific values to create generic dumps for cross-environment +/// comparisons. +pub(crate) fn maybe_dump(dump_name: String, cmd: &Command) { + if let Ok(dump_dir) = env::var("DUMP_BOOTSTRAP_SHIMS") { + let dump_file = format!("{dump_dir}/{dump_name}"); + + let mut file = + OpenOptions::new().create(true).write(true).append(true).open(&dump_file).unwrap(); + + let cmd_dump = format!("{:?}\n", cmd); + let cmd_dump = cmd_dump.replace(&env::var("BUILD_OUT").unwrap(), "${BUILD_OUT}"); + let cmd_dump = cmd_dump.replace(&env::var("CARGO_HOME").unwrap(), "${CARGO_HOME}"); + + file.write_all(cmd_dump.as_bytes()).expect("Unable to write file"); + } +} diff --git a/src/doc/rustc/src/linker-plugin-lto.md b/src/doc/rustc/src/linker-plugin-lto.md index 858b7bc79c4c8..ff80f140482db 100644 --- a/src/doc/rustc/src/linker-plugin-lto.md +++ b/src/doc/rustc/src/linker-plugin-lto.md @@ -136,6 +136,7 @@ able to get around this problem by setting `-Clinker=lld-link` in RUSTFLAGS ```python from collections import defaultdict import subprocess +import sys def minor_version(version): return int(version.split('.')[1]) @@ -143,7 +144,7 @@ def minor_version(version): INSTALL_TOOLCHAIN = ["rustup", "toolchain", "install", "--profile", "minimal"] subprocess.run(INSTALL_TOOLCHAIN + ["nightly"]) -LOWER_BOUND = 65 +LOWER_BOUND = 73 NIGHTLY_VERSION = minor_version(subprocess.run( ["rustc", "+nightly", "--version"], capture_output=True, @@ -159,6 +160,7 @@ def llvm_version(toolchain): version_map = defaultdict(lambda: []) for version in range(LOWER_BOUND, NIGHTLY_VERSION - 1): toolchain = "1.{}.0".format(version) + print("Checking", toolchain, file=sys.stderr) subprocess.run( INSTALL_TOOLCHAIN + ["--no-self-update", toolchain], capture_output=True) @@ -196,6 +198,8 @@ The following table shows known good combinations of toolchain versions. | 1.52 - 1.55 | 12 | | 1.56 - 1.59 | 13 | | 1.60 - 1.64 | 14 | -| 1.65 | 15 | +| 1.65 - 1.69 | 15 | +| 1.70 - 1.72 | 16 | +| 1.73 - 1.74 | 17 | Note that the compatibility policy for this feature might change in the future. diff --git a/src/doc/rustc/src/platform-support/TEMPLATE.md b/src/doc/rustc/src/platform-support/TEMPLATE.md index e64783fcf194a..87dde722558b0 100644 --- a/src/doc/rustc/src/platform-support/TEMPLATE.md +++ b/src/doc/rustc/src/platform-support/TEMPLATE.md @@ -6,7 +6,7 @@ One-sentence description of the target (e.g. CPU, OS) ## Target maintainers -- Some Person, `email@example.org`, https://github.com/... +- Some Person, https://github.com/... 
## Requirements diff --git a/src/etc/completions/x.py.fish b/src/etc/completions/x.py.fish index 9909630703938..95bce9e31a4db 100644 --- a/src/etc/completions/x.py.fish +++ b/src/etc/completions/x.py.fish @@ -25,6 +25,7 @@ complete -c x.py -n "__fish_use_subcommand" -s v -l verbose -d 'use verbose outp complete -c x.py -n "__fish_use_subcommand" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_use_subcommand" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_use_subcommand" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_use_subcommand" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_use_subcommand" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_use_subcommand" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_use_subcommand" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -72,6 +73,7 @@ complete -c x.py -n "__fish_seen_subcommand_from build" -s v -l verbose -d 'use complete -c x.py -n "__fish_seen_subcommand_from build" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from build" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from build" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from build" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from build" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from build" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from build" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -106,6 +108,7 @@ complete -c x.py -n "__fish_seen_subcommand_from check" -s v -l verbose -d 'use complete -c x.py -n "__fish_seen_subcommand_from check" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from check" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from check" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from check" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from check" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from check" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from check" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -144,6 +147,7 @@ complete -c x.py -n "__fish_seen_subcommand_from clippy" -s v -l verbose -d 'use complete -c x.py -n "__fish_seen_subcommand_from clippy" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from clippy" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from clippy" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from clippy" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from clippy" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from clippy" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from clippy" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -177,6 +181,7 @@ complete -c x.py -n "__fish_seen_subcommand_from fix" -s v -l verbose -d 'use ve complete -c x.py -n "__fish_seen_subcommand_from fix" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from fix" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from fix" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from fix" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from fix" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from fix" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from fix" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -211,6 +216,7 @@ complete -c x.py -n "__fish_seen_subcommand_from fmt" -s v -l verbose -d 'use ve complete -c x.py -n "__fish_seen_subcommand_from fmt" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from fmt" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from fmt" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from fmt" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from fmt" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from fmt" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from fmt" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -246,6 +252,7 @@ complete -c x.py -n "__fish_seen_subcommand_from doc" -s v -l verbose -d 'use ve complete -c x.py -n "__fish_seen_subcommand_from doc" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from doc" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from doc" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from doc" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from doc" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from doc" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from doc" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -292,6 +299,7 @@ complete -c x.py -n "__fish_seen_subcommand_from test" -s v -l verbose -d 'use v complete -c x.py -n "__fish_seen_subcommand_from test" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from test" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from test" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from test" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from test" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from test" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from test" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -326,6 +334,7 @@ complete -c x.py -n "__fish_seen_subcommand_from bench" -s v -l verbose -d 'use complete -c x.py -n "__fish_seen_subcommand_from bench" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from bench" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from bench" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from bench" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from bench" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from bench" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from bench" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -360,6 +369,7 @@ complete -c x.py -n "__fish_seen_subcommand_from clean" -s v -l verbose -d 'use complete -c x.py -n "__fish_seen_subcommand_from clean" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from clean" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from clean" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from clean" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from clean" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from clean" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from clean" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -393,6 +403,7 @@ complete -c x.py -n "__fish_seen_subcommand_from dist" -s v -l verbose -d 'use v complete -c x.py -n "__fish_seen_subcommand_from dist" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from dist" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from dist" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from dist" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from dist" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from dist" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from dist" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -426,6 +437,7 @@ complete -c x.py -n "__fish_seen_subcommand_from install" -s v -l verbose -d 'us complete -c x.py -n "__fish_seen_subcommand_from install" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from install" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from install" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from install" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from install" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from install" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from install" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -460,6 +472,7 @@ complete -c x.py -n "__fish_seen_subcommand_from run" -s v -l verbose -d 'use ve complete -c x.py -n "__fish_seen_subcommand_from run" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from run" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from run" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from run" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from run" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from run" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from run" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -493,6 +506,7 @@ complete -c x.py -n "__fish_seen_subcommand_from setup" -s v -l verbose -d 'use complete -c x.py -n "__fish_seen_subcommand_from setup" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from setup" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from setup" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from setup" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from setup" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from setup" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from setup" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' @@ -527,6 +541,7 @@ complete -c x.py -n "__fish_seen_subcommand_from suggest" -s v -l verbose -d 'us complete -c x.py -n "__fish_seen_subcommand_from suggest" -s i -l incremental -d 'use incremental compilation' complete -c x.py -n "__fish_seen_subcommand_from suggest" -l include-default-paths -d 'include default paths in addition to the provided ones' complete -c x.py -n "__fish_seen_subcommand_from suggest" -l dry-run -d 'dry run; don\'t build anything' +complete -c x.py -n "__fish_seen_subcommand_from suggest" -l dump-bootstrap-shims -d 'Indicates whether to dump the work done from bootstrap shims' complete -c x.py -n "__fish_seen_subcommand_from suggest" -l json-output -d 'use message-format=json' complete -c x.py -n "__fish_seen_subcommand_from suggest" -l bypass-bootstrap-lock -d 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)' complete -c x.py -n "__fish_seen_subcommand_from suggest" -l llvm-profile-generate -d 'generate PGO profile with llvm built for rustc' diff --git a/src/etc/completions/x.py.ps1 b/src/etc/completions/x.py.ps1 index 7e926d10b3fe2..dabc3b16e4d79 100644 --- a/src/etc/completions/x.py.ps1 +++ b/src/etc/completions/x.py.ps1 @@ -51,6 +51,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -105,6 +106,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -146,6 +148,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -191,6 +194,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -231,6 +235,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -272,6 +277,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -314,6 +320,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -367,6 +374,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -408,6 +416,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -449,6 +458,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -489,6 +499,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -529,6 +540,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -570,6 +582,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -610,6 +623,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. 
This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') @@ -651,6 +665,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { [CompletionResult]::new('--incremental', 'incremental', [CompletionResultType]::ParameterName, 'use incremental compilation') [CompletionResult]::new('--include-default-paths', 'include-default-paths', [CompletionResultType]::ParameterName, 'include default paths in addition to the provided ones') [CompletionResult]::new('--dry-run', 'dry-run', [CompletionResultType]::ParameterName, 'dry run; don''t build anything') + [CompletionResult]::new('--dump-bootstrap-shims', 'dump-bootstrap-shims', [CompletionResultType]::ParameterName, 'Indicates whether to dump the work done from bootstrap shims') [CompletionResult]::new('--json-output', 'json-output', [CompletionResultType]::ParameterName, 'use message-format=json') [CompletionResult]::new('--bypass-bootstrap-lock', 'bypass-bootstrap-lock', [CompletionResultType]::ParameterName, 'Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)') [CompletionResult]::new('--llvm-profile-generate', 'llvm-profile-generate', [CompletionResultType]::ParameterName, 'generate PGO profile with llvm built for rustc') diff --git a/src/etc/completions/x.py.sh b/src/etc/completions/x.py.sh index 78754f6e7b2a9..f739f46b88bb2 100644 --- a/src/etc/completions/x.py.sh +++ b/src/etc/completions/x.py.sh @@ -61,7 +61,7 @@ _x.py() { case "${cmd}" in x.py) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]... build check clippy fix fmt doc test bench clean dist install run setup suggest" + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]... 
build check clippy fix fmt doc test bench clean dist install run setup suggest" if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -171,7 +171,7 @@ _x.py() { return 0 ;; x.py__bench) - opts="-v -i -j -h --test-args --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --test-args --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -285,7 +285,7 @@ _x.py() { return 0 ;; x.py__build) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -395,7 +395,7 @@ _x.py() { return 0 ;; x.py__check) - opts="-v -i -j -h --all-targets --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ opts="-v -i -j -h --all-targets --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -505,7 +505,7 @@ _x.py() { return 0 ;; x.py__clean) - opts="-v -i -j -h --all --stage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --all --stage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -615,7 +615,7 @@ _x.py() { return 0 ;; x.py__clippy) - opts="-A -D -W -F -v -i -j -h --fix --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-A -D -W -F -v -i -j -h --fix --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -741,7 +741,7 @@ _x.py() { return 0 ;; x.py__dist) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -851,7 +851,7 @@ _x.py() { return 0 ;; x.py__doc) - opts="-v -i -j -h --open --json --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --open --json --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -961,7 +961,7 @@ _x.py() { return 0 ;; x.py__fix) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1071,7 +1071,7 @@ _x.py() { return 0 ;; x.py__fmt) - opts="-v -i -j -h --check --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --check --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1181,7 +1181,7 @@ _x.py() { return 0 ;; x.py__install) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1291,7 +1291,7 @@ _x.py() { return 0 ;; x.py__run) - opts="-v -i -j -h --args --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --args --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1405,7 +1405,7 @@ _x.py() { return 0 ;; x.py__setup) - opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [|hook|vscode|link] [PATHS]... [ARGS]..." + opts="-v -i -j -h --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [|hook|vscode|link] [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1515,7 +1515,7 @@ _x.py() { return 0 ;; x.py__suggest) - opts="-v -i -j -h --run --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
+ opts="-v -i -j -h --run --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1625,7 +1625,7 @@ _x.py() { return 0 ;; x.py__test) - opts="-v -i -j -h --no-fail-fast --skip --test-args --rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --include-default-paths --rustc-error-format --on-fail --dry-run --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --no-fail-fast --skip --test-args --rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 diff --git a/src/etc/completions/x.py.zsh b/src/etc/completions/x.py.zsh index 29dfc29261560..639f0487f8b02 100644 --- a/src/etc/completions/x.py.zsh +++ b/src/etc/completions/x.py.zsh @@ -45,6 +45,7 @@ _x.py() { '--incremental[use incremental compilation]' \ '--include-default-paths[include default paths in addition to the provided ones]' \ '--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ '--json-output[use message-format=json]' \ '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \ '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \ @@ -95,6 +96,7 @@ _arguments "${_arguments_options[@]}" \ '--incremental[use incremental compilation]' \ '--include-default-paths[include default paths in addition to the provided ones]' \ '--dry-run[dry run; don'\''t build anything]' \ +'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \ '--json-output[use message-format=json]' \ '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. 
diff --git a/src/etc/completions/x.py.zsh b/src/etc/completions/x.py.zsh
index 29dfc29261560..639f0487f8b02 100644
--- a/src/etc/completions/x.py.zsh
+++ b/src/etc/completions/x.py.zsh
@@ -45,6 +45,7 @@ _x.py() {
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -95,6 +96,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -138,6 +140,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -185,6 +188,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -227,6 +231,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -270,6 +275,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -314,6 +320,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -369,6 +376,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -412,6 +420,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -455,6 +464,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -497,6 +507,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -539,6 +550,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -582,6 +594,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -624,6 +637,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
@@ -668,6 +682,7 @@ _arguments "${_arguments_options[@]}" \
 '--incremental[use incremental compilation]' \
 '--include-default-paths[include default paths in addition to the provided ones]' \
 '--dry-run[dry run; don'\''t build anything]' \
+'--dump-bootstrap-shims[Indicates whether to dump the work done from bootstrap shims]' \
 '--json-output[use message-format=json]' \
 '--bypass-bootstrap-lock[Bootstrap uses this value to decide whether it should bypass locking the build process. This is rarely needed (e.g., compiling the std library for different targets in parallel)]' \
 '--llvm-profile-generate[generate PGO profile with llvm built for rustc]' \
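The bash and zsh scripts above gain the identical `--dump-bootstrap-shims` entry in every subcommand block, which suggests both files are emitted from a single CLI definition rather than edited by hand. Purely as an illustration of that kind of pipeline (this is not bootstrap's actual generator, and the command and flag names below are placeholders), a clap_complete-style sketch:

```rust
// Sketch of how paired bash/zsh completion scripts stay in sync: both are
// generated from one clap `Command`, so adding a flag once updates every
// shell's script. Illustrative only; not the real x.py completion generator.
use clap::{Arg, ArgAction, Command};
use clap_complete::{generate, shells::{Bash, Zsh}};
use std::io;

fn cli() -> Command {
    Command::new("x.py").arg(
        Arg::new("dump-bootstrap-shims")
            .long("dump-bootstrap-shims")
            .action(ArgAction::SetTrue)
            .help("Indicates whether to dump the work done from bootstrap shims"),
    )
}

fn main() {
    // A real generator would write these into x.py.sh and x.py.zsh.
    generate(Bash, &mut cli(), "x.py", &mut io::stdout());
    generate(Zsh, &mut cli(), "x.py", &mut io::stdout());
}
```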
diff --git a/tests/ui/macros/stringify.rs b/tests/ui/macros/stringify.rs
index d65f7a8ea8c85..6fc12509aad6c 100644
--- a/tests/ui/macros/stringify.rs
+++ b/tests/ui/macros/stringify.rs
@@ -10,6 +10,8 @@
 #![feature(coroutines)]
 #![feature(decl_macro)]
 #![feature(explicit_tail_calls)]
+#![feature(if_let_guard)]
+#![feature(let_chains)]
 #![feature(more_qualified_paths)]
 #![feature(never_patterns)]
 #![feature(raw_ref_op)]
@@ -47,7 +49,7 @@
 // easy to find the cases where the two pretty-printing approaches give
 // different results.
 macro_rules! c2 {
-    ($frag:ident, [$($tt:tt)*], $s1:literal, $s2:literal) => {
+    ($frag:ident, [$($tt:tt)*], $s1:literal, $s2:literal $(,)?) => {
         assert_ne!($s1, $s2, "should use `c1!` instead");
         assert_eq!($frag!($($tt)*), $s1);
         assert_eq!(stringify!($($tt)*), $s2);
@@ -127,6 +129,23 @@ fn test_expr() {
 
     // ExprKind::Let
     c1!(expr, [ if let Some(a) = b { c } else { d } ], "if let Some(a) = b { c } else { d }");
+    c1!(expr, [ if let _ = true && false {} ], "if let _ = true && false {}");
+    c1!(expr, [ if let _ = (true && false) {} ], "if let _ = (true && false) {}");
+    macro_rules! c2_if_let {
+        ($expr:expr, $expr_expected:expr, $tokens_expected:expr $(,)?) => {
+            c2!(expr, [ if let _ = $expr {} ], $expr_expected, $tokens_expected);
+        };
+    }
+    c2_if_let!(
+        true && false,
+        "if let _ = (true && false) {}",
+        "if let _ = true && false {}",
+    );
+    c2!(expr,
+        [ match () { _ if let _ = Struct {} => {} } ],
+        "match () { _ if let _ = Struct {} => {} }",
+        "match() { _ if let _ = Struct {} => {} }",
+    );
 
     // ExprKind::If
     c1!(expr, [ if true {} ], "if true {}");
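The new `c1!` and `c2_if_let!` cases above pin down when the AST pretty-printer parenthesizes a `let` scrutinee: `true && false` round-trips as `(true && false)` (without the parens it would re-parse as a let chain), while a struct literal in a match guard, as in the `match () { _ if let _ = Struct {} => {} }` case, needs no parentheses. The toy, string-level model below is purely illustrative of that decision; `parenthesize_scrutinee` and its heuristics are invented for this sketch and are not the printer's real logic.

```rust
// Simplified model (not rustc's code) of when a `let` scrutinee gets wrapped
// in parentheses, matching the expectations encoded in the test cases above.
fn parenthesize_scrutinee(expr: &str, in_if_condition: bool) -> String {
    // String heuristics stand in for the AST checks the real printer performs.
    let looks_like_struct_lit = expr.ends_with("{}"); // e.g. `Struct {}`
    let is_low_precedence = expr.contains("&&");      // e.g. `true && false`
    if (in_if_condition && looks_like_struct_lit) || is_low_precedence {
        format!("({expr})")
    } else {
        expr.to_string()
    }
}

fn main() {
    // `if let _ = true && false {}` prints with parens around the `&&` ...
    assert_eq!(parenthesize_scrutinee("true && false", true), "(true && false)");
    // ... while the struct literal in the match-guard case stays bare.
    assert_eq!(parenthesize_scrutinee("Struct {}", false), "Struct {}");
}
```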
diff --git a/triagebot.toml b/triagebot.toml
index 775d9714c6062..2d9a5dabc8aa8 100644
--- a/triagebot.toml
+++ b/triagebot.toml
@@ -480,6 +480,10 @@ cc = ["@GuillaumeGomez"]
 message = "Some changes might have occurred in exhaustiveness checking"
 cc = ["@Nadrieril"]
 
+[mentions."compiler/rustc_pattern_analysis"]
+message = "Some changes might have occurred in exhaustiveness checking"
+cc = ["@Nadrieril"]
+
 [mentions."library/portable-simd"]
 message = """
 Portable SIMD is developed in its own repository. If possible, consider \