Gather rustc-specific functions around MatchCheckCtxt

parent 281002d42c
commit 3691a0aee5

8 changed files with 903 additions and 900 deletions
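The change is mechanical but wide: free functions and methods that used to take the match-checking context as an argument become methods on `MatchCheckCtxt`, which now lives in a new `cx` module. A hedged sketch of the call-site shift, using only names that appear in the hunks below (`cx`, `thir_pat` and `witness` are placeholder bindings; this is not compilable outside rustc):

```rust
// Before: the context is threaded through as an argument.
let deconstructed = DeconstructedPat::from_pat(cx, &thir_pat);
let diagnostic_pat = witness.to_diagnostic_pat(cx);

// After: the same operations are methods on MatchCheckCtxt.
let deconstructed = cx.lower_pat(&thir_pat);
let diagnostic_pat = cx.hoist_witness_pat(&witness);
```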
@@ -6,7 +6,7 @@ use rustc_errors::{
 };
 use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
 use rustc_middle::ty::{self, Ty};
-use rustc_pattern_analysis::{errors::Uncovered, usefulness::MatchCheckCtxt};
+use rustc_pattern_analysis::{cx::MatchCheckCtxt, errors::Uncovered};
 use rustc_span::symbol::Symbol;
 use rustc_span::Span;

@@ -1,8 +1,9 @@
 use rustc_pattern_analysis::constructor::Constructor;
+use rustc_pattern_analysis::cx::MatchCheckCtxt;
 use rustc_pattern_analysis::errors::Uncovered;
 use rustc_pattern_analysis::pat::{DeconstructedPat, WitnessPat};
 use rustc_pattern_analysis::usefulness::{
-    compute_match_usefulness, MatchArm, MatchCheckCtxt, Usefulness, UsefulnessReport,
+    compute_match_usefulness, MatchArm, Usefulness, UsefulnessReport,
 };
 
 use crate::errors::*;

@@ -286,7 +287,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
             check_borrow_conflicts_in_at_patterns(self, pat);
             check_for_bindings_named_same_as_variants(self, pat, refutable);
         });
-        Ok(cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, pat)))
+        Ok(cx.pattern_arena.alloc(cx.lower_pat(pat)))
     }
 }

@@ -926,7 +927,7 @@ fn report_non_exhaustive_match<'p, 'tcx>(
     pattern = if witnesses.len() < 4 {
         witnesses
             .iter()
-            .map(|witness| witness.to_diagnostic_pat(cx).to_string())
+            .map(|witness| cx.hoist_witness_pat(witness).to_string())
            .collect::<Vec<String>>()
            .join(" | ")
     } else {

@@ -950,7 +951,7 @@ fn report_non_exhaustive_match<'p, 'tcx>(
     if !is_empty_match {
         let mut non_exhaustive_tys = FxHashSet::default();
         // Look at the first witness.
-        collect_non_exhaustive_tys(cx.tcx, &witnesses[0], &mut non_exhaustive_tys);
+        collect_non_exhaustive_tys(cx, &witnesses[0], &mut non_exhaustive_tys);
 
         for ty in non_exhaustive_tys {
             if ty.is_ptr_sized_integral() {
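The `is_ptr_sized_integral` check above feeds the extra diagnostics note for `usize`/`isize` matches. A standalone, runnable illustration (not part of the diff): even naming the full range does not make a pointer-sized integer match exhaustive.

```rust
fn classify(x: usize) -> &'static str {
    match x {
        0..=usize::MAX => "in range",
        // Removing this arm is a compile error: rustc treats `usize` as having no
        // observable maximum, and reports a missing witness along the lines of `usize::MAX..`.
        _ => "unreachable in practice, still required",
    }
}
```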
@@ -1085,13 +1086,13 @@ fn joined_uncovered_patterns<'p, 'tcx>(
     witnesses: &[WitnessPat<'tcx>],
 ) -> String {
     const LIMIT: usize = 3;
-    let pat_to_str = |pat: &WitnessPat<'tcx>| pat.to_diagnostic_pat(cx).to_string();
+    let pat_to_str = |pat: &WitnessPat<'tcx>| cx.hoist_witness_pat(pat).to_string();
     match witnesses {
         [] => bug!(),
-        [witness] => format!("`{}`", witness.to_diagnostic_pat(cx)),
+        [witness] => format!("`{}`", cx.hoist_witness_pat(witness)),
         [head @ .., tail] if head.len() < LIMIT => {
             let head: Vec<_> = head.iter().map(pat_to_str).collect();
-            format!("`{}` and `{}`", head.join("`, `"), tail.to_diagnostic_pat(cx))
+            format!("`{}` and `{}`", head.join("`, `"), cx.hoist_witness_pat(tail))
         }
         _ => {
             let (head, tail) = witnesses.split_at(LIMIT);
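For reference, a standalone, runnable sketch of the joining behavior above with the rustc-specific pattern types replaced by plain strings. The hunk is cut off at `split_at`, so the final arm here is an assumption, not the exact rustc wording:

```rust
fn join_witnesses(witnesses: &[&str]) -> String {
    const LIMIT: usize = 3;
    match witnesses {
        [] => unreachable!("callers guarantee at least one witness"),
        [w] => format!("`{w}`"),
        [head @ .., tail] if head.len() < LIMIT => {
            format!("`{}` and `{}`", head.join("`, `"), tail)
        }
        _ => {
            // Assumed continuation of the truncated arm: show the first LIMIT witnesses
            // and summarize the rest.
            let (head, _) = witnesses.split_at(LIMIT);
            format!("`{}` and {} more", head.join("`, `"), witnesses.len() - LIMIT)
        }
    }
}
```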
@@ -1102,7 +1103,7 @@ fn joined_uncovered_patterns<'p, 'tcx>(
 }
 
 fn collect_non_exhaustive_tys<'tcx>(
-    tcx: TyCtxt<'tcx>,
+    cx: &MatchCheckCtxt<'_, 'tcx>,
     pat: &WitnessPat<'tcx>,
     non_exhaustive_tys: &mut FxHashSet<Ty<'tcx>>,
 ) {
@@ -1110,13 +1111,13 @@ fn collect_non_exhaustive_tys<'tcx>(
         non_exhaustive_tys.insert(pat.ty());
     }
     if let Constructor::IntRange(range) = pat.ctor() {
-        if range.is_beyond_boundaries(pat.ty(), tcx) {
+        if cx.is_range_beyond_boundaries(range, pat.ty()) {
            // The range denotes the values before `isize::MIN` or the values after `usize::MAX`/`isize::MAX`.
            non_exhaustive_tys.insert(pat.ty());
        }
    }
    pat.iter_fields()
-        .for_each(|field_pat| collect_non_exhaustive_tys(tcx, field_pat, non_exhaustive_tys))
+        .for_each(|field_pat| collect_non_exhaustive_tys(cx, field_pat, non_exhaustive_tys))
 }
 
 fn report_adt_defined_here<'tcx>(

@@ -158,21 +158,16 @@ use rustc_apfloat::ieee::{DoubleS, IeeeFloat, SingleS};
 use rustc_data_structures::fx::FxHashSet;
 use rustc_hir::RangeEnd;
 use rustc_index::IndexVec;
-use rustc_middle::middle::stability::EvalResult;
-use rustc_middle::mir;
-use rustc_middle::mir::interpret::Scalar;
-use rustc_middle::thir::{Pat, PatKind, PatRange, PatRangeBoundary};
+use rustc_middle::mir::Const;
 use rustc_middle::ty::layout::IntegerExt;
 use rustc_middle::ty::{self, Ty, TyCtxt};
-use rustc_span::DUMMY_SP;
-use rustc_target::abi::{Integer, VariantIdx, FIRST_VARIANT};
+use rustc_target::abi::{Integer, VariantIdx};
 
 use self::Constructor::*;
 use self::MaybeInfiniteInt::*;
 use self::SliceKind::*;
 
-use crate::pat::Fields;
-use crate::usefulness::{MatchCheckCtxt, PatCtxt};
+use crate::usefulness::PatCtxt;
 
 /// Whether we have seen a constructor in the column or not.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
@@ -196,7 +191,7 @@ pub enum MaybeInfiniteInt {
 
 impl MaybeInfiniteInt {
     // The return value of `signed_bias` should be XORed with a value to encode/decode it.
-    fn signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> u128 {
+    pub(crate) fn signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> u128 {
         match *ty.kind() {
             ty::Int(ity) => {
                 let bits = Integer::from_int_ty(&tcx, ity).size().bits() as u128;
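The XOR trick mentioned in the comment above can be checked in isolation. A runnable toy, specialized to 8 bits instead of the `u128` the real code uses: XORing with `1 << (BITS - 1)` maps signed values to unsigned values with the same ordering, so range arithmetic can ignore signedness.

```rust
fn bias_i8(x: i8) -> u8 {
    // Same idea as `signed_bias` + `new_finite` above, for an 8-bit signed type.
    (x as u8) ^ 0x80
}

fn main() {
    assert_eq!(bias_i8(i8::MIN), 0x00);
    assert_eq!(bias_i8(-1), 0x7F);
    assert_eq!(bias_i8(0), 0x80);
    assert_eq!(bias_i8(i8::MAX), 0xFF);
    assert!(bias_i8(-5) < bias_i8(3)); // ordering is preserved
}
```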
@@ -206,58 +201,13 @@ impl MaybeInfiniteInt {
         }
     }
 
-    fn new_finite(tcx: TyCtxt<'_>, ty: Ty<'_>, bits: u128) -> Self {
+    pub fn new_finite(tcx: TyCtxt<'_>, ty: Ty<'_>, bits: u128) -> Self {
         let bias = Self::signed_bias(tcx, ty);
         // Perform a shift if the underlying types are signed, which makes the interval arithmetic
         // type-independent.
         let x = bits ^ bias;
         Finite(x)
     }
-    pub(crate) fn from_pat_range_bdy<'tcx>(
-        bdy: PatRangeBoundary<'tcx>,
-        ty: Ty<'tcx>,
-        tcx: TyCtxt<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-    ) -> Self {
-        match bdy {
-            PatRangeBoundary::NegInfinity => NegInfinity,
-            PatRangeBoundary::Finite(value) => {
-                let bits = value.eval_bits(tcx, param_env);
-                Self::new_finite(tcx, ty, bits)
-            }
-            PatRangeBoundary::PosInfinity => PosInfinity,
-        }
-    }
-
-    /// Used only for diagnostics.
-    /// Note: it is possible to get `isize/usize::MAX+1` here, as explained in the doc for
-    /// [`IntRange::split`]. This cannot be represented as a `Const`, so we represent it with
-    /// `PosInfinity`.
-    fn to_diagnostic_pat_range_bdy<'tcx>(
-        self,
-        ty: Ty<'tcx>,
-        tcx: TyCtxt<'tcx>,
-    ) -> PatRangeBoundary<'tcx> {
-        match self {
-            NegInfinity => PatRangeBoundary::NegInfinity,
-            Finite(x) => {
-                let bias = Self::signed_bias(tcx, ty);
-                let bits = x ^ bias;
-                let size = ty.primitive_size(tcx);
-                match Scalar::try_from_uint(bits, size) {
-                    Some(scalar) => {
-                        let value = mir::Const::from_scalar(tcx, scalar, ty);
-                        PatRangeBoundary::Finite(value)
-                    }
-                    // The value doesn't fit. Since `x >= 0` and 0 always encodes the minimum value
-                    // for a type, the problem isn't that the value is too small. So it must be too
-                    // large.
-                    None => PatRangeBoundary::PosInfinity,
-                }
-            }
-            JustAfterMax | PosInfinity => PatRangeBoundary::PosInfinity,
-        }
-    }
 
     /// Note: this will not turn a finite value into an infinite one or vice-versa.
     pub fn minus_one(self) -> Self {
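The `None => PatRangeBoundary::PosInfinity` branch in the removed decoder above is the interesting case: decoding can produce `{u,i}size::MAX + 1`, which has no representation in the type. A standalone, runnable analogue of that size check (the real code goes through `Scalar::try_from_uint`):

```rust
fn decode_u64(bits: u128) -> Option<u64> {
    // `None` plays the role of `PatRangeBoundary::PosInfinity` in the code above.
    u64::try_from(bits).ok()
}

fn main() {
    assert!(decode_u64(u64::MAX as u128).is_some());
    assert!(decode_u64(u64::MAX as u128 + 1).is_none());
}
```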
@@ -290,16 +240,11 @@ impl MaybeInfiniteInt {
 /// space: i.e., `range.lo < range.hi`.
 #[derive(Clone, Copy, PartialEq, Eq)]
 pub struct IntRange {
-    pub(crate) lo: MaybeInfiniteInt, // Must not be `PosInfinity`.
-    pub(crate) hi: MaybeInfiniteInt, // Must not be `NegInfinity`.
+    pub lo: MaybeInfiniteInt, // Must not be `PosInfinity`.
+    pub hi: MaybeInfiniteInt, // Must not be `NegInfinity`.
 }
 
 impl IntRange {
-    #[inline]
-    pub(super) fn is_integral(ty: Ty<'_>) -> bool {
-        matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_))
-    }
-
     /// Best effort; will not know that e.g. `255u8..` is a singleton.
     pub fn is_singleton(&self) -> bool {
         // Since `lo` and `hi` can't be the same `Infinity` and `plus_one` never changes from finite
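On the "best effort" note above: `255u8..` covers exactly one value but is still represented as a range rather than detected as a singleton. A runnable illustration (half-open range patterns are stable Rust):

```rust
fn is_max(x: u8) -> bool {
    // Covers exactly one value, but the analysis above does not recognize it as a singleton.
    matches!(x, 255..)
}

fn main() {
    assert!(is_max(u8::MAX));
    assert!(!is_max(0));
}
```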
@@ -421,55 +366,6 @@ impl IntRange {
             (presence, range)
         })
     }
-
-    /// Whether the range denotes the fictitious values before `isize::MIN` or after
-    /// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist).
-    pub fn is_beyond_boundaries<'tcx>(&self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> bool {
-        ty.is_ptr_sized_integral() && {
-            // The two invalid ranges are `NegInfinity..isize::MIN` (represented as
-            // `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `to_diagnostic_pat_range_bdy`
-            // converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `self.lo`
-            // otherwise.
-            let lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx);
-            matches!(lo, PatRangeBoundary::PosInfinity)
-                || matches!(self.hi, MaybeInfiniteInt::Finite(0))
-        }
-    }
-    /// Only used for displaying the range.
-    pub(super) fn to_diagnostic_pat<'tcx>(&self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Pat<'tcx> {
-        let kind = if matches!((self.lo, self.hi), (NegInfinity, PosInfinity)) {
-            PatKind::Wild
-        } else if self.is_singleton() {
-            let lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx);
-            let value = lo.as_finite().unwrap();
-            PatKind::Constant { value }
-        } else {
-            // We convert to an inclusive range for diagnostics.
-            let mut end = RangeEnd::Included;
-            let mut lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx);
-            if matches!(lo, PatRangeBoundary::PosInfinity) {
-                // The only reason to get `PosInfinity` here is the special case where
-                // `to_diagnostic_pat_range_bdy` found `{u,i}size::MAX+1`. So the range denotes the
-                // fictitious values after `{u,i}size::MAX` (see [`IntRange::split`] for why we do
-                // this). We show this to the user as `usize::MAX..` which is slightly incorrect but
-                // probably clear enough.
-                let c = ty.numeric_max_val(tcx).unwrap();
-                let value = mir::Const::from_ty_const(c, tcx);
-                lo = PatRangeBoundary::Finite(value);
-            }
-            let hi = if matches!(self.hi, MaybeInfiniteInt::Finite(0)) {
-                // The range encodes `..ty::MIN`, so we can't convert it to an inclusive range.
-                end = RangeEnd::Excluded;
-                self.hi
-            } else {
-                self.hi.minus_one()
-            };
-            let hi = hi.to_diagnostic_pat_range_bdy(ty, tcx);
-            PatKind::Range(Box::new(PatRange { lo, hi, end, ty }))
-        };
-
-        Pat { ty, span: DUMMY_SP, kind }
-    }
 }
 
 /// Note: this will render signed ranges incorrectly. To render properly, convert to a pattern
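A runnable consequence of the `NegInfinity`/`PosInfinity` endpoints discussed in the removed code above: for fixed-width integers the full range is exhaustive on its own, while for `isize`/`usize` the extremes are deliberately not observable, so a wildcard is still required.

```rust
fn fixed(x: i8) -> u8 {
    match x {
        i8::MIN..=i8::MAX => 0, // exhaustive: no other arm needed
    }
}

fn pointer_sized(x: isize) -> u8 {
    match x {
        isize::MIN..=isize::MAX => 0,
        // Still required: the "missing" values are the fictitious ranges beyond the bounds.
        _ => 1,
    }
}
```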
@@ -742,7 +638,7 @@ pub enum Constructor<'tcx> {
     F32Range(IeeeFloat<SingleS>, IeeeFloat<SingleS>, RangeEnd),
     F64Range(IeeeFloat<DoubleS>, IeeeFloat<DoubleS>, RangeEnd),
     /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately.
-    Str(mir::Const<'tcx>),
+    Str(Const<'tcx>),
     /// Array and slice patterns.
     Slice(Slice),
     /// Constants that must not be matched structurally. They are treated as black boxes for the
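On the `Str` comment above: string literal constructors never form a complete set, which is one way `&str` differs from `&[u8]` for exhaustiveness purposes. A runnable illustration:

```rust
fn greet(s: &str) -> u8 {
    match s {
        "hello" => 1,
        "bye" => 2,
        // A wildcard (or binding) arm is always required for a `&str` scrutinee.
        _ => 0,
    }
}
```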
@@ -797,49 +693,10 @@ impl<'tcx> Constructor<'tcx> {
         }
     }
 
-    pub(crate) fn variant_index_for_adt(&self, adt: ty::AdtDef<'tcx>) -> VariantIdx {
-        match *self {
-            Variant(idx) => idx,
-            Single => {
-                assert!(!adt.is_enum());
-                FIRST_VARIANT
-            }
-            _ => bug!("bad constructor {:?} for adt {:?}", self, adt),
-        }
-    }
-
     /// The number of fields for this constructor. This must be kept in sync with
     /// `Fields::wildcards`.
     pub(crate) fn arity(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> usize {
-        match self {
-            Single | Variant(_) => match pcx.ty.kind() {
-                ty::Tuple(fs) => fs.len(),
-                ty::Ref(..) => 1,
-                ty::Adt(adt, ..) => {
-                    if adt.is_box() {
-                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
-                        // patterns. If we're here we can assume this is a box pattern.
-                        1
-                    } else {
-                        let variant = &adt.variant(self.variant_index_for_adt(*adt));
-                        Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant).count()
-                    }
-                }
-                _ => bug!("Unexpected type for `Single` constructor: {:?}", pcx.ty),
-            },
-            Slice(slice) => slice.arity(),
-            Bool(..)
-            | IntRange(..)
-            | F32Range(..)
-            | F64Range(..)
-            | Str(..)
-            | Opaque(..)
-            | NonExhaustive
-            | Hidden
-            | Missing { .. }
-            | Wildcard => 0,
-            Or => bug!("The `Or` constructor doesn't have a fixed arity"),
-        }
+        pcx.cx.ctor_arity(self, pcx.ty)
     }
 
     /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`.
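To keep the intent of the now-delegated method visible, here is a standalone toy version of what an "arity" is (not the rustc code): the number of subpattern slots each constructor exposes.

```rust
// Toy constructor set; the real `Constructor<'tcx>` has many more cases.
enum ToyCtor {
    Tuple(usize), // one slot per field
    Reference,    // `&p` exposes a single subpattern
    IntRange,     // ranges carry no subpatterns
}

fn arity(ctor: &ToyCtor) -> usize {
    match ctor {
        ToyCtor::Tuple(n) => *n,
        ToyCtor::Reference => 1,
        ToyCtor::IntRange => 0,
    }
}

fn main() {
    assert_eq!(arity(&ToyCtor::Tuple(3)), 3);
    assert_eq!(arity(&ToyCtor::IntRange), 0);
}
```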
@@ -974,123 +831,6 @@ pub(super) struct SplitConstructorSet<'tcx> {
|
|||
}
|
||||
|
||||
impl ConstructorSet {
|
||||
/// Creates a set that represents all the constructors of `ty`.
|
||||
///
|
||||
/// See at the top of the file for considerations of emptiness.
|
||||
#[instrument(level = "debug", skip(cx), ret)]
|
||||
pub fn for_ty<'p, 'tcx>(cx: &MatchCheckCtxt<'p, 'tcx>, ty: Ty<'tcx>) -> Self {
|
||||
let make_range = |start, end| {
|
||||
IntRange::from_range(
|
||||
MaybeInfiniteInt::new_finite(cx.tcx, ty, start),
|
||||
MaybeInfiniteInt::new_finite(cx.tcx, ty, end),
|
||||
RangeEnd::Included,
|
||||
)
|
||||
};
|
||||
// This determines the set of all possible constructors for the type `ty`. For numbers,
|
||||
// arrays and slices we use ranges and variable-length slices when appropriate.
|
||||
match ty.kind() {
|
||||
ty::Bool => Self::Bool,
|
||||
ty::Char => {
|
||||
// The valid Unicode Scalar Value ranges.
|
||||
Self::Integers {
|
||||
range_1: make_range('\u{0000}' as u128, '\u{D7FF}' as u128),
|
||||
range_2: Some(make_range('\u{E000}' as u128, '\u{10FFFF}' as u128)),
|
||||
}
|
||||
}
|
||||
&ty::Int(ity) => {
|
||||
let range = if ty.is_ptr_sized_integral() {
|
||||
// The min/max values of `isize` are not allowed to be observed.
|
||||
IntRange { lo: NegInfinity, hi: PosInfinity }
|
||||
} else {
|
||||
let bits = Integer::from_int_ty(&cx.tcx, ity).size().bits() as u128;
|
||||
let min = 1u128 << (bits - 1);
|
||||
let max = min - 1;
|
||||
make_range(min, max)
|
||||
};
|
||||
Self::Integers { range_1: range, range_2: None }
|
||||
}
|
||||
&ty::Uint(uty) => {
|
||||
let range = if ty.is_ptr_sized_integral() {
|
||||
// The max value of `usize` is not allowed to be observed.
|
||||
let lo = MaybeInfiniteInt::new_finite(cx.tcx, ty, 0);
|
||||
IntRange { lo, hi: PosInfinity }
|
||||
} else {
|
||||
let size = Integer::from_uint_ty(&cx.tcx, uty).size();
|
||||
let max = size.truncate(u128::MAX);
|
||||
make_range(0, max)
|
||||
};
|
||||
Self::Integers { range_1: range, range_2: None }
|
||||
}
|
||||
ty::Slice(sub_ty) => {
|
||||
Self::Slice { array_len: None, subtype_is_empty: cx.is_uninhabited(*sub_ty) }
|
||||
}
|
||||
ty::Array(sub_ty, len) => {
|
||||
// We treat arrays of a constant but unknown length like slices.
|
||||
Self::Slice {
|
||||
array_len: len.try_eval_target_usize(cx.tcx, cx.param_env).map(|l| l as usize),
|
||||
subtype_is_empty: cx.is_uninhabited(*sub_ty),
|
||||
}
|
||||
}
|
||||
ty::Adt(def, args) if def.is_enum() => {
|
||||
let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty);
|
||||
if def.variants().is_empty() && !is_declared_nonexhaustive {
|
||||
Self::NoConstructors
|
||||
} else {
|
||||
let mut variants =
|
||||
IndexVec::from_elem(VariantVisibility::Visible, def.variants());
|
||||
for (idx, v) in def.variants().iter_enumerated() {
|
||||
let variant_def_id = def.variant(idx).def_id;
|
||||
// Visibly uninhabited variants.
|
||||
let is_inhabited = v
|
||||
.inhabited_predicate(cx.tcx, *def)
|
||||
.instantiate(cx.tcx, args)
|
||||
.apply(cx.tcx, cx.param_env, cx.module);
|
||||
// Variants that depend on a disabled unstable feature.
|
||||
let is_unstable = matches!(
|
||||
cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None),
|
||||
EvalResult::Deny { .. }
|
||||
);
|
||||
// Foreign `#[doc(hidden)]` variants.
|
||||
let is_doc_hidden =
|
||||
cx.tcx.is_doc_hidden(variant_def_id) && !variant_def_id.is_local();
|
||||
let visibility = if !is_inhabited {
|
||||
// FIXME: handle empty+hidden
|
||||
VariantVisibility::Empty
|
||||
} else if is_unstable || is_doc_hidden {
|
||||
VariantVisibility::Hidden
|
||||
} else {
|
||||
VariantVisibility::Visible
|
||||
};
|
||||
variants[idx] = visibility;
|
||||
}
|
||||
|
||||
Self::Variants { variants, non_exhaustive: is_declared_nonexhaustive }
|
||||
}
|
||||
}
|
||||
ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => {
|
||||
Self::Single { empty: cx.is_uninhabited(ty) }
|
||||
}
|
||||
ty::Never => Self::NoConstructors,
|
||||
// This type is one for which we cannot list constructors, like `str` or `f64`.
|
||||
// FIXME(Nadrieril): which of these are actually allowed?
|
||||
ty::Float(_)
|
||||
| ty::Str
|
||||
| ty::Foreign(_)
|
||||
| ty::RawPtr(_)
|
||||
| ty::FnDef(_, _)
|
||||
| ty::FnPtr(_)
|
||||
| ty::Dynamic(_, _, _)
|
||||
| ty::Closure(_, _)
|
||||
| ty::Coroutine(_, _, _)
|
||||
| ty::Alias(_, _)
|
||||
| ty::Param(_)
|
||||
| ty::Error(_) => Self::Unlistable,
|
||||
ty::CoroutineWitness(_, _) | ty::Bound(_, _) | ty::Placeholder(_) | ty::Infer(_) => {
|
||||
bug!("Encountered unexpected type in `ConstructorSet::for_ty`: {ty:?}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This analyzes a column of constructors to 1/ determine which constructors of the type (if
|
||||
/// any) are missing; 2/ split constructors to handle non-trivial intersections e.g. on ranges
|
||||
/// or slices. This can get subtle; see [`SplitConstructorSet`] for details of this operation
|
||||
|
|
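The `ty::Char` arm in the constructor set above (and in its new home in `cx.rs` below) splits `char` into the two valid Unicode scalar value ranges around the surrogate gap. A runnable consequence:

```rust
fn class(c: char) -> u8 {
    // Exhaustive: together these two ranges cover every `char`.
    match c {
        '\u{0000}'..='\u{D7FF}' => 0,
        '\u{E000}'..='\u{10FFFF}' => 1,
    }
}

fn main() {
    assert_eq!(class('a'), 0);
    assert_eq!(class('\u{10FFFF}'), 1);
}
```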
837  compiler/rustc_pattern_analysis/src/cx.rs  (new file)
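The new file's central type is `MatchCheckCtxt`, defined in the hunk below; everything rustc-specific in the crate now hangs off it. A hedged sketch of how a caller might fill it in (field names as in the struct below; the construction site itself is not part of this diff, and the bindings shown are placeholders assumed to be in scope):

```rust
// Sketch only: assumes `tcx`, `def_id`, `param_env`, `arena`, `hir_id`, `match_span`,
// and `scrut_span` already exist at the call site in rustc_mir_build.
let cx = MatchCheckCtxt {
    tcx,
    module: def_id,
    param_env,
    pattern_arena: &arena,
    match_lint_level: hir_id,
    whole_match_span: Some(match_span),
    scrut_span,
    refutable: true,
    known_valid_scrutinee: true,
};
```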
@@ -0,0 +1,837 @@
+use std::fmt;
+use std::iter::once;
+
+use rustc_arena::TypedArena;
+use rustc_data_structures::captures::Captures;
+use rustc_hir::def_id::DefId;
+use rustc_hir::{HirId, RangeEnd};
+use rustc_index::Idx;
+use rustc_index::IndexVec;
+use rustc_middle::middle::stability::EvalResult;
+use rustc_middle::mir;
+use rustc_middle::mir::interpret::Scalar;
+use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange, PatRangeBoundary};
+use rustc_middle::ty::layout::IntegerExt;
+use rustc_middle::ty::{self, Ty, TyCtxt, VariantDef};
+use rustc_span::{Span, DUMMY_SP};
+use rustc_target::abi::{FieldIdx, Integer, VariantIdx, FIRST_VARIANT};
+use smallvec::SmallVec;
+
+use crate::constructor::{
+    Constructor, ConstructorSet, IntRange, MaybeInfiniteInt, OpaqueId, Slice, SliceKind,
+    VariantVisibility,
+};
+use crate::pat::{DeconstructedPat, WitnessPat};
+
+use Constructor::*;
+
+pub struct MatchCheckCtxt<'p, 'tcx> {
+    pub tcx: TyCtxt<'tcx>,
+    /// The module in which the match occurs. This is necessary for
+    /// checking inhabited-ness of types because whether a type is (visibly)
+    /// inhabited can depend on whether it was defined in the current module or
+    /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty
+    /// outside its module and should not be matchable with an empty match statement.
+    pub module: DefId,
+    pub param_env: ty::ParamEnv<'tcx>,
+    pub pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
+    /// Lint level at the match.
+    pub match_lint_level: HirId,
+    /// The span of the whole match, if applicable.
+    pub whole_match_span: Option<Span>,
+    /// Span of the scrutinee.
+    pub scrut_span: Span,
+    /// Only produce `NON_EXHAUSTIVE_OMITTED_PATTERNS` lint on refutable patterns.
+    pub refutable: bool,
+    /// Whether the data at the scrutinee is known to be valid. This is false if the scrutinee comes
+    /// from a union field, a pointer deref, or a reference deref (pending opsem decisions).
+    pub known_valid_scrutinee: bool,
+}
|
||||
impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
||||
pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
|
||||
!ty.is_inhabited_from(self.tcx, self.module, self.param_env)
|
||||
}
|
||||
|
||||
/// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
|
||||
pub fn is_foreign_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool {
|
||||
match ty.kind() {
|
||||
ty::Adt(def, ..) => {
|
||||
def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did().is_local()
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn alloc_wildcard_slice(
|
||||
&self,
|
||||
tys: impl IntoIterator<Item = Ty<'tcx>>,
|
||||
) -> &'p [DeconstructedPat<'p, 'tcx>] {
|
||||
self.pattern_arena
|
||||
.alloc_from_iter(tys.into_iter().map(|ty| DeconstructedPat::wildcard(ty, DUMMY_SP)))
|
||||
}
|
||||
|
||||
// In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
|
||||
// uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
|
||||
// This lists the fields we keep along with their types.
|
||||
pub(crate) fn list_variant_nonhidden_fields<'a>(
|
||||
&'a self,
|
||||
ty: Ty<'tcx>,
|
||||
variant: &'a VariantDef,
|
||||
) -> impl Iterator<Item = (FieldIdx, Ty<'tcx>)> + Captures<'p> + Captures<'a> {
|
||||
let cx = self;
|
||||
let ty::Adt(adt, args) = ty.kind() else { bug!() };
|
||||
// Whether we must not match the fields of this variant exhaustively.
|
||||
let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !adt.did().is_local();
|
||||
|
||||
variant.fields.iter().enumerate().filter_map(move |(i, field)| {
|
||||
let ty = field.ty(cx.tcx, args);
|
||||
// `field.ty()` doesn't normalize after substituting.
|
||||
let ty = cx.tcx.normalize_erasing_regions(cx.param_env, ty);
|
||||
let is_visible = adt.is_enum() || field.vis.is_accessible_from(cx.module, cx.tcx);
|
||||
let is_uninhabited = cx.tcx.features().exhaustive_patterns && cx.is_uninhabited(ty);
|
||||
|
||||
if is_uninhabited && (!is_visible || is_non_exhaustive) {
|
||||
None
|
||||
} else {
|
||||
Some((FieldIdx::new(i), ty))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn variant_index_for_adt(
|
||||
ctor: &Constructor<'tcx>,
|
||||
adt: ty::AdtDef<'tcx>,
|
||||
) -> VariantIdx {
|
||||
match *ctor {
|
||||
Variant(idx) => idx,
|
||||
Single => {
|
||||
assert!(!adt.is_enum());
|
||||
FIRST_VARIANT
|
||||
}
|
||||
_ => bug!("bad constructor {:?} for adt {:?}", ctor, adt),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new list of wildcard fields for a given constructor. The result must have a length
|
||||
/// of `ctor.arity()`.
|
||||
#[instrument(level = "trace", skip(self))]
|
||||
pub(crate) fn ctor_wildcard_fields(
|
||||
&self,
|
||||
ctor: &Constructor<'tcx>,
|
||||
ty: Ty<'tcx>,
|
||||
) -> &'p [DeconstructedPat<'p, 'tcx>] {
|
||||
let cx = self;
|
||||
match ctor {
|
||||
Single | Variant(_) => match ty.kind() {
|
||||
ty::Tuple(fs) => cx.alloc_wildcard_slice(fs.iter()),
|
||||
ty::Ref(_, rty, _) => cx.alloc_wildcard_slice(once(*rty)),
|
||||
ty::Adt(adt, args) => {
|
||||
if adt.is_box() {
|
||||
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
||||
// patterns. If we're here we can assume this is a box pattern.
|
||||
cx.alloc_wildcard_slice(once(args.type_at(0)))
|
||||
} else {
|
||||
let variant =
|
||||
&adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
||||
let tys = cx.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty);
|
||||
cx.alloc_wildcard_slice(tys)
|
||||
}
|
||||
}
|
||||
_ => bug!("Unexpected type for `Single` constructor: {:?}", ty),
|
||||
},
|
||||
Slice(slice) => match *ty.kind() {
|
||||
ty::Slice(ty) | ty::Array(ty, _) => {
|
||||
let arity = slice.arity();
|
||||
cx.alloc_wildcard_slice((0..arity).map(|_| ty))
|
||||
}
|
||||
_ => bug!("bad slice pattern {:?} {:?}", ctor, ty),
|
||||
},
|
||||
Bool(..)
|
||||
| IntRange(..)
|
||||
| F32Range(..)
|
||||
| F64Range(..)
|
||||
| Str(..)
|
||||
| Opaque(..)
|
||||
| NonExhaustive
|
||||
| Hidden
|
||||
| Missing { .. }
|
||||
| Wildcard => &[],
|
||||
Or => {
|
||||
bug!("called `Fields::wildcards` on an `Or` ctor")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The number of fields for this constructor. This must be kept in sync with
|
||||
/// `Fields::wildcards`.
|
||||
pub(crate) fn ctor_arity(&self, ctor: &Constructor<'tcx>, ty: Ty<'tcx>) -> usize {
|
||||
match ctor {
|
||||
Single | Variant(_) => match ty.kind() {
|
||||
ty::Tuple(fs) => fs.len(),
|
||||
ty::Ref(..) => 1,
|
||||
ty::Adt(adt, ..) => {
|
||||
if adt.is_box() {
|
||||
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
||||
// patterns. If we're here we can assume this is a box pattern.
|
||||
1
|
||||
} else {
|
||||
let variant =
|
||||
&adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
||||
self.list_variant_nonhidden_fields(ty, variant).count()
|
||||
}
|
||||
}
|
||||
_ => bug!("Unexpected type for `Single` constructor: {:?}", ty),
|
||||
},
|
||||
Slice(slice) => slice.arity(),
|
||||
Bool(..)
|
||||
| IntRange(..)
|
||||
| F32Range(..)
|
||||
| F64Range(..)
|
||||
| Str(..)
|
||||
| Opaque(..)
|
||||
| NonExhaustive
|
||||
| Hidden
|
||||
| Missing { .. }
|
||||
| Wildcard => 0,
|
||||
Or => bug!("The `Or` constructor doesn't have a fixed arity"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a set that represents all the constructors of `ty`.
|
||||
///
|
||||
/// See [`crate::constructor`] for considerations of emptiness.
|
||||
#[instrument(level = "debug", skip(self), ret)]
|
||||
pub fn ctors_for_ty(&self, ty: Ty<'tcx>) -> ConstructorSet {
|
||||
let cx = self;
|
||||
let make_range = |start, end| {
|
||||
IntRange::from_range(
|
||||
MaybeInfiniteInt::new_finite(cx.tcx, ty, start),
|
||||
MaybeInfiniteInt::new_finite(cx.tcx, ty, end),
|
||||
RangeEnd::Included,
|
||||
)
|
||||
};
|
||||
// This determines the set of all possible constructors for the type `ty`. For numbers,
|
||||
// arrays and slices we use ranges and variable-length slices when appropriate.
|
||||
match ty.kind() {
|
||||
ty::Bool => ConstructorSet::Bool,
|
||||
ty::Char => {
|
||||
// The valid Unicode Scalar Value ranges.
|
||||
ConstructorSet::Integers {
|
||||
range_1: make_range('\u{0000}' as u128, '\u{D7FF}' as u128),
|
||||
range_2: Some(make_range('\u{E000}' as u128, '\u{10FFFF}' as u128)),
|
||||
}
|
||||
}
|
||||
&ty::Int(ity) => {
|
||||
let range = if ty.is_ptr_sized_integral() {
|
||||
// The min/max values of `isize` are not allowed to be observed.
|
||||
IntRange {
|
||||
lo: MaybeInfiniteInt::NegInfinity,
|
||||
hi: MaybeInfiniteInt::PosInfinity,
|
||||
}
|
||||
} else {
|
||||
let bits = Integer::from_int_ty(&cx.tcx, ity).size().bits() as u128;
|
||||
let min = 1u128 << (bits - 1);
|
||||
let max = min - 1;
|
||||
make_range(min, max)
|
||||
};
|
||||
ConstructorSet::Integers { range_1: range, range_2: None }
|
||||
}
|
||||
&ty::Uint(uty) => {
|
||||
let range = if ty.is_ptr_sized_integral() {
|
||||
// The max value of `usize` is not allowed to be observed.
|
||||
let lo = MaybeInfiniteInt::new_finite(cx.tcx, ty, 0);
|
||||
IntRange { lo, hi: MaybeInfiniteInt::PosInfinity }
|
||||
} else {
|
||||
let size = Integer::from_uint_ty(&cx.tcx, uty).size();
|
||||
let max = size.truncate(u128::MAX);
|
||||
make_range(0, max)
|
||||
};
|
||||
ConstructorSet::Integers { range_1: range, range_2: None }
|
||||
}
|
||||
ty::Slice(sub_ty) => ConstructorSet::Slice {
|
||||
array_len: None,
|
||||
subtype_is_empty: cx.is_uninhabited(*sub_ty),
|
||||
},
|
||||
ty::Array(sub_ty, len) => {
|
||||
// We treat arrays of a constant but unknown length like slices.
|
||||
ConstructorSet::Slice {
|
||||
array_len: len.try_eval_target_usize(cx.tcx, cx.param_env).map(|l| l as usize),
|
||||
subtype_is_empty: cx.is_uninhabited(*sub_ty),
|
||||
}
|
||||
}
|
||||
ty::Adt(def, args) if def.is_enum() => {
|
||||
let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty);
|
||||
if def.variants().is_empty() && !is_declared_nonexhaustive {
|
||||
ConstructorSet::NoConstructors
|
||||
} else {
|
||||
let mut variants =
|
||||
IndexVec::from_elem(VariantVisibility::Visible, def.variants());
|
||||
for (idx, v) in def.variants().iter_enumerated() {
|
||||
let variant_def_id = def.variant(idx).def_id;
|
||||
// Visibly uninhabited variants.
|
||||
let is_inhabited = v
|
||||
.inhabited_predicate(cx.tcx, *def)
|
||||
.instantiate(cx.tcx, args)
|
||||
.apply(cx.tcx, cx.param_env, cx.module);
|
||||
// Variants that depend on a disabled unstable feature.
|
||||
let is_unstable = matches!(
|
||||
cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None),
|
||||
EvalResult::Deny { .. }
|
||||
);
|
||||
// Foreign `#[doc(hidden)]` variants.
|
||||
let is_doc_hidden =
|
||||
cx.tcx.is_doc_hidden(variant_def_id) && !variant_def_id.is_local();
|
||||
let visibility = if !is_inhabited {
|
||||
// FIXME: handle empty+hidden
|
||||
VariantVisibility::Empty
|
||||
} else if is_unstable || is_doc_hidden {
|
||||
VariantVisibility::Hidden
|
||||
} else {
|
||||
VariantVisibility::Visible
|
||||
};
|
||||
variants[idx] = visibility;
|
||||
}
|
||||
|
||||
ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive }
|
||||
}
|
||||
}
|
||||
ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => {
|
||||
ConstructorSet::Single { empty: cx.is_uninhabited(ty) }
|
||||
}
|
||||
ty::Never => ConstructorSet::NoConstructors,
|
||||
// This type is one for which we cannot list constructors, like `str` or `f64`.
|
||||
// FIXME(Nadrieril): which of these are actually allowed?
|
||||
ty::Float(_)
|
||||
| ty::Str
|
||||
| ty::Foreign(_)
|
||||
| ty::RawPtr(_)
|
||||
| ty::FnDef(_, _)
|
||||
| ty::FnPtr(_)
|
||||
| ty::Dynamic(_, _, _)
|
||||
| ty::Closure(_, _)
|
||||
| ty::Coroutine(_, _, _)
|
||||
| ty::Alias(_, _)
|
||||
| ty::Param(_)
|
||||
| ty::Error(_) => ConstructorSet::Unlistable,
|
||||
ty::CoroutineWitness(_, _) | ty::Bound(_, _) | ty::Placeholder(_) | ty::Infer(_) => {
|
||||
bug!("Encountered unexpected type in `ConstructorSet::for_ty`: {ty:?}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn lower_pat_range_bdy(
|
||||
&self,
|
||||
bdy: PatRangeBoundary<'tcx>,
|
||||
ty: Ty<'tcx>,
|
||||
) -> MaybeInfiniteInt {
|
||||
match bdy {
|
||||
PatRangeBoundary::NegInfinity => MaybeInfiniteInt::NegInfinity,
|
||||
PatRangeBoundary::Finite(value) => {
|
||||
let bits = value.eval_bits(self.tcx, self.param_env);
|
||||
MaybeInfiniteInt::new_finite(self.tcx, ty, bits)
|
||||
}
|
||||
PatRangeBoundary::PosInfinity => MaybeInfiniteInt::PosInfinity,
|
||||
}
|
||||
}
|
||||
|
||||
/// Note: the input patterns must have been lowered through
|
||||
/// `rustc_mir_build::thir::pattern::check_match::MatchVisitor::lower_pattern`.
|
||||
pub fn lower_pat(&self, pat: &Pat<'tcx>) -> DeconstructedPat<'p, 'tcx> {
|
||||
let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat));
|
||||
let cx = self;
|
||||
let ctor;
|
||||
let fields: &[_];
|
||||
match &pat.kind {
|
||||
PatKind::AscribeUserType { subpattern, .. }
|
||||
| PatKind::InlineConstant { subpattern, .. } => return self.lower_pat(subpattern),
|
||||
PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat),
|
||||
PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
|
||||
ctor = Wildcard;
|
||||
fields = &[];
|
||||
}
|
||||
PatKind::Deref { subpattern } => {
|
||||
ctor = Single;
|
||||
fields = singleton(self.lower_pat(subpattern));
|
||||
}
|
||||
PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
|
||||
match pat.ty.kind() {
|
||||
ty::Tuple(fs) => {
|
||||
ctor = Single;
|
||||
let mut wilds: SmallVec<[_; 2]> =
|
||||
fs.iter().map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect();
|
||||
for pat in subpatterns {
|
||||
wilds[pat.field.index()] = self.lower_pat(&pat.pattern);
|
||||
}
|
||||
fields = cx.pattern_arena.alloc_from_iter(wilds);
|
||||
}
|
||||
ty::Adt(adt, args) if adt.is_box() => {
|
||||
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
||||
// patterns. If we're here we can assume this is a box pattern.
|
||||
// FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
|
||||
// _)` or a box pattern. As a hack to avoid an ICE with the former, we
|
||||
// ignore other fields than the first one. This will trigger an error later
|
||||
// anyway.
|
||||
// See https://github.com/rust-lang/rust/issues/82772 ,
|
||||
// explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
|
||||
// The problem is that we can't know from the type whether we'll match
|
||||
// normally or through box-patterns. We'll have to figure out a proper
|
||||
// solution when we introduce generalized deref patterns. Also need to
|
||||
// prevent mixing of those two options.
|
||||
let pattern = subpatterns.into_iter().find(|pat| pat.field.index() == 0);
|
||||
let pat = if let Some(pat) = pattern {
|
||||
self.lower_pat(&pat.pattern)
|
||||
} else {
|
||||
DeconstructedPat::wildcard(args.type_at(0), pat.span)
|
||||
};
|
||||
ctor = Single;
|
||||
fields = singleton(pat);
|
||||
}
|
||||
ty::Adt(adt, _) => {
|
||||
ctor = match pat.kind {
|
||||
PatKind::Leaf { .. } => Single,
|
||||
PatKind::Variant { variant_index, .. } => Variant(variant_index),
|
||||
_ => bug!(),
|
||||
};
|
||||
let variant =
|
||||
&adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
||||
// For each field in the variant, we store the relevant index into `self.fields` if any.
|
||||
let mut field_id_to_id: Vec<Option<usize>> =
|
||||
(0..variant.fields.len()).map(|_| None).collect();
|
||||
let tys = cx
|
||||
.list_variant_nonhidden_fields(pat.ty, variant)
|
||||
.enumerate()
|
||||
.map(|(i, (field, ty))| {
|
||||
field_id_to_id[field.index()] = Some(i);
|
||||
ty
|
||||
});
|
||||
let mut wilds: SmallVec<[_; 2]> =
|
||||
tys.map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect();
|
||||
for pat in subpatterns {
|
||||
if let Some(i) = field_id_to_id[pat.field.index()] {
|
||||
wilds[i] = self.lower_pat(&pat.pattern);
|
||||
}
|
||||
}
|
||||
fields = cx.pattern_arena.alloc_from_iter(wilds);
|
||||
}
|
||||
_ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty),
|
||||
}
|
||||
}
|
||||
PatKind::Constant { value } => {
|
||||
match pat.ty.kind() {
|
||||
ty::Bool => {
|
||||
ctor = match value.try_eval_bool(cx.tcx, cx.param_env) {
|
||||
Some(b) => Bool(b),
|
||||
None => Opaque(OpaqueId::new()),
|
||||
};
|
||||
fields = &[];
|
||||
}
|
||||
ty::Char | ty::Int(_) | ty::Uint(_) => {
|
||||
ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
|
||||
Some(bits) => IntRange(IntRange::from_bits(cx.tcx, pat.ty, bits)),
|
||||
None => Opaque(OpaqueId::new()),
|
||||
};
|
||||
fields = &[];
|
||||
}
|
||||
ty::Float(ty::FloatTy::F32) => {
|
||||
ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
|
||||
Some(bits) => {
|
||||
use rustc_apfloat::Float;
|
||||
let value = rustc_apfloat::ieee::Single::from_bits(bits);
|
||||
F32Range(value, value, RangeEnd::Included)
|
||||
}
|
||||
None => Opaque(OpaqueId::new()),
|
||||
};
|
||||
fields = &[];
|
||||
}
|
||||
ty::Float(ty::FloatTy::F64) => {
|
||||
ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
|
||||
Some(bits) => {
|
||||
use rustc_apfloat::Float;
|
||||
let value = rustc_apfloat::ieee::Double::from_bits(bits);
|
||||
F64Range(value, value, RangeEnd::Included)
|
||||
}
|
||||
None => Opaque(OpaqueId::new()),
|
||||
};
|
||||
fields = &[];
|
||||
}
|
||||
ty::Ref(_, t, _) if t.is_str() => {
|
||||
// We want a `&str` constant to behave like a `Deref` pattern, to be compatible
|
||||
// with other `Deref` patterns. This could have been done in `const_to_pat`,
|
||||
// but that causes issues with the rest of the matching code.
|
||||
// So here, the constructor for a `"foo"` pattern is `&` (represented by
|
||||
// `Single`), and has one field. That field has constructor `Str(value)` and no
|
||||
// fields.
|
||||
// Note: `t` is `str`, not `&str`.
|
||||
let subpattern = DeconstructedPat::new(Str(*value), &[], *t, pat.span);
|
||||
ctor = Single;
|
||||
fields = singleton(subpattern)
|
||||
}
|
||||
// All constants that can be structurally matched have already been expanded
|
||||
// into the corresponding `Pat`s by `const_to_pat`. Constants that remain are
|
||||
// opaque.
|
||||
_ => {
|
||||
ctor = Opaque(OpaqueId::new());
|
||||
fields = &[];
|
||||
}
|
||||
}
|
||||
}
|
||||
PatKind::Range(patrange) => {
|
||||
let PatRange { lo, hi, end, .. } = patrange.as_ref();
|
||||
let ty = pat.ty;
|
||||
ctor = match ty.kind() {
|
||||
ty::Char | ty::Int(_) | ty::Uint(_) => {
|
||||
let lo = cx.lower_pat_range_bdy(*lo, ty);
|
||||
let hi = cx.lower_pat_range_bdy(*hi, ty);
|
||||
IntRange(IntRange::from_range(lo, hi, *end))
|
||||
}
|
||||
ty::Float(fty) => {
|
||||
use rustc_apfloat::Float;
|
||||
let lo = lo.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env));
|
||||
let hi = hi.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env));
|
||||
match fty {
|
||||
ty::FloatTy::F32 => {
|
||||
use rustc_apfloat::ieee::Single;
|
||||
let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY);
|
||||
let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY);
|
||||
F32Range(lo, hi, *end)
|
||||
}
|
||||
ty::FloatTy::F64 => {
|
||||
use rustc_apfloat::ieee::Double;
|
||||
let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY);
|
||||
let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY);
|
||||
F64Range(lo, hi, *end)
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => bug!("invalid type for range pattern: {}", ty),
|
||||
};
|
||||
fields = &[];
|
||||
}
|
||||
PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => {
|
||||
let array_len = match pat.ty.kind() {
|
||||
ty::Array(_, length) => {
|
||||
Some(length.eval_target_usize(cx.tcx, cx.param_env) as usize)
|
||||
}
|
||||
ty::Slice(_) => None,
|
||||
_ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty),
|
||||
};
|
||||
let kind = if slice.is_some() {
|
||||
SliceKind::VarLen(prefix.len(), suffix.len())
|
||||
} else {
|
||||
SliceKind::FixedLen(prefix.len() + suffix.len())
|
||||
};
|
||||
ctor = Slice(Slice::new(array_len, kind));
|
||||
fields = cx.pattern_arena.alloc_from_iter(
|
||||
prefix.iter().chain(suffix.iter()).map(|p| self.lower_pat(&*p)),
|
||||
)
|
||||
}
|
||||
PatKind::Or { .. } => {
|
||||
ctor = Or;
|
||||
let pats = expand_or_pat(pat);
|
||||
fields =
|
||||
cx.pattern_arena.alloc_from_iter(pats.into_iter().map(|p| self.lower_pat(p)))
|
||||
}
|
||||
PatKind::Never => {
|
||||
// FIXME(never_patterns): handle `!` in exhaustiveness. This is a sane default
|
||||
// in the meantime.
|
||||
ctor = Wildcard;
|
||||
fields = &[];
|
||||
}
|
||||
PatKind::Error(_) => {
|
||||
ctor = Opaque(OpaqueId::new());
|
||||
fields = &[];
|
||||
}
|
||||
}
|
||||
DeconstructedPat::new(ctor, fields, pat.ty, pat.span)
|
||||
}
|
||||
|
||||
/// Convert back to a `thir::PatRangeBoundary` for diagnostic purposes.
|
||||
/// Note: it is possible to get `isize/usize::MAX+1` here, as explained in the doc for
|
||||
/// [`IntRange::split`]. This cannot be represented as a `Const`, so we represent it with
|
||||
/// `PosInfinity`.
|
||||
pub(crate) fn hoist_pat_range_bdy(
|
||||
&self,
|
||||
miint: MaybeInfiniteInt,
|
||||
ty: Ty<'tcx>,
|
||||
) -> PatRangeBoundary<'tcx> {
|
||||
use MaybeInfiniteInt::*;
|
||||
let tcx = self.tcx;
|
||||
match miint {
|
||||
NegInfinity => PatRangeBoundary::NegInfinity,
|
||||
Finite(x) => {
|
||||
let bias = MaybeInfiniteInt::signed_bias(tcx, ty);
|
||||
let bits = x ^ bias;
|
||||
let size = ty.primitive_size(tcx);
|
||||
match Scalar::try_from_uint(bits, size) {
|
||||
Some(scalar) => {
|
||||
let value = mir::Const::from_scalar(tcx, scalar, ty);
|
||||
PatRangeBoundary::Finite(value)
|
||||
}
|
||||
// The value doesn't fit. Since `x >= 0` and 0 always encodes the minimum value
|
||||
// for a type, the problem isn't that the value is too small. So it must be too
|
||||
// large.
|
||||
None => PatRangeBoundary::PosInfinity,
|
||||
}
|
||||
}
|
||||
JustAfterMax | PosInfinity => PatRangeBoundary::PosInfinity,
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether the range denotes the fictitious values before `isize::MIN` or after
|
||||
/// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist).
|
||||
pub fn is_range_beyond_boundaries(&self, range: &IntRange, ty: Ty<'tcx>) -> bool {
|
||||
ty.is_ptr_sized_integral() && {
|
||||
// The two invalid ranges are `NegInfinity..isize::MIN` (represented as
|
||||
// `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `hoist_pat_range_bdy`
|
||||
// converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `range.lo`
|
||||
// otherwise.
|
||||
let lo = self.hoist_pat_range_bdy(range.lo, ty);
|
||||
matches!(lo, PatRangeBoundary::PosInfinity)
|
||||
|| matches!(range.hi, MaybeInfiniteInt::Finite(0))
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert back to a `thir::Pat` for diagnostic purposes.
|
||||
pub(crate) fn hoist_pat_range(&self, range: &IntRange, ty: Ty<'tcx>) -> Pat<'tcx> {
|
||||
use MaybeInfiniteInt::*;
|
||||
let cx = self;
|
||||
let kind = if matches!((range.lo, range.hi), (NegInfinity, PosInfinity)) {
|
||||
PatKind::Wild
|
||||
} else if range.is_singleton() {
|
||||
let lo = cx.hoist_pat_range_bdy(range.lo, ty);
|
||||
let value = lo.as_finite().unwrap();
|
||||
PatKind::Constant { value }
|
||||
} else {
|
||||
// We convert to an inclusive range for diagnostics.
|
||||
let mut end = RangeEnd::Included;
|
||||
let mut lo = cx.hoist_pat_range_bdy(range.lo, ty);
|
||||
if matches!(lo, PatRangeBoundary::PosInfinity) {
|
||||
// The only reason to get `PosInfinity` here is the special case where
|
||||
// `hoist_pat_range_bdy` found `{u,i}size::MAX+1`. So the range denotes the
|
||||
// fictitious values after `{u,i}size::MAX` (see [`IntRange::split`] for why we do
|
||||
// this). We show this to the user as `usize::MAX..` which is slightly incorrect but
|
||||
// probably clear enough.
|
||||
let c = ty.numeric_max_val(cx.tcx).unwrap();
|
||||
let value = mir::Const::from_ty_const(c, cx.tcx);
|
||||
lo = PatRangeBoundary::Finite(value);
|
||||
}
|
||||
let hi = if matches!(range.hi, Finite(0)) {
|
||||
// The range encodes `..ty::MIN`, so we can't convert it to an inclusive range.
|
||||
end = RangeEnd::Excluded;
|
||||
range.hi
|
||||
} else {
|
||||
range.hi.minus_one()
|
||||
};
|
||||
let hi = cx.hoist_pat_range_bdy(hi, ty);
|
||||
PatKind::Range(Box::new(PatRange { lo, hi, end, ty }))
|
||||
};
|
||||
|
||||
Pat { ty, span: DUMMY_SP, kind }
|
||||
}
|
||||
/// Convert back to a `thir::Pat` for diagnostic purposes. This panics for patterns that don't
|
||||
/// appear in diagnostics, like float ranges.
|
||||
pub fn hoist_witness_pat(&self, pat: &WitnessPat<'tcx>) -> Pat<'tcx> {
|
||||
let cx = self;
|
||||
let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild);
|
||||
let mut subpatterns = pat.iter_fields().map(|p| Box::new(cx.hoist_witness_pat(p)));
|
||||
let kind = match pat.ctor() {
|
||||
Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) },
|
||||
IntRange(range) => return self.hoist_pat_range(range, pat.ty()),
|
||||
Single | Variant(_) => match pat.ty().kind() {
|
||||
ty::Tuple(..) => PatKind::Leaf {
|
||||
subpatterns: subpatterns
|
||||
.enumerate()
|
||||
.map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern })
|
||||
.collect(),
|
||||
},
|
||||
ty::Adt(adt_def, _) if adt_def.is_box() => {
|
||||
// Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
|
||||
// of `std`). So this branch is only reachable when the feature is enabled and
|
||||
// the pattern is a box pattern.
|
||||
PatKind::Deref { subpattern: subpatterns.next().unwrap() }
|
||||
}
|
||||
ty::Adt(adt_def, args) => {
|
||||
let variant_index =
|
||||
MatchCheckCtxt::variant_index_for_adt(&pat.ctor(), *adt_def);
|
||||
let variant = &adt_def.variant(variant_index);
|
||||
let subpatterns = cx
|
||||
.list_variant_nonhidden_fields(pat.ty(), variant)
|
||||
.zip(subpatterns)
|
||||
.map(|((field, _ty), pattern)| FieldPat { field, pattern })
|
||||
.collect();
|
||||
|
||||
if adt_def.is_enum() {
|
||||
PatKind::Variant { adt_def: *adt_def, args, variant_index, subpatterns }
|
||||
} else {
|
||||
PatKind::Leaf { subpatterns }
|
||||
}
|
||||
}
|
||||
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
|
||||
// be careful to reconstruct the correct constant pattern here. However a string
|
||||
// literal pattern will never be reported as a non-exhaustiveness witness, so we
|
||||
// ignore this issue.
|
||||
ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
|
||||
_ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), pat.ty()),
|
||||
},
|
||||
Slice(slice) => {
|
||||
match slice.kind {
|
||||
SliceKind::FixedLen(_) => PatKind::Slice {
|
||||
prefix: subpatterns.collect(),
|
||||
slice: None,
|
||||
suffix: Box::new([]),
|
||||
},
|
||||
SliceKind::VarLen(prefix, _) => {
|
||||
let mut subpatterns = subpatterns.peekable();
|
||||
let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect();
|
||||
if slice.array_len.is_some() {
|
||||
// Improves diagnostics a bit: if the type is a known-size array, instead
|
||||
// of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`.
|
||||
// This is incorrect if the size is not known, since `[_, ..]` captures
|
||||
// arrays of lengths `>= 1` whereas `[..]` captures any length.
|
||||
while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) {
|
||||
prefix.pop();
|
||||
}
|
||||
while subpatterns.peek().is_some()
|
||||
&& is_wildcard(subpatterns.peek().unwrap())
|
||||
{
|
||||
subpatterns.next();
|
||||
}
|
||||
}
|
||||
let suffix: Box<[_]> = subpatterns.collect();
|
||||
let wild = Pat::wildcard_from_ty(pat.ty());
|
||||
PatKind::Slice {
|
||||
prefix: prefix.into_boxed_slice(),
|
||||
slice: Some(Box::new(wild)),
|
||||
suffix,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
&Str(value) => PatKind::Constant { value },
|
||||
Wildcard | NonExhaustive | Hidden => PatKind::Wild,
|
||||
Missing { .. } => bug!(
|
||||
"trying to convert a `Missing` constructor into a `Pat`; this is probably a bug,
|
||||
`Missing` should have been processed in `apply_constructors`"
|
||||
),
|
||||
F32Range(..) | F64Range(..) | Opaque(..) | Or => {
|
||||
bug!("can't convert to pattern: {:?}", pat)
|
||||
}
|
||||
};
|
||||
|
||||
Pat { ty: pat.ty(), span: DUMMY_SP, kind }
|
||||
}
|
||||
|
||||
/// Best-effort `Debug` implementation.
|
||||
pub(crate) fn debug_pat(
|
||||
f: &mut fmt::Formatter<'_>,
|
||||
pat: &DeconstructedPat<'p, 'tcx>,
|
||||
) -> fmt::Result {
|
||||
let mut first = true;
|
||||
let mut start_or_continue = |s| {
|
||||
if first {
|
||||
first = false;
|
||||
""
|
||||
} else {
|
||||
s
|
||||
}
|
||||
};
|
||||
let mut start_or_comma = || start_or_continue(", ");
|
||||
|
||||
match pat.ctor() {
|
||||
Single | Variant(_) => match pat.ty().kind() {
|
||||
ty::Adt(def, _) if def.is_box() => {
|
||||
// Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
|
||||
// of `std`). So this branch is only reachable when the feature is enabled and
|
||||
// the pattern is a box pattern.
|
||||
let subpattern = pat.iter_fields().next().unwrap();
|
||||
write!(f, "box {subpattern:?}")
|
||||
}
|
||||
ty::Adt(..) | ty::Tuple(..) => {
|
||||
let variant = match pat.ty().kind() {
|
||||
ty::Adt(adt, _) => Some(
|
||||
adt.variant(MatchCheckCtxt::variant_index_for_adt(pat.ctor(), *adt)),
|
||||
),
|
||||
ty::Tuple(_) => None,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
if let Some(variant) = variant {
|
||||
write!(f, "{}", variant.name)?;
|
||||
}
|
||||
|
||||
// Without `cx`, we can't know which field corresponds to which, so we can't
|
||||
// get the names of the fields. Instead we just display everything as a tuple
|
||||
// struct, which should be good enough.
|
||||
write!(f, "(")?;
|
||||
for p in pat.iter_fields() {
|
||||
write!(f, "{}", start_or_comma())?;
|
||||
write!(f, "{p:?}")?;
|
||||
}
|
||||
write!(f, ")")
|
||||
}
|
||||
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
|
||||
// be careful to detect strings here. However a string literal pattern will never
|
||||
// be reported as a non-exhaustiveness witness, so we can ignore this issue.
|
||||
ty::Ref(_, _, mutbl) => {
|
||||
let subpattern = pat.iter_fields().next().unwrap();
|
||||
write!(f, "&{}{:?}", mutbl.prefix_str(), subpattern)
|
||||
}
|
||||
_ => write!(f, "_"),
|
||||
},
|
||||
Slice(slice) => {
|
||||
let mut subpatterns = pat.iter_fields();
|
||||
write!(f, "[")?;
|
||||
match slice.kind {
|
||||
SliceKind::FixedLen(_) => {
|
||||
for p in subpatterns {
|
||||
write!(f, "{}{:?}", start_or_comma(), p)?;
|
||||
}
|
||||
}
|
||||
SliceKind::VarLen(prefix_len, _) => {
|
||||
for p in subpatterns.by_ref().take(prefix_len) {
|
||||
write!(f, "{}{:?}", start_or_comma(), p)?;
|
||||
}
|
||||
write!(f, "{}", start_or_comma())?;
|
||||
write!(f, "..")?;
|
||||
for p in subpatterns {
|
||||
write!(f, "{}{:?}", start_or_comma(), p)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
write!(f, "]")
|
||||
}
|
||||
Bool(b) => write!(f, "{b}"),
|
||||
// Best-effort, will render signed ranges incorrectly
|
||||
IntRange(range) => write!(f, "{range:?}"),
|
||||
F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
|
||||
F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
|
||||
Str(value) => write!(f, "{value}"),
|
||||
Opaque(..) => write!(f, "<constant pattern>"),
|
||||
Or => {
|
||||
for pat in pat.iter_fields() {
|
||||
write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Wildcard | Missing { .. } | NonExhaustive | Hidden => write!(f, "_ : {:?}", pat.ty()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
|
||||
fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
|
||||
fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) {
|
||||
if let PatKind::Or { pats } = &pat.kind {
|
||||
for pat in pats.iter() {
|
||||
expand(pat, vec);
|
||||
}
|
||||
} else {
|
||||
vec.push(pat)
|
||||
}
|
||||
}
|
||||
|
||||
let mut pats = Vec::new();
|
||||
expand(pat, &mut pats);
|
||||
pats
|
||||
}
|
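`is_foreign_non_exhaustive_enum` in the file above is what makes matches on upstream `#[non_exhaustive]` enums demand a wildcard. A runnable example using a real such enum from the standard library:

```rust
use std::io::ErrorKind;

fn describe(kind: ErrorKind) -> &'static str {
    match kind {
        ErrorKind::NotFound => "not found",
        ErrorKind::PermissionDenied => "permission denied",
        // Required: `ErrorKind` is non-exhaustive and defined in another crate,
        // so the visible variants never form a complete constructor set here.
        _ => "something else",
    }
}
```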
@@ -1,4 +1,4 @@
-use crate::{pat::WitnessPat, usefulness::MatchCheckCtxt};
+use crate::{cx::MatchCheckCtxt, pat::WitnessPat};
 
 use rustc_errors::{AddToDiagnostic, Diagnostic, SubdiagnosticMessage};
 use rustc_macros::{LintDiagnostic, Subdiagnostic};

@@ -24,18 +24,18 @@ impl<'tcx> Uncovered<'tcx> {
         cx: &MatchCheckCtxt<'p, 'tcx>,
         witnesses: Vec<WitnessPat<'tcx>>,
     ) -> Self {
-        let witness_1 = witnesses.get(0).unwrap().to_diagnostic_pat(cx);
+        let witness_1 = cx.hoist_witness_pat(witnesses.get(0).unwrap());
         Self {
             span,
             count: witnesses.len(),
             // Substitute dummy values if witnesses is smaller than 3. These will never be read.
             witness_2: witnesses
                 .get(1)
-                .map(|w| w.to_diagnostic_pat(cx))
+                .map(|w| cx.hoist_witness_pat(w))
                 .unwrap_or_else(|| witness_1.clone()),
             witness_3: witnesses
                 .get(2)
-                .map(|w| w.to_diagnostic_pat(cx))
+                .map(|w| cx.hoist_witness_pat(w))
                 .unwrap_or_else(|| witness_1.clone()),
             witness_1,
             remainder: witnesses.len().saturating_sub(3),

@@ -1,6 +1,7 @@
 //! Analysis of patterns, notably match exhaustiveness checking.
 
 pub mod constructor;
+pub mod cx;
 pub mod errors;
 pub mod pat;
 pub mod usefulness;

@@ -2,175 +2,19 @@
 //! fields. This file defines types that represent patterns in this way.
 use std::cell::Cell;
 use std::fmt;
-use std::iter::once;
 
 use smallvec::{smallvec, SmallVec};
 
 use rustc_data_structures::captures::Captures;
-use rustc_hir::RangeEnd;
-use rustc_index::Idx;
-use rustc_middle::mir;
-use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange};
-use rustc_middle::ty::{self, Ty, VariantDef};
+use rustc_middle::ty::{self, Ty};
 use rustc_span::{Span, DUMMY_SP};
-use rustc_target::abi::FieldIdx;
 
 use self::Constructor::*;
 use self::SliceKind::*;
 
-use crate::constructor::{Constructor, IntRange, MaybeInfiniteInt, OpaqueId, Slice, SliceKind};
-use crate::usefulness::{MatchCheckCtxt, PatCtxt};
/// A value can be decomposed into a constructor applied to some fields. This struct represents
|
||||
/// those fields, generalized to allow patterns in each field. See also `Constructor`.
|
||||
///
|
||||
/// This is constructed for a constructor using [`Fields::wildcards()`]. The idea is that
|
||||
/// [`Fields::wildcards()`] constructs a list of fields where all entries are wildcards, and then
|
||||
/// given a pattern we fill some of the fields with its subpatterns.
|
||||
/// In the following example `Fields::wildcards` returns `[_, _, _, _]`. Then in
|
||||
/// `extract_pattern_arguments` we fill some of the entries, and the result is
|
||||
/// `[Some(0), _, _, _]`.
|
||||
/// ```compile_fail,E0004
|
||||
/// # fn foo() -> [Option<u8>; 4] { [None; 4] }
|
||||
/// let x: [Option<u8>; 4] = foo();
|
||||
/// match x {
|
||||
/// [Some(0), ..] => {}
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Note that the number of fields of a constructor may not match the fields declared in the
|
||||
/// original struct/variant. This happens if a private or `non_exhaustive` field is uninhabited,
|
||||
/// because the code mustn't observe that it is uninhabited. In that case that field is not
|
||||
/// included in `fields`. For that reason, when you have a `FieldIdx` you must use
|
||||
/// `index_with_declared_idx`.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct Fields<'p, 'tcx> {
|
||||
fields: &'p [DeconstructedPat<'p, 'tcx>],
|
||||
}
|
||||
|
||||
impl<'p, 'tcx> Fields<'p, 'tcx> {
|
||||
fn empty() -> Self {
|
||||
Fields { fields: &[] }
|
||||
}
|
||||
|
||||
fn singleton(cx: &MatchCheckCtxt<'p, 'tcx>, field: DeconstructedPat<'p, 'tcx>) -> Self {
|
||||
let field: &_ = cx.pattern_arena.alloc(field);
|
||||
Fields { fields: std::slice::from_ref(field) }
|
||||
}
|
||||
|
||||
pub fn from_iter(
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
fields: impl IntoIterator<Item = DeconstructedPat<'p, 'tcx>>,
|
||||
) -> Self {
|
||||
let fields: &[_] = cx.pattern_arena.alloc_from_iter(fields);
|
||||
Fields { fields }
|
||||
}
|
||||
|
||||
fn wildcards_from_tys(
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
tys: impl IntoIterator<Item = Ty<'tcx>>,
|
||||
) -> Self {
|
||||
Fields::from_iter(cx, tys.into_iter().map(|ty| DeconstructedPat::wildcard(ty, DUMMY_SP)))
|
||||
}
|
||||
|
||||
// In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
|
||||
// uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
|
||||
// This lists the fields we keep along with their types.
|
||||
pub(crate) fn list_variant_nonhidden_fields<'a>(
|
||||
cx: &'a MatchCheckCtxt<'p, 'tcx>,
|
||||
ty: Ty<'tcx>,
|
||||
variant: &'a VariantDef,
|
||||
) -> impl Iterator<Item = (FieldIdx, Ty<'tcx>)> + Captures<'a> + Captures<'p> {
|
||||
let ty::Adt(adt, args) = ty.kind() else { bug!() };
|
||||
// Whether we must not match the fields of this variant exhaustively.
|
||||
let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !adt.did().is_local();
|
||||
|
||||
variant.fields.iter().enumerate().filter_map(move |(i, field)| {
|
||||
let ty = field.ty(cx.tcx, args);
|
||||
// `field.ty()` doesn't normalize after substituting.
|
||||
let ty = cx.tcx.normalize_erasing_regions(cx.param_env, ty);
|
||||
let is_visible = adt.is_enum() || field.vis.is_accessible_from(cx.module, cx.tcx);
|
||||
let is_uninhabited = cx.tcx.features().exhaustive_patterns && cx.is_uninhabited(ty);
|
||||
|
||||
if is_uninhabited && (!is_visible || is_non_exhaustive) {
|
||||
None
|
||||
} else {
|
||||
Some((FieldIdx::new(i), ty))
|
||||
}
|
||||
})
|
||||
}
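
    // Illustrative sketch, not from the commit: the surface-Rust situation that
    // `list_variant_nonhidden_fields` handles. All names below (`upstream`,
    // `Void`, `Wrapper`, `demo`) are made up. A field that is both private and
    // uninhabited is hidden from match checking, so code outside `upstream`
    // cannot observe that `Wrapper` is impossible to construct.
    //
    //     mod upstream {
    //         pub enum Void {}
    //         pub struct Wrapper {
    //             pub tag: bool,
    //             _private: Void, // uninhabited and not visible outside this module
    //         }
    //     }
    //
    //     #[allow(dead_code)]
    //     fn demo(w: upstream::Wrapper) -> bool {
    //         // `..` elides the hidden field; an arm-less match would be rejected here.
    //         match w {
    //             upstream::Wrapper { tag, .. } => tag,
    //         }
    //     }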

    /// Creates a new list of wildcard fields for a given constructor. The result must have a
    /// length of `constructor.arity()`.
    #[instrument(level = "trace")]
    pub(super) fn wildcards(pcx: &PatCtxt<'_, 'p, 'tcx>, constructor: &Constructor<'tcx>) -> Self {
        let ret = match constructor {
            Single | Variant(_) => match pcx.ty.kind() {
                ty::Tuple(fs) => Fields::wildcards_from_tys(pcx.cx, fs.iter()),
                ty::Ref(_, rty, _) => Fields::wildcards_from_tys(pcx.cx, once(*rty)),
                ty::Adt(adt, args) => {
                    if adt.is_box() {
                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
                        // patterns. If we're here we can assume this is a box pattern.
                        Fields::wildcards_from_tys(pcx.cx, once(args.type_at(0)))
                    } else {
                        let variant = &adt.variant(constructor.variant_index_for_adt(*adt));
                        let tys = Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant)
                            .map(|(_, ty)| ty);
                        Fields::wildcards_from_tys(pcx.cx, tys)
                    }
                }
                _ => bug!("Unexpected type for `Single` constructor: {:?}", pcx),
            },
            Slice(slice) => match *pcx.ty.kind() {
                ty::Slice(ty) | ty::Array(ty, _) => {
                    let arity = slice.arity();
                    Fields::wildcards_from_tys(pcx.cx, (0..arity).map(|_| ty))
                }
                _ => bug!("bad slice pattern {:?} {:?}", constructor, pcx),
            },
            Bool(..)
            | IntRange(..)
            | F32Range(..)
            | F64Range(..)
            | Str(..)
            | Opaque(..)
            | NonExhaustive
            | Hidden
            | Missing { .. }
            | Wildcard => Fields::empty(),
            Or => {
                bug!("called `Fields::wildcards` on an `Or` ctor")
            }
        };
        debug!(?ret);
        ret
    }

    /// Returns the list of patterns.
    pub(super) fn iter_patterns<'a>(
        &'a self,
    ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
        self.fields.iter()
    }
}
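
// Illustrative sketch, not from the commit: what "a constructor applied to
// all-wildcard fields" looks like in surface Rust, without any compiler-internal
// API. For `Option<(u8, u8)>`, the `Some` constructor has arity 1 and its
// wildcard form is `Some(_)`; expanding the tuple field one level further gives
// `Some((_, _))`. `covered` is a made-up example function.
#[allow(dead_code)]
fn covered(x: Option<(u8, u8)>) -> bool {
    match x {
        None => false,
        // `Some` applied to one wildcard per field of its payload.
        Some((_, _)) => true,
    }
}
// Usage: `covered(Some((1, 2)))` evaluates to `true`.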

/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
    fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) {
        if let PatKind::Or { pats } = &pat.kind {
            for pat in pats.iter() {
                expand(pat, vec);
            }
        } else {
            vec.push(pat)
        }
    }

    let mut pats = Vec::new();
    expand(pat, &mut pats);
    pats
}
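
// Illustrative sketch, not from the commit: the same flattening on a toy
// pattern type, so the recursion above can be run in isolation. `ToyPat` and
// `expand_toy` are made-up stand-ins for `thir::Pat` and `expand_or_pat`.
#[derive(Debug)]
enum ToyPat {
    Wild,
    Lit(i32),
    Or(Vec<ToyPat>),
}

fn expand_toy<'a>(pat: &'a ToyPat, out: &mut Vec<&'a ToyPat>) {
    if let ToyPat::Or(pats) = pat {
        for p in pats {
            expand_toy(p, out);
        }
    } else {
        out.push(pat);
    }
}

fn main() {
    // `1 | (2 | _)` flattens into the three leaves `1`, `2`, `_`.
    let pat = ToyPat::Or(vec![ToyPat::Lit(1), ToyPat::Or(vec![ToyPat::Lit(2), ToyPat::Wild])]);
    let mut leaves = Vec::new();
    expand_toy(&pat, &mut leaves);
    assert_eq!(leaves.len(), 3);
}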

use crate::constructor::{Constructor, SliceKind};
use crate::cx::MatchCheckCtxt;
use crate::usefulness::PatCtxt;

/// Values and patterns can be represented as a constructor applied to some fields. This represents
/// a pattern in this form.
@@ -178,9 +22,14 @@ fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>>
/// during analysis. For this reason they cannot be cloned.
/// A `DeconstructedPat` will almost always come from user input; the only exception are some
/// `Wildcard`s introduced during specialization.
///
/// Note that the number of fields may not match the fields declared in the original struct/variant.
/// This happens if a private or `non_exhaustive` field is uninhabited, because the code mustn't
/// observe that it is uninhabited. In that case that field is not included in `fields`. Care must
/// be taken when converting to/from `thir::Pat`.
pub struct DeconstructedPat<'p, 'tcx> {
    ctor: Constructor<'tcx>,
    fields: Fields<'p, 'tcx>,
    fields: &'p [DeconstructedPat<'p, 'tcx>],
    ty: Ty<'tcx>,
    span: Span,
    /// Whether removing this arm would change the behavior of the match expression.
@@ -189,227 +38,18 @@ pub struct DeconstructedPat<'p, 'tcx> {
impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
    pub(super) fn wildcard(ty: Ty<'tcx>, span: Span) -> Self {
        Self::new(Wildcard, Fields::empty(), ty, span)
        Self::new(Wildcard, &[], ty, span)
    }

    pub(super) fn new(
        ctor: Constructor<'tcx>,
        fields: Fields<'p, 'tcx>,
        fields: &'p [DeconstructedPat<'p, 'tcx>],
        ty: Ty<'tcx>,
        span: Span,
    ) -> Self {
        DeconstructedPat { ctor, fields, ty, span, useful: Cell::new(false) }
    }

    /// Note: the input patterns must have been lowered through
    /// `rustc_mir_build::thir::pattern::check_match::MatchVisitor::lower_pattern`.
    pub fn from_pat(cx: &MatchCheckCtxt<'p, 'tcx>, pat: &Pat<'tcx>) -> Self {
        let mkpat = |pat| DeconstructedPat::from_pat(cx, pat);
        let ctor;
        let fields;
        match &pat.kind {
            PatKind::AscribeUserType { subpattern, .. }
            | PatKind::InlineConstant { subpattern, .. } => return mkpat(subpattern),
            PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat),
            PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
                ctor = Wildcard;
                fields = Fields::empty();
            }
            PatKind::Deref { subpattern } => {
                ctor = Single;
                fields = Fields::singleton(cx, mkpat(subpattern));
            }
            PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
                match pat.ty.kind() {
                    ty::Tuple(fs) => {
                        ctor = Single;
                        let mut wilds: SmallVec<[_; 2]> =
                            fs.iter().map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect();
                        for pat in subpatterns {
                            wilds[pat.field.index()] = mkpat(&pat.pattern);
                        }
                        fields = Fields::from_iter(cx, wilds);
                    }
                    ty::Adt(adt, args) if adt.is_box() => {
                        // The only legal patterns of type `Box` (outside `std`) are `_` and box
                        // patterns. If we're here we can assume this is a box pattern.
                        // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
                        // _)` or a box pattern. As a hack to avoid an ICE with the former, we
                        // ignore other fields than the first one. This will trigger an error later
                        // anyway.
                        // See https://github.com/rust-lang/rust/issues/82772 ,
                        // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
                        // The problem is that we can't know from the type whether we'll match
                        // normally or through box-patterns. We'll have to figure out a proper
                        // solution when we introduce generalized deref patterns. Also need to
                        // prevent mixing of those two options.
                        let pattern = subpatterns.into_iter().find(|pat| pat.field.index() == 0);
                        let pat = if let Some(pat) = pattern {
                            mkpat(&pat.pattern)
                        } else {
                            DeconstructedPat::wildcard(args.type_at(0), pat.span)
                        };
                        ctor = Single;
                        fields = Fields::singleton(cx, pat);
                    }
                    ty::Adt(adt, _) => {
                        ctor = match pat.kind {
                            PatKind::Leaf { .. } => Single,
                            PatKind::Variant { variant_index, .. } => Variant(variant_index),
                            _ => bug!(),
                        };
                        let variant = &adt.variant(ctor.variant_index_for_adt(*adt));
                        // For each field in the variant, we store the relevant index into `self.fields` if any.
                        let mut field_id_to_id: Vec<Option<usize>> =
                            (0..variant.fields.len()).map(|_| None).collect();
                        let tys = Fields::list_variant_nonhidden_fields(cx, pat.ty, variant)
                            .enumerate()
                            .map(|(i, (field, ty))| {
                                field_id_to_id[field.index()] = Some(i);
                                ty
                            });
                        let mut wilds: SmallVec<[_; 2]> =
                            tys.map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect();
                        for pat in subpatterns {
                            if let Some(i) = field_id_to_id[pat.field.index()] {
                                wilds[i] = mkpat(&pat.pattern);
                            }
                        }
                        fields = Fields::from_iter(cx, wilds);
                    }
                    _ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty),
                }
            }
            PatKind::Constant { value } => {
                match pat.ty.kind() {
                    ty::Bool => {
                        ctor = match value.try_eval_bool(cx.tcx, cx.param_env) {
                            Some(b) => Bool(b),
                            None => Opaque(OpaqueId::new()),
                        };
                        fields = Fields::empty();
                    }
                    ty::Char | ty::Int(_) | ty::Uint(_) => {
                        ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
                            Some(bits) => IntRange(IntRange::from_bits(cx.tcx, pat.ty, bits)),
                            None => Opaque(OpaqueId::new()),
                        };
                        fields = Fields::empty();
                    }
                    ty::Float(ty::FloatTy::F32) => {
                        ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
                            Some(bits) => {
                                use rustc_apfloat::Float;
                                let value = rustc_apfloat::ieee::Single::from_bits(bits);
                                F32Range(value, value, RangeEnd::Included)
                            }
                            None => Opaque(OpaqueId::new()),
                        };
                        fields = Fields::empty();
                    }
                    ty::Float(ty::FloatTy::F64) => {
                        ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
                            Some(bits) => {
                                use rustc_apfloat::Float;
                                let value = rustc_apfloat::ieee::Double::from_bits(bits);
                                F64Range(value, value, RangeEnd::Included)
                            }
                            None => Opaque(OpaqueId::new()),
                        };
                        fields = Fields::empty();
                    }
                    ty::Ref(_, t, _) if t.is_str() => {
                        // We want a `&str` constant to behave like a `Deref` pattern, to be compatible
                        // with other `Deref` patterns. This could have been done in `const_to_pat`,
                        // but that causes issues with the rest of the matching code.
                        // So here, the constructor for a `"foo"` pattern is `&` (represented by
                        // `Single`), and has one field. That field has constructor `Str(value)` and no
                        // fields.
                        // Note: `t` is `str`, not `&str`.
                        let subpattern =
                            DeconstructedPat::new(Str(*value), Fields::empty(), *t, pat.span);
                        ctor = Single;
                        fields = Fields::singleton(cx, subpattern)
                    }
                    // All constants that can be structurally matched have already been expanded
                    // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are
                    // opaque.
                    _ => {
                        ctor = Opaque(OpaqueId::new());
                        fields = Fields::empty();
                    }
                }
            }
            PatKind::Range(patrange) => {
                let PatRange { lo, hi, end, .. } = patrange.as_ref();
                let ty = pat.ty;
                ctor = match ty.kind() {
                    ty::Char | ty::Int(_) | ty::Uint(_) => {
                        let lo =
                            MaybeInfiniteInt::from_pat_range_bdy(*lo, ty, cx.tcx, cx.param_env);
                        let hi =
                            MaybeInfiniteInt::from_pat_range_bdy(*hi, ty, cx.tcx, cx.param_env);
                        IntRange(IntRange::from_range(lo, hi, *end))
                    }
                    ty::Float(fty) => {
                        use rustc_apfloat::Float;
                        let lo = lo.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env));
                        let hi = hi.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env));
                        match fty {
                            ty::FloatTy::F32 => {
                                use rustc_apfloat::ieee::Single;
                                let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY);
                                let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY);
                                F32Range(lo, hi, *end)
                            }
                            ty::FloatTy::F64 => {
                                use rustc_apfloat::ieee::Double;
                                let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY);
                                let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY);
                                F64Range(lo, hi, *end)
                            }
                        }
                    }
                    _ => bug!("invalid type for range pattern: {}", ty),
                };
                fields = Fields::empty();
            }
            PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => {
                let array_len = match pat.ty.kind() {
                    ty::Array(_, length) => {
                        Some(length.eval_target_usize(cx.tcx, cx.param_env) as usize)
                    }
                    ty::Slice(_) => None,
                    _ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty),
                };
                let kind = if slice.is_some() {
                    VarLen(prefix.len(), suffix.len())
                } else {
                    FixedLen(prefix.len() + suffix.len())
                };
                ctor = Slice(Slice::new(array_len, kind));
                fields =
                    Fields::from_iter(cx, prefix.iter().chain(suffix.iter()).map(|p| mkpat(&*p)));
            }
            PatKind::Or { .. } => {
                ctor = Or;
                let pats = expand_or_pat(pat);
                fields = Fields::from_iter(cx, pats.into_iter().map(mkpat));
            }
            PatKind::Never => {
                // FIXME(never_patterns): handle `!` in exhaustiveness. This is a sane default
                // in the meantime.
                ctor = Wildcard;
                fields = Fields::empty();
            }
            PatKind::Error(_) => {
                ctor = Opaque(OpaqueId::new());
                fields = Fields::empty();
            }
        }
        DeconstructedPat::new(ctor, fields, pat.ty, pat.span)
    }
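
    // Illustrative sketch, not from the commit: the shape that the lowering
    // above produces, shown on a toy type so it stands alone. `ToyCtor` and
    // `ToyDeconstructed` are made-up stand-ins for `Constructor` and
    // `DeconstructedPat`; the real types also carry a type, a span, and
    // arena-allocated fields.
    //
    //     #[derive(Debug)]
    //     enum ToyCtor {
    //         Variant(&'static str),
    //         IntRange(u128, u128),
    //     }
    //
    //     #[derive(Debug)]
    //     struct ToyDeconstructed {
    //         ctor: ToyCtor,
    //         fields: Vec<ToyDeconstructed>,
    //     }
    //
    //     // `Some(0)` lowers to the `Some` constructor applied to one field,
    //     // which is itself the range `0..=0` applied to no fields.
    //     fn some_zero() -> ToyDeconstructed {
    //         ToyDeconstructed {
    //             ctor: ToyCtor::Variant("Some"),
    //             fields: vec![ToyDeconstructed { ctor: ToyCtor::IntRange(0, 0), fields: vec![] }],
    //         }
    //     }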

    pub(super) fn is_or_pat(&self) -> bool {
        matches!(self.ctor, Or)
    }
@@ -435,7 +75,7 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
    pub fn iter_fields<'a>(
        &'a self,
    ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
        self.fields.iter_patterns()
        self.fields.iter()
    }

    /// Specialize this pattern with a constructor.
@@ -448,7 +88,7 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
        match (&self.ctor, other_ctor) {
            (Wildcard, _) => {
                // We return a wildcard for each field of `other_ctor`.
                Fields::wildcards(pcx, other_ctor).iter_patterns().collect()
                pcx.cx.ctor_wildcard_fields(other_ctor, pcx.ty).iter().collect()
            }
            (Slice(self_slice), Slice(other_slice))
                if self_slice.arity() != other_slice.arity() =>
@@ -464,8 +104,8 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
                        let (ty::Slice(inner_ty) | ty::Array(inner_ty, _)) = *self.ty.kind() else {
                            bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty);
                        };
                        let prefix = &self.fields.fields[..prefix];
                        let suffix = &self.fields.fields[self_slice.arity() - suffix..];
                        let prefix = &self.fields[..prefix];
                        let suffix = &self.fields[self_slice.arity() - suffix..];
                        let wildcard: &_ = pcx
                            .cx
                            .pattern_arena
@@ -476,7 +116,7 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
                    }
                }
            }
            _ => self.fields.iter_patterns().collect(),
            _ => self.fields.iter().collect(),
        }
    }

@@ -521,94 +161,7 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
/// `Display` impl.
impl<'p, 'tcx> fmt::Debug for DeconstructedPat<'p, 'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Printing lists is a chore.
        let mut first = true;
        let mut start_or_continue = |s| {
            if first {
                first = false;
                ""
            } else {
                s
            }
        };
        let mut start_or_comma = || start_or_continue(", ");

        match &self.ctor {
            Single | Variant(_) => match self.ty.kind() {
                ty::Adt(def, _) if def.is_box() => {
                    // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
                    // of `std`). So this branch is only reachable when the feature is enabled and
                    // the pattern is a box pattern.
                    let subpattern = self.iter_fields().next().unwrap();
                    write!(f, "box {subpattern:?}")
                }
                ty::Adt(..) | ty::Tuple(..) => {
                    let variant = match self.ty.kind() {
                        ty::Adt(adt, _) => Some(adt.variant(self.ctor.variant_index_for_adt(*adt))),
                        ty::Tuple(_) => None,
                        _ => unreachable!(),
                    };

                    if let Some(variant) = variant {
                        write!(f, "{}", variant.name)?;
                    }

                    // Without `cx`, we can't know which field corresponds to which, so we can't
                    // get the names of the fields. Instead we just display everything as a tuple
                    // struct, which should be good enough.
                    write!(f, "(")?;
                    for p in self.iter_fields() {
                        write!(f, "{}", start_or_comma())?;
                        write!(f, "{p:?}")?;
                    }
                    write!(f, ")")
                }
                // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
                // be careful to detect strings here. However a string literal pattern will never
                // be reported as a non-exhaustiveness witness, so we can ignore this issue.
                ty::Ref(_, _, mutbl) => {
                    let subpattern = self.iter_fields().next().unwrap();
                    write!(f, "&{}{:?}", mutbl.prefix_str(), subpattern)
                }
                _ => write!(f, "_"),
            },
            Slice(slice) => {
                let mut subpatterns = self.fields.iter_patterns();
                write!(f, "[")?;
                match slice.kind {
                    FixedLen(_) => {
                        for p in subpatterns {
                            write!(f, "{}{:?}", start_or_comma(), p)?;
                        }
                    }
                    VarLen(prefix_len, _) => {
                        for p in subpatterns.by_ref().take(prefix_len) {
                            write!(f, "{}{:?}", start_or_comma(), p)?;
                        }
                        write!(f, "{}", start_or_comma())?;
                        write!(f, "..")?;
                        for p in subpatterns {
                            write!(f, "{}{:?}", start_or_comma(), p)?;
                        }
                    }
                }
                write!(f, "]")
            }
            Bool(b) => write!(f, "{b}"),
            // Best-effort, will render signed ranges incorrectly
            IntRange(range) => write!(f, "{range:?}"),
            F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
            F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
            Str(value) => write!(f, "{value}"),
            Opaque(..) => write!(f, "<constant pattern>"),
            Or => {
                for pat in self.iter_fields() {
                    write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
                }
                Ok(())
            }
            Wildcard | Missing { .. } | NonExhaustive | Hidden => write!(f, "_ : {:?}", self.ty),
        }
        MatchCheckCtxt::debug_pat(f, self)
    }
}

@@ -633,11 +186,9 @@ impl<'tcx> WitnessPat<'tcx> {
    /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
    /// `Some(_)`.
    pub(super) fn wild_from_ctor(pcx: &PatCtxt<'_, '_, 'tcx>, ctor: Constructor<'tcx>) -> Self {
        // Reuse `Fields::wildcards` to get the types.
        let fields = Fields::wildcards(pcx, &ctor)
            .iter_patterns()
            .map(|deco_pat| Self::wildcard(deco_pat.ty()))
            .collect();
        let field_tys =
            pcx.cx.ctor_wildcard_fields(&ctor, pcx.ty).iter().map(|deco_pat| deco_pat.ty());
        let fields = field_tys.map(|ty| Self::wildcard(ty)).collect();
        Self::new(ctor, fields, pcx.ty)
    }

@@ -648,96 +199,6 @@ impl<'tcx> WitnessPat<'tcx> {
        self.ty
    }

    /// Convert back to a `thir::Pat` for diagnostic purposes. This panics for patterns that don't
    /// appear in diagnostics, like float ranges.
    pub fn to_diagnostic_pat(&self, cx: &MatchCheckCtxt<'_, 'tcx>) -> Pat<'tcx> {
        let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild);
        let mut subpatterns = self.iter_fields().map(|p| Box::new(p.to_diagnostic_pat(cx)));
        let kind = match &self.ctor {
            Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) },
            IntRange(range) => return range.to_diagnostic_pat(self.ty, cx.tcx),
            Single | Variant(_) => match self.ty.kind() {
                ty::Tuple(..) => PatKind::Leaf {
                    subpatterns: subpatterns
                        .enumerate()
                        .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern })
                        .collect(),
                },
                ty::Adt(adt_def, _) if adt_def.is_box() => {
                    // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
                    // of `std`). So this branch is only reachable when the feature is enabled and
                    // the pattern is a box pattern.
                    PatKind::Deref { subpattern: subpatterns.next().unwrap() }
                }
                ty::Adt(adt_def, args) => {
                    let variant_index = self.ctor.variant_index_for_adt(*adt_def);
                    let variant = &adt_def.variant(variant_index);
                    let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty, variant)
                        .zip(subpatterns)
                        .map(|((field, _ty), pattern)| FieldPat { field, pattern })
                        .collect();

                    if adt_def.is_enum() {
                        PatKind::Variant { adt_def: *adt_def, args, variant_index, subpatterns }
                    } else {
                        PatKind::Leaf { subpatterns }
                    }
                }
                // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
                // be careful to reconstruct the correct constant pattern here. However a string
                // literal pattern will never be reported as a non-exhaustiveness witness, so we
                // ignore this issue.
                ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
                _ => bug!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty),
            },
            Slice(slice) => {
                match slice.kind {
                    FixedLen(_) => PatKind::Slice {
                        prefix: subpatterns.collect(),
                        slice: None,
                        suffix: Box::new([]),
                    },
                    VarLen(prefix, _) => {
                        let mut subpatterns = subpatterns.peekable();
                        let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect();
                        if slice.array_len.is_some() {
                            // Improves diagnostics a bit: if the type is a known-size array, instead
                            // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`.
                            // This is incorrect if the size is not known, since `[_, ..]` captures
                            // arrays of lengths `>= 1` whereas `[..]` captures any length.
                            while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) {
                                prefix.pop();
                            }
                            while subpatterns.peek().is_some()
                                && is_wildcard(subpatterns.peek().unwrap())
                            {
                                subpatterns.next();
                            }
                        }
                        let suffix: Box<[_]> = subpatterns.collect();
                        let wild = Pat::wildcard_from_ty(self.ty);
                        PatKind::Slice {
                            prefix: prefix.into_boxed_slice(),
                            slice: Some(Box::new(wild)),
                            suffix,
                        }
                    }
                }
            }
            &Str(value) => PatKind::Constant { value },
            Wildcard | NonExhaustive | Hidden => PatKind::Wild,
            Missing { .. } => bug!(
                "trying to convert a `Missing` constructor into a `Pat`; this is probably a bug,
                `Missing` should have been processed in `apply_constructors`"
            ),
            F32Range(..) | F64Range(..) | Opaque(..) | Or => {
                bug!("can't convert to pattern: {:?}", self)
            }
        };

        Pat { ty: self.ty, span: DUMMY_SP, kind }
    }
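
    // Illustrative sketch, not from the commit: the fixed-length vs.
    // variable-length slice forms that the `Slice` arm above reconstructs for
    // diagnostics, written as ordinary slice patterns. `classify` is a made-up
    // example function.
    //
    //     fn classify(xs: &[u8]) -> &'static str {
    //         match xs {
    //             // FixedLen(0) and FixedLen(1): exact arities.
    //             [] => "empty",
    //             [_one] => "one",
    //             // VarLen(1, 1): a prefix and a suffix around a `..` rest pattern.
    //             [first, .., last] if first == last => "bookended",
    //             [..] => "other",
    //         }
    //     }
    //
    //     // `classify(&[3, 9, 3])` returns "bookended".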

    pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a WitnessPat<'tcx>> {
        self.fields.iter()
    }

@@ -551,66 +551,27 @@
//! I (Nadrieril) prefer to put new tests in `ui/pattern/usefulness` unless there's a specific
//! reason not to, for example if they crucially depend on a particular feature like `or_patterns`.

use self::ValidityConstraint::*;
use smallvec::{smallvec, SmallVec};
use std::fmt;

use rustc_data_structures::{captures::Captures, stack::ensure_sufficient_stack};
use rustc_hir::HirId;
use rustc_middle::ty::{self, Ty};
use rustc_session::lint;
use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
use rustc_span::{Span, DUMMY_SP};

use crate::constructor::{
    Constructor, ConstructorSet, IntRange, MaybeInfiniteInt, SplitConstructorSet,
};
use crate::cx::MatchCheckCtxt;
use crate::errors::{
    NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Overlap,
    OverlappingRangeEndpoints, Uncovered,
};
use crate::pat::{DeconstructedPat, WitnessPat};

use rustc_arena::TypedArena;
use rustc_data_structures::{captures::Captures, stack::ensure_sufficient_stack};
use rustc_hir::def_id::DefId;
use rustc_hir::HirId;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_session::lint;
use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
use rustc_span::{Span, DUMMY_SP};

use smallvec::{smallvec, SmallVec};
use std::fmt;

pub struct MatchCheckCtxt<'p, 'tcx> {
    pub tcx: TyCtxt<'tcx>,
    /// The module in which the match occurs. This is necessary for
    /// checking inhabited-ness of types because whether a type is (visibly)
    /// inhabited can depend on whether it was defined in the current module or
    /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty
    /// outside its module and should not be matchable with an empty match statement.
    pub module: DefId,
    pub param_env: ty::ParamEnv<'tcx>,
    pub pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
    /// Lint level at the match.
    pub match_lint_level: HirId,
    /// The span of the whole match, if applicable.
    pub whole_match_span: Option<Span>,
    /// Span of the scrutinee.
    pub scrut_span: Span,
    /// Only produce `NON_EXHAUSTIVE_OMITTED_PATTERNS` lint on refutable patterns.
    pub refutable: bool,
    /// Whether the data at the scrutinee is known to be valid. This is false if the scrutinee comes
    /// from a union field, a pointer deref, or a reference deref (pending opsem decisions).
    pub known_valid_scrutinee: bool,
}

impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
    pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
        !ty.is_inhabited_from(self.tcx, self.module, self.param_env)
    }

    /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
    pub fn is_foreign_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool {
        match ty.kind() {
            ty::Adt(def, ..) => {
                def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did().is_local()
            }
            _ => false,
        }
    }
}
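
// Illustrative sketch, not from the commit: the downstream effect that
// `is_foreign_non_exhaustive_enum` models. When an enum from another crate is
// `#[non_exhaustive]`, matches in dependent crates must keep a wildcard arm even
// if every currently known variant is listed; exhaustiveness checking represents
// that extra possibility with the `NonExhaustive` constructor. `Status` and
// `describe` are made-up names; shown in one crate for brevity, the restriction
// only applies across crate boundaries.
#[non_exhaustive]
pub enum Status {
    Ok,
    Err,
}

// Within the defining crate the `_` arm is redundant, hence the allow.
#[allow(unreachable_patterns)]
pub fn describe(s: &Status) -> &'static str {
    match s {
        Status::Ok => "ok",
        Status::Err => "err",
        // Downstream crates are required to have an arm like this one.
        _ => "unknown",
    }
}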

use self::ValidityConstraint::*;

#[derive(Copy, Clone)]
pub(super) struct PatCtxt<'a, 'p, 'tcx> {
@@ -1244,7 +1205,9 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>(

    // Analyze the constructors present in this column.
    let ctors = matrix.heads().map(|p| p.ctor());
    let split_set = ConstructorSet::for_ty(cx, ty).split(pcx, ctors);
    let ctors_for_ty = &cx.ctors_for_ty(ty);
    let is_integers = matches!(ctors_for_ty, ConstructorSet::Integers { .. }); // For diagnostics.
    let split_set = ctors_for_ty.split(pcx, ctors);
    let all_missing = split_set.present.is_empty();

    // Build the set of constructors we will specialize with. It must cover the whole type.
@@ -1259,7 +1222,7 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>(
    }

    // Decide what constructors to report.
    let always_report_all = is_top_level && !IntRange::is_integral(pcx.ty);
    let always_report_all = is_top_level && !is_integers;
    // Whether we should report "Enum::A and Enum::C are missing" or "_ is missing".
    let report_individual_missing_ctors = always_report_all || !all_missing;
    // Which constructors are considered missing. We ensure that `!missing_ctors.is_empty() =>
@@ -1362,7 +1325,7 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> {
    /// Do constructor splitting on the constructors of the column.
    fn analyze_ctors(&self, pcx: &PatCtxt<'_, 'p, 'tcx>) -> SplitConstructorSet<'tcx> {
        let column_ctors = self.patterns.iter().map(|p| p.ctor());
        ConstructorSet::for_ty(pcx.cx, pcx.ty).split(pcx, column_ctors)
        pcx.cx.ctors_for_ty(pcx.ty).split(pcx, column_ctors)
    }

    fn iter<'a>(&'a self) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
@@ -1470,9 +1433,9 @@ fn lint_overlapping_range_endpoints<'p, 'tcx>(

    let set = column.analyze_ctors(pcx);

    if IntRange::is_integral(ty) {
    if matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_)) {
        let emit_lint = |overlap: &IntRange, this_span: Span, overlapped_spans: &[Span]| {
            let overlap_as_pat = overlap.to_diagnostic_pat(ty, cx.tcx);
            let overlap_as_pat = cx.hoist_pat_range(overlap, ty);
            let overlaps: Vec<_> = overlapped_spans
                .iter()
                .copied()