Auto merge of #93505 - lcnr:substsref-vs-ty-list, r=michaelwoerister

safely `transmute<&List<Ty<'tcx>>, &List<GenericArg<'tcx>>>`

This PR has 3 relevant steps, which are split into distinct commits.

The first commit interns `List<Ty<'tcx>>` and `List<GenericArg<'tcx>>` together, potentially reusing memory while allowing free conversions between the two via `List<Ty<'tcx>>::as_substs()` and `SubstsRef<'tcx>::try_as_type_list()`.
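Both conversion methods appear in the `subst.rs` hunk further down; as a rough sketch of how they compose (the helper functions below are invented for illustration and only make sense inside rustc, where `Ty`, `List`, and `SubstsRef` exist):

```rust
// Hypothetical helpers, for illustration only.

// Forward direction is free: both list kinds are interned in the same set,
// so this is just a pointer cast of the shared allocation
// (see `List::<Ty<'tcx>>::as_substs` in the diff below).
fn view_as_substs<'tcx>(fields: &'tcx List<Ty<'tcx>>) -> SubstsRef<'tcx> {
    fields.as_substs()
}

// The reverse direction must check that every generic arg really is a type
// (see `InternalSubsts::try_as_type_list`), so it returns an `Option`.
fn view_as_types<'tcx>(substs: SubstsRef<'tcx>) -> Option<&'tcx List<Ty<'tcx>>> {
    substs.try_as_type_list()
}
```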

Building on this, the second commit uses `&'tcx List<Ty<'tcx>>` instead of a `SubstsRef<'tcx>` for tuple fields, simplifying a bunch of code.
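Most of the diff below is the mechanical fallout of that change: call sites stop unwrapping `GenericArg`s with `expect_ty()` and work with `Ty<'tcx>` directly. A simplified sketch of the pattern (the function names are invented; the bodies mirror the `is_trivially_sized` change further down, with "before" meaning the pre-PR representation where `ty::Tuple` carried a `SubstsRef`):

```rust
// Before: every tuple element was a `GenericArg` that had to be unwrapped.
fn all_fields_trivially_sized_old<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
    match ty.kind() {
        ty::Tuple(substs) => substs.iter().all(|arg| arg.expect_ty().is_trivially_sized(tcx)),
        _ => true,
    }
}

// After: the variant stores `&'tcx List<Ty<'tcx>>`, so the fields are already types.
fn all_fields_trivially_sized_new<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
    match ty.kind() {
        ty::Tuple(tys) => tys.iter().all(|ty| ty.is_trivially_sized(tcx)),
        _ => true,
    }
}
```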

Finally, as tuple fields and other generic arguments now use a different `TypeFoldable<'tcx>` impl, we optimize the impl for `List<Ty<'tcx>>`, improving perf by slightly less than 1% in tuple-heavy benchmarks.
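The core of that optimization is a fast path for the dominant list length that only re-interns when folding actually changed something; the real impl (with the measured length distribution) is in the `subst.rs` hunk below, but its shape is roughly this simplified sketch, assuming the rustc-internal `FallibleTypeFolder`, `List`, and `intern_type_list` APIs shown in that hunk:

```rust
// Sketch of the length-2 fast path only; the real impl falls back to
// `ty::util::fold_list` for other lengths.
fn fold_pair<'tcx, F: FallibleTypeFolder<'tcx>>(
    list: &'tcx List<Ty<'tcx>>,
    folder: &mut F,
) -> Result<&'tcx List<Ty<'tcx>>, F::Error> {
    let a = list[0].try_fold_with(folder)?;
    let b = list[1].try_fold_with(folder)?;
    if a == list[0] && b == list[1] {
        // Folding was a no-op: return the original interned list and skip
        // the interner lookup entirely.
        Ok(list)
    } else {
        Ok(folder.tcx().intern_type_list(&[a, b]))
    }
}
```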
bors 2022-02-21 16:03:38 +00:00
commit 03a8cc7df1
64 changed files with 289 additions and 210 deletions

@@ -2298,7 +2298,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 // Closure arguments are wrapped in a tuple, so we need to get the first
 // from that.
 if let ty::Tuple(elems) = argument_ty.kind() {
-let argument_ty = elems.first()?.expect_ty();
+let &argument_ty = elems.first()?;
 if let ty::Ref(_, _, _) = argument_ty.kind() {
 return Some(AnnotatedBorrowFnSignature::Closure {
 argument_ty,

@@ -480,7 +480,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
 let search_stack: &mut Vec<(Ty<'tcx>, &hir::Ty<'_>)> = &mut vec![(ty, hir_ty)];
 while let Some((ty, hir_ty)) = search_stack.pop() {
-match (&ty.kind(), &hir_ty.kind) {
+match (ty.kind(), &hir_ty.kind) {
 // Check if the `ty` is `&'X ..` where `'X`
 // is the region we are looking for -- if so, and we have a `&T`
 // on the RHS, then we want to highlight the `&` like so:
@@ -532,9 +532,8 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
 // The following cases don't have lifetimes, so we
 // just worry about trying to match up the rustc type
 // with the HIR types:
-(ty::Tuple(elem_tys), hir::TyKind::Tup(elem_hir_tys)) => {
-search_stack
-.extend(iter::zip(elem_tys.iter().map(|k| k.expect_ty()), *elem_hir_tys));
+(&ty::Tuple(elem_tys), hir::TyKind::Tup(elem_hir_tys)) => {
+search_stack.extend(iter::zip(elem_tys, *elem_hir_tys));
 }
 (ty::Slice(elem_ty), hir::TyKind::Slice(elem_hir_ty))

@@ -832,9 +832,10 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
 return match substs
 .as_closure()
 .tupled_upvars_ty()
-.tuple_element_ty(field.index())
+.tuple_fields()
+.get(field.index())
 {
-Some(ty) => Ok(ty),
+Some(&ty) => Ok(ty),
 None => Err(FieldAccessError::OutOfRange {
 field_count: substs.as_closure().upvar_tys().count(),
 }),
@@ -852,7 +853,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
 }
 ty::Tuple(tys) => {
 return match tys.get(field.index()) {
-Some(&ty) => Ok(ty.expect_ty()),
+Some(&ty) => Ok(ty),
 None => Err(FieldAccessError::OutOfRange { field_count: tys.len() }),
 };
 }

@@ -641,15 +641,13 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
 let (&output, tuplized_inputs) =
 inputs_and_output.skip_binder().split_last().unwrap();
 assert_eq!(tuplized_inputs.len(), 1, "multiple closure inputs");
-let ty::Tuple(inputs) = tuplized_inputs[0].kind() else {
+let &ty::Tuple(inputs) = tuplized_inputs[0].kind() else {
 bug!("closure inputs not a tuple: {:?}", tuplized_inputs[0]);
 };
 ty::Binder::bind_with_vars(
 tcx.mk_type_list(
-iter::once(closure_ty)
-.chain(inputs.iter().map(|k| k.expect_ty()))
-.chain(iter::once(output)),
+iter::once(closure_ty).chain(inputs).chain(iter::once(output)),
 ),
 bound_vars,
 )

@@ -117,7 +117,7 @@ impl<'tcx> FunctionCx<'_, '_, 'tcx> {
 .unzip();
 let return_layout = self.layout_of(return_ty);
 let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
-tup.types().map(|ty| AbiParam::new(self.clif_type(ty).unwrap())).collect()
+tup.iter().map(|ty| AbiParam::new(self.clif_type(ty).unwrap())).collect()
 } else {
 vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
 };
@@ -199,7 +199,7 @@ pub(crate) fn codegen_fn_prelude<'tcx>(fx: &mut FunctionCx<'_, '_, 'tcx>, start_
 };
 let mut params = Vec::new();
-for (i, _arg_ty) in tupled_arg_tys.types().enumerate() {
+for (i, _arg_ty) in tupled_arg_tys.iter().enumerate() {
 let arg_abi = arg_abis_iter.next().unwrap();
 let param =
 cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);

@@ -90,10 +90,9 @@ fn clif_pair_type_from_ty<'tcx>(
 ty: Ty<'tcx>,
 ) -> Option<(types::Type, types::Type)> {
 Some(match ty.kind() {
-ty::Tuple(substs) if substs.len() == 2 => {
-let mut types = substs.types();
-let a = clif_type_from_ty(tcx, types.next().unwrap())?;
-let b = clif_type_from_ty(tcx, types.next().unwrap())?;
+ty::Tuple(types) if types.len() == 2 => {
+let a = clif_type_from_ty(tcx, types[0])?;
+let b = clif_type_from_ty(tcx, types[1])?;
 if a.is_vector() || b.is_vector() {
 return None;
 }

@@ -752,9 +752,8 @@ pub fn type_metadata<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll
 prepare_enum_metadata(cx, t, def.did, unique_type_id, vec![]).finalize(cx)
 }
 },
-ty::Tuple(elements) => {
-let tys: Vec<_> = elements.iter().map(|k| k.expect_ty()).collect();
-prepare_tuple_metadata(cx, t, &tys, unique_type_id, NO_SCOPE_METADATA).finalize(cx)
+ty::Tuple(tys) => {
+prepare_tuple_metadata(cx, t, tys, unique_type_id, NO_SCOPE_METADATA).finalize(cx)
 }
 // Type parameters from polymorphized functions.
 ty::Param(_) => MetadataCreationResult::new(param_type_metadata(cx, t), false),

@@ -86,7 +86,7 @@ fn push_debuginfo_type_name<'tcx>(
 }
 for component_type in component_types {
-push_debuginfo_type_name(tcx, component_type.expect_ty(), true, output, visited);
+push_debuginfo_type_name(tcx, component_type, true, output, visited);
 push_arg_separator(cpp_like_debuginfo, output);
 }
 if !component_types.is_empty() {

@@ -69,7 +69,7 @@ use rustc_middle::ty::{
 self,
 error::TypeError,
 subst::{GenericArgKind, Subst, SubstsRef},
-Binder, Region, Ty, TyCtxt, TypeFoldable,
+Binder, List, Region, Ty, TyCtxt, TypeFoldable,
 };
 use rustc_span::{sym, BytePos, DesugaringKind, MultiSpan, Pos, Span};
 use rustc_target::spec::abi;
@@ -1361,7 +1361,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 let mut values =
 (DiagnosticStyledString::normal("("), DiagnosticStyledString::normal("("));
 let len = substs1.len();
-for (i, (left, right)) in substs1.types().zip(substs2.types()).enumerate() {
+for (i, (left, right)) in substs1.iter().zip(substs2).enumerate() {
 let (x1, x2) = self.cmp(left, right);
 (values.0).0.extend(x1.0);
 (values.1).0.extend(x2.0);
@@ -2042,8 +2042,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 // If a tuple of length one was expected and the found expression has
 // parentheses around it, perhaps the user meant to write `(expr,)` to
 // build a tuple (issue #86100)
-(ty::Tuple(_), _) => {
-self.emit_tuple_wrap_err(&mut err, span, found, expected)
+(ty::Tuple(fields), _) => {
+self.emit_tuple_wrap_err(&mut err, span, found, fields)
 }
 // If a character was expected and the found expression is a string literal
 // containing a single character, perhaps the user meant to write `'c'` to
@@ -2111,12 +2111,11 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 err: &mut DiagnosticBuilder<'tcx>,
 span: Span,
 found: Ty<'tcx>,
-expected: Ty<'tcx>,
+expected_fields: &List<Ty<'tcx>>,
 ) {
-let [expected_tup_elem] = &expected.tuple_fields().collect::<Vec<_>>()[..]
-else { return };
+let [expected_tup_elem] = expected_fields[..] else { return };
-if !same_type_modulo_infer(*expected_tup_elem, found) {
+if !same_type_modulo_infer(expected_tup_elem, found) {
 return;
 }

@@ -254,7 +254,9 @@ fn closure_args(fn_sig: &ty::PolyFnSig<'_>) -> String {
 .skip_binder()
 .iter()
 .next()
-.map(|args| args.tuple_fields().map(|arg| arg.to_string()).collect::<Vec<_>>().join(", "))
+.map(|args| {
+args.tuple_fields().iter().map(|arg| arg.to_string()).collect::<Vec<_>>().join(", ")
+})
 .unwrap_or_default()
 }

@@ -2621,7 +2621,7 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
 }
 Tuple(..) => {
 // Proceed recursively, check all fields.
-ty.tuple_fields().find_map(|field| ty_find_init_error(tcx, field, init))
+ty.tuple_fields().iter().find_map(|field| ty_find_init_error(tcx, field, init))
 }
 // Conservative fallback.
 _ => None,
@@ -2934,7 +2934,7 @@ impl ClashingExternDeclarations {
 )
 }
 (Tuple(a_substs), Tuple(b_substs)) => {
-a_substs.types().eq_by(b_substs.types(), |a_ty, b_ty| {
+a_substs.iter().eq_by(b_substs.iter(), |a_ty, b_ty| {
 structurally_same_type_impl(seen_types, cx, a_ty, b_ty, ckind)
 })
 }

@@ -246,7 +246,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
 } else {
 vec![]
 };
-for (i, ty) in tys.iter().map(|k| k.expect_ty()).enumerate() {
+for (i, ty) in tys.iter().enumerate() {
 let descr_post = &format!(" in tuple element {}", i);
 let span = *spans.get(i).unwrap_or(&span);
 if check_must_use_ty(cx, ty, expr, span, descr_pre, descr_post, plural_len)

@@ -428,7 +428,7 @@ fn use_verbose<'tcx>(ty: Ty<'tcx>, fn_def: bool) -> bool {
 ty::Int(_) | ty::Uint(_) | ty::Bool | ty::Char | ty::Float(_) => false,
 // Unit type
 ty::Tuple(g_args) if g_args.is_empty() => false,
-ty::Tuple(g_args) => g_args.iter().any(|g_arg| use_verbose(g_arg.expect_ty(), fn_def)),
+ty::Tuple(g_args) => g_args.iter().any(|g_arg| use_verbose(g_arg, fn_def)),
 ty::Array(ty, _) => use_verbose(ty, fn_def),
 ty::FnDef(..) => fn_def,
 _ => true,

@@ -46,7 +46,7 @@ impl<'tcx> PlaceTy<'tcx> {
 let field_def = &variant_def.fields[f.index()];
 field_def.ty(tcx, substs)
 }
-ty::Tuple(ref tys) => tys[f.index()].expect_ty(),
+ty::Tuple(tys) => tys[f.index()],
 _ => bug!("extracting field of non-tuple non-adt: {:?}", self),
 };
 debug!("field_ty self: {:?} f: {:?} yields: {:?}", self, f, answer);

@@ -101,7 +101,6 @@ pub struct CtxtInterners<'tcx> {
 // Specifically use a speedy hash algorithm for these hash sets, since
 // they're accessed quite often.
 type_: InternedSet<'tcx, TyS<'tcx>>,
-type_list: InternedSet<'tcx, List<Ty<'tcx>>>,
 substs: InternedSet<'tcx, InternalSubsts<'tcx>>,
 canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo<'tcx>>>,
 region: InternedSet<'tcx, RegionKind>,
@@ -129,7 +128,6 @@ impl<'tcx> CtxtInterners<'tcx> {
 CtxtInterners {
 arena,
 type_: Default::default(),
-type_list: Default::default(),
 substs: Default::default(),
 region: Default::default(),
 poly_existential_predicates: Default::default(),
@@ -1657,6 +1655,8 @@ macro_rules! nop_lift {
 type Lifted = $lifted;
 fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
 if tcx.interners.$set.contains_pointer_to(&InternedInSet(self.0.0)) {
+// SAFETY: `self` is interned and therefore valid
+// for the entire lifetime of the `TyCtxt`.
 Some(unsafe { mem::transmute(self) })
 } else {
 None
@@ -1666,6 +1666,25 @@
 };
 }
+// Can't use the macros as we have reuse the `substs` here.
+//
+// See `intern_type_list` for more info.
+impl<'a, 'tcx> Lift<'tcx> for &'a List<Ty<'a>> {
+type Lifted = &'tcx List<Ty<'tcx>>;
+fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> {
+if self.is_empty() {
+return Some(List::empty());
+}
+if tcx.interners.substs.contains_pointer_to(&InternedInSet(self.as_substs())) {
+// SAFETY: `self` is interned and therefore valid
+// for the entire lifetime of the `TyCtxt`.
+Some(unsafe { mem::transmute::<&'a List<Ty<'a>>, &'tcx List<Ty<'tcx>>>(self) })
+} else {
+None
+}
+}
+}
 macro_rules! nop_list_lift {
 ($set:ident; $ty:ty => $lifted:ty) => {
 impl<'a, 'tcx> Lift<'tcx> for &'a List<$ty> {
@@ -1690,7 +1709,6 @@ nop_lift! {const_; Const<'a> => Const<'tcx>}
 nop_lift_old! {const_allocation; &'a Allocation => &'tcx Allocation}
 nop_lift! {predicate; Predicate<'a> => Predicate<'tcx>}
-nop_list_lift! {type_list; Ty<'a> => Ty<'tcx>}
 nop_list_lift! {poly_existential_predicates; ty::Binder<'a, ExistentialPredicate<'a>> => ty::Binder<'tcx, ExistentialPredicate<'tcx>>}
 nop_list_lift! {predicates; Predicate<'a> => Predicate<'tcx>}
 nop_list_lift! {canonical_var_infos; CanonicalVarInfo<'a> => CanonicalVarInfo<'tcx>}
@@ -2189,7 +2207,6 @@ macro_rules! slice_interners {
 }
 slice_interners!(
-type_list: _intern_type_list(Ty<'tcx>),
 substs: _intern_substs(GenericArg<'tcx>),
 canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo<'tcx>),
 poly_existential_predicates:
@@ -2259,7 +2276,7 @@ impl<'tcx> TyCtxt<'tcx> {
 ) -> PolyFnSig<'tcx> {
 sig.map_bound(|s| {
 let params_iter = match s.inputs()[0].kind() {
-ty::Tuple(params) => params.into_iter().map(|k| k.expect_ty()),
+ty::Tuple(params) => params.into_iter(),
 _ => bug!(),
 };
 self.mk_fn_sig(params_iter, s.output(), s.c_variadic, unsafety, abi::Abi::Rust)
@@ -2421,15 +2438,11 @@ impl<'tcx> TyCtxt<'tcx> {
 #[inline]
 pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
-let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect();
-self.mk_ty(Tuple(self.intern_substs(&kinds)))
+self.mk_ty(Tuple(self.intern_type_list(&ts)))
 }
 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
-iter.intern_with(|ts| {
-let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect();
-self.mk_ty(Tuple(self.intern_substs(&kinds)))
-})
+iter.intern_with(|ts| self.mk_ty(Tuple(self.intern_type_list(&ts))))
 }
 #[inline]
@@ -2611,7 +2624,19 @@ impl<'tcx> TyCtxt<'tcx> {
 }
 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> {
-if ts.is_empty() { List::empty() } else { self._intern_type_list(ts) }
+if ts.is_empty() {
+List::empty()
+} else {
+// Actually intern type lists as lists of `GenericArg`s.
+//
+// Transmuting from `Ty<'tcx>` to `GenericArg<'tcx>` is sound
+// as explained in ty_slice_as_generic_arg`. With this,
+// we guarantee that even when transmuting between `List<Ty<'tcx>>`
+// and `List<GenericArg<'tcx>>`, the uniqueness requirement for
+// lists is upheld.
+let substs = self._intern_substs(ty::subst::ty_slice_as_generic_args(ts));
+substs.try_as_type_list().unwrap()
+}
 }
 pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List<GenericArg<'tcx>> {

@@ -116,9 +116,10 @@ impl<'tcx> Ty<'tcx> {
 }
 _ => true,
 }),
-Projection(ProjectionTy { substs: args, .. }) | Adt(_, args) | Tuple(args) => {
+Projection(ProjectionTy { substs: args, .. }) | Adt(_, args) => {
 args.iter().all(generic_arg_is_suggestible)
 }
+Tuple(args) => args.iter().all(|ty| ty.is_suggestable()),
 Slice(ty) | RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => ty.is_suggestable(),
 Array(ty, c) => ty.is_suggestable() && const_is_suggestable(c.val()),
 _ => true,

@@ -201,8 +201,8 @@ impl FlagComputation {
 self.add_ty(ty);
 }
-&ty::Tuple(ref substs) => {
-self.add_substs(substs);
+&ty::Tuple(types) => {
+self.add_tys(types);
 }
 &ty::FnDef(_, substs) => {

@@ -61,6 +61,36 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ty::subst::GenericArg<'t
 }
 }
+impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ty::subst::GenericArgKind<'tcx> {
+fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
+match self {
+// WARNING: We dedup cache the `HashStable` results for `List`
+// while ignoring types and freely transmute
+// between `List<Ty<'tcx>>` and `List<GenericArg<'tcx>>`.
+// See `fn intern_type_list` for more details.
+//
+// We therefore hash types without adding a hash for their discriminant.
+//
+// In order to make it very unlikely for the sequence of bytes being hashed for
+// a `GenericArgKind::Type` to be the same as the sequence of bytes being
+// hashed for one of the other variants, we hash a `0xFF` byte before hashing
+// their discriminant (since the discriminant of `TyKind` is unlikely to ever start
+// with 0xFF).
+ty::subst::GenericArgKind::Type(ty) => ty.hash_stable(hcx, hasher),
+ty::subst::GenericArgKind::Const(ct) => {
+0xFFu8.hash_stable(hcx, hasher);
+mem::discriminant(self).hash_stable(hcx, hasher);
+ct.hash_stable(hcx, hasher);
+}
+ty::subst::GenericArgKind::Lifetime(lt) => {
+0xFFu8.hash_stable(hcx, hasher);
+mem::discriminant(self).hash_stable(hcx, hasher);
+lt.hash_stable(hcx, hasher);
+}
+}
+}
+}
 impl<'a> HashStable<StableHashingContext<'a>> for ty::RegionKind {
 fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
 mem::discriminant(self).hash_stable(hcx, hasher);

@@ -207,10 +207,9 @@ pub(crate) fn type_uninhabited_from<'tcx>(
 Never => DefIdForest::full(),
-Tuple(ref tys) => DefIdForest::union(
-tcx,
-tys.iter().map(|ty| ty.expect_ty().uninhabited_from(tcx, param_env)),
-),
+Tuple(ref tys) => {
+DefIdForest::union(tcx, tys.iter().map(|ty| ty.uninhabited_from(tcx, param_env)))
+}
 Array(ty, len) => match len.try_eval_usize(tcx, param_env) {
 Some(0) | None => DefIdForest::empty(),

@@ -628,7 +628,7 @@ fn polymorphize<'tcx>(
 } else {
 None
 };
-let has_upvars = upvars_ty.map_or(false, |ty| ty.tuple_fields().count() > 0);
+let has_upvars = upvars_ty.map_or(false, |ty| !ty.tuple_fields().is_empty());
 debug!("polymorphize: upvars_ty={:?} has_upvars={:?}", upvars_ty, has_upvars);
 struct PolymorphizationFolder<'tcx> {

@@ -712,9 +712,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
 if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };
 univariant(
-&tys.iter()
-.map(|k| self.layout_of(k.expect_ty()))
-.collect::<Result<Vec<_>, _>>()?,
+&tys.iter().map(|k| self.layout_of(k)).collect::<Result<Vec<_>, _>>()?,
 &ReprOptions::default(),
 kind,
 )?
@@ -2382,7 +2380,7 @@ where
 }
 },
-ty::Tuple(tys) => TyMaybeWithLayout::Ty(tys[i].expect_ty()),
+ty::Tuple(tys) => TyMaybeWithLayout::Ty(tys[i]),
 // ADTs.
 ty::Adt(def, substs) => {
@@ -3012,7 +3010,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
 if let Some(input) = sig.inputs().last() {
 if let ty::Tuple(tupled_arguments) = input.kind() {
 inputs = &sig.inputs()[0..sig.inputs().len() - 1];
-tupled_arguments.iter().map(|k| k.expect_ty()).collect()
+tupled_arguments
 } else {
 bug!(
 "argument to function with \"rust-call\" ABI \
@@ -3027,7 +3025,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
 }
 } else {
 assert!(sig.c_variadic || extra_args.is_empty());
-extra_args.to_vec()
+extra_args
 };
 let target = &self.tcx.sess.target;
@@ -3155,8 +3153,8 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
 ret: arg_of(sig.output(), None)?,
 args: inputs
 .iter()
-.cloned()
-.chain(extra_args)
+.copied()
+.chain(extra_args.iter().copied())
 .chain(caller_location)
 .enumerate()
 .map(|(i, ty)| arg_of(ty, Some(i)))

@@ -287,7 +287,6 @@ fn characteristic_def_id_of_type_cached<'a>(
 ty::Ref(_, ty, _) => characteristic_def_id_of_type_cached(ty, visited),
 ty::Tuple(ref tys) => tys.iter().find_map(|ty| {
-let ty = ty.expect_ty();
 if visited.insert(ty) {
 return characteristic_def_id_of_type_cached(ty, visited);
 }

@@ -845,7 +845,7 @@ pub trait PrettyPrinter<'tcx>:
 write("{}{}(", if paren_needed { "(" } else { "" }, name)
 );
-for (idx, ty) in arg_tys.tuple_fields().enumerate() {
+for (idx, ty) in arg_tys.tuple_fields().iter().enumerate() {
 if idx > 0 {
 p!(", ");
 }
@@ -1032,12 +1032,11 @@ pub trait PrettyPrinter<'tcx>:
 // Special-case `Fn(...) -> ...` and resugar it.
 let fn_trait_kind = cx.tcx().fn_trait_kind_from_lang_item(principal.def_id);
 if !cx.tcx().sess.verbose() && fn_trait_kind.is_some() {
-if let ty::Tuple(ref args) = principal.substs.type_at(0).kind() {
+if let ty::Tuple(tys) = principal.substs.type_at(0).kind() {
 let mut projections = predicates.projection_bounds();
 if let (Some(proj), None) = (projections.next(), projections.next()) {
-let tys: Vec<_> = args.iter().map(|k| k.expect_ty()).collect();
 p!(pretty_fn_sig(
-&tys,
+tys,
 false,
 proj.skip_binder().term.ty().expect("Return type was a const")
 ));

@@ -501,9 +501,7 @@ pub fn super_relate_tys<'tcx, R: TypeRelation<'tcx>>(
 (&ty::Tuple(as_), &ty::Tuple(bs)) => {
 if as_.len() == bs.len() {
-Ok(tcx.mk_tup(
-iter::zip(as_, bs).map(|(a, b)| relation.relate(a.expect_ty(), b.expect_ty())),
-)?)
+Ok(tcx.mk_tup(iter::zip(as_, bs).map(|(a, b)| relation.relate(a, b)))?)
 } else if !(as_.is_empty() || bs.is_empty()) {
 Err(TypeError::TupleSize(expected_found(relation, as_.len(), bs.len())))
 } else {

@@ -889,19 +889,6 @@ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ty::Binder<'tcx, ty::Existentia
 }
 }
-impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<Ty<'tcx>> {
-fn try_super_fold_with<F: FallibleTypeFolder<'tcx>>(
-self,
-folder: &mut F,
-) -> Result<Self, F::Error> {
-ty::util::fold_list(self, folder, |tcx, v| tcx.intern_type_list(v))
-}
-fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> ControlFlow<V::BreakTy> {
-self.iter().try_for_each(|t| t.visit_with(visitor))
-}
-}
 impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ProjectionKind> {
 fn try_super_fold_with<F: FallibleTypeFolder<'tcx>>(
 self,

@@ -200,8 +200,7 @@ pub enum TyKind<'tcx> {
 Never,
 /// A tuple type. For example, `(i32, bool)`.
-/// Use `Ty::tuple_fields` to iterate over the field types.
-Tuple(SubstsRef<'tcx>),
+Tuple(&'tcx List<Ty<'tcx>>),
 /// The projection of an associated type. For example,
 /// `<T as Trait<..>>::N`.
@@ -2155,18 +2154,9 @@ impl<'tcx> Ty<'tcx> {
 /// Iterates over tuple fields.
 /// Panics when called on anything but a tuple.
-pub fn tuple_fields(self) -> impl DoubleEndedIterator<Item = Ty<'tcx>> {
+pub fn tuple_fields(self) -> &'tcx List<Ty<'tcx>> {
 match self.kind() {
-Tuple(substs) => substs.iter().map(|field| field.expect_ty()),
-_ => bug!("tuple_fields called on non-tuple"),
-}
-}
-/// Get the `i`-th element of a tuple.
-/// Panics when called on anything but a tuple.
-pub fn tuple_element_ty(self, i: usize) -> Option<Ty<'tcx>> {
-match self.kind() {
-Tuple(substs) => substs.iter().nth(i).map(|field| field.expect_ty()),
+Tuple(substs) => substs,
 _ => bug!("tuple_fields called on non-tuple"),
 }
 }
@@ -2367,7 +2357,7 @@ impl<'tcx> Ty<'tcx> {
 ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => false,
-ty::Tuple(tys) => tys.iter().all(|ty| ty.expect_ty().is_trivially_sized(tcx)),
+ty::Tuple(tys) => tys.iter().all(|ty| ty.is_trivially_sized(tcx)),
 ty::Adt(def, _substs) => def.sized_constraint(tcx).is_empty(),

@@ -20,6 +20,7 @@ use std::marker::PhantomData;
 use std::mem;
 use std::num::NonZeroUsize;
 use std::ops::ControlFlow;
+use std::slice;
 /// An entity in the Rust type system, which can be one of
 /// several kinds (types, lifetimes, and consts).
@@ -40,13 +41,38 @@ const TYPE_TAG: usize = 0b00;
 const REGION_TAG: usize = 0b01;
 const CONST_TAG: usize = 0b10;
-#[derive(Debug, TyEncodable, TyDecodable, PartialEq, Eq, PartialOrd, Ord, HashStable)]
+#[derive(Debug, TyEncodable, TyDecodable, PartialEq, Eq, PartialOrd, Ord)]
 pub enum GenericArgKind<'tcx> {
 Lifetime(ty::Region<'tcx>),
 Type(Ty<'tcx>),
 Const(ty::Const<'tcx>),
 }
+/// This function goes from `&'a [Ty<'tcx>]` to `&'a [GenericArg<'tcx>]`
+///
+/// This is sound as, for types, `GenericArg` is just
+/// `NonZeroUsize::new_unchecked(ty as *const _ as usize)` as
+/// long as we use `0` for the `TYPE_TAG`.
+pub fn ty_slice_as_generic_args<'a, 'tcx>(ts: &'a [Ty<'tcx>]) -> &'a [GenericArg<'tcx>] {
+assert_eq!(TYPE_TAG, 0);
+// SAFETY: the whole slice is valid and immutable.
+// `Ty` and `GenericArg` is explained above.
+unsafe { slice::from_raw_parts(ts.as_ptr().cast(), ts.len()) }
+}
+impl<'tcx> List<Ty<'tcx>> {
+/// Allows to freely switch betwen `List<Ty<'tcx>>` and `List<GenericArg<'tcx>>`.
+///
+/// As lists are interned, `List<Ty<'tcx>>` and `List<GenericArg<'tcx>>` have
+/// be interned together, see `intern_type_list` for more details.
+#[inline]
+pub fn as_substs(&'tcx self) -> SubstsRef<'tcx> {
+assert_eq!(TYPE_TAG, 0);
+// SAFETY: `List<T>` is `#[repr(C)]`. `Ty` and `GenericArg` is explained above.
+unsafe { &*(self as *const List<Ty<'tcx>> as *const List<GenericArg<'tcx>>) }
+}
+}
 impl<'tcx> GenericArgKind<'tcx> {
 #[inline]
 fn pack(self) -> GenericArg<'tcx> {
@@ -208,6 +234,17 @@ pub type InternalSubsts<'tcx> = List<GenericArg<'tcx>>;
 pub type SubstsRef<'tcx> = &'tcx InternalSubsts<'tcx>;
 impl<'a, 'tcx> InternalSubsts<'tcx> {
+/// Checks whether all elements of this list are types, if so, transmute.
+pub fn try_as_type_list(&'tcx self) -> Option<&'tcx List<Ty<'tcx>>> {
+if self.iter().all(|arg| matches!(arg.unpack(), GenericArgKind::Type(_))) {
+assert_eq!(TYPE_TAG, 0);
+// SAFETY: All elements are types, see `List<Ty<'tcx>>::as_substs`.
+Some(unsafe { &*(self as *const List<GenericArg<'tcx>> as *const List<Ty<'tcx>>) })
+} else {
+None
+}
+}
 /// Interpret these substitutions as the substitutions of a closure type.
 /// Closure substitutions have a particular structure controlled by the
 /// compiler that encodes information like the signature and closure kind;
@@ -422,6 +459,45 @@ impl<'tcx> TypeFoldable<'tcx> for SubstsRef<'tcx> {
 }
 }
+impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<Ty<'tcx>> {
+fn try_super_fold_with<F: FallibleTypeFolder<'tcx>>(
+self,
+folder: &mut F,
+) -> Result<Self, F::Error> {
+// This code is fairly hot, though not as hot as `SubstsRef`.
+//
+// When compiling stage 2, I get the following results:
+//
+// len |   total   |   %
+// --- | --------- | -----
+//  2  |  15083590 |  48.1
+//  3  |   7540067 |  24.0
+//  1  |   5300377 |  16.9
+//  4  |   1351897 |   4.3
+//  0  |   1256849 |   4.0
+//
+// I've tried it with some private repositories and got
+// close to the same result, with 4 and 0 swapping places
+// sometimes.
+match self.len() {
+2 => {
+let param0 = self[0].try_fold_with(folder)?;
+let param1 = self[1].try_fold_with(folder)?;
+if param0 == self[0] && param1 == self[1] {
+Ok(self)
+} else {
+Ok(folder.tcx().intern_type_list(&[param0, param1]))
+}
+}
+_ => ty::util::fold_list(self, folder, |tcx, v| tcx.intern_type_list(v)),
+}
+}
+fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> ControlFlow<V::BreakTy> {
+self.iter().try_for_each(|t| t.visit_with(visitor))
+}
+}
 ///////////////////////////////////////////////////////////////////////////
 // Public trait `Subst`
 //

@@ -248,7 +248,7 @@ impl<'tcx> TyCtxt<'tcx> {
 }
 ty::Tuple(tys) if let Some((&last_ty, _)) = tys.split_last() => {
-ty = last_ty.expect_ty();
+ty = last_ty;
 }
 ty::Tuple(_) => break,
@@ -319,9 +319,9 @@ impl<'tcx> TyCtxt<'tcx> {
 }
 }
 (&Tuple(a_tys), &Tuple(b_tys)) if a_tys.len() == b_tys.len() => {
-if let Some(a_last) = a_tys.last() {
-a = a_last.expect_ty();
-b = b_tys.last().unwrap().expect_ty();
+if let Some(&a_last) = a_tys.last() {
+a = a_last;
+b = *b_tys.last().unwrap();
 } else {
 break;
 }
@@ -746,7 +746,7 @@ impl<'tcx> Ty<'tcx> {
 | ty::FnDef(..)
 | ty::Error(_)
 | ty::FnPtr(_) => true,
-ty::Tuple(_) => self.tuple_fields().all(|f| Self::is_trivially_freeze(f)),
+ty::Tuple(fields) => fields.iter().all(Self::is_trivially_freeze),
 ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_freeze(),
 ty::Adt(..)
 | ty::Bound(..)
@@ -786,7 +786,7 @@ impl<'tcx> Ty<'tcx> {
 | ty::FnDef(..)
 | ty::Error(_)
 | ty::FnPtr(_) => true,
-ty::Tuple(_) => self.tuple_fields().all(|f| Self::is_trivially_unpin(f)),
+ty::Tuple(fields) => fields.iter().all(Self::is_trivially_unpin),
 ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_unpin(),
 ty::Adt(..)
 | ty::Bound(..)
@@ -1042,7 +1042,7 @@ pub fn needs_drop_components<'tcx>(
 }
 }
 // If any field needs drop, then the whole tuple does.
-ty::Tuple(..) => ty.tuple_fields().try_fold(SmallVec::new(), move |mut acc, elem| {
+ty::Tuple(fields) => fields.iter().try_fold(SmallVec::new(), move |mut acc, elem| {
 acc.extend(needs_drop_components(elem, target_layout)?);
 Ok(acc)
 }),
@@ -1092,7 +1092,7 @@ pub fn is_trivially_const_drop<'tcx>(ty: Ty<'tcx>) -> bool {
 ty::Array(ty, _) | ty::Slice(ty) => is_trivially_const_drop(ty),
-ty::Tuple(tys) => tys.iter().all(|ty| is_trivially_const_drop(ty.expect_ty())),
+ty::Tuple(tys) => tys.iter().all(|ty| is_trivially_const_drop(ty)),
 }
 }

@@ -175,10 +175,10 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
 | ty::Opaque(_, substs)
 | ty::Closure(_, substs)
 | ty::Generator(_, substs, _)
-| ty::Tuple(substs)
 | ty::FnDef(_, substs) => {
 stack.extend(substs.iter().rev());
 }
+ty::Tuple(ts) => stack.extend(ts.as_substs().iter().rev()),
 ty::GeneratorWitness(ts) => {
 stack.extend(ts.skip_binder().iter().rev().map(|ty| ty.into()));
 }

@@ -256,7 +256,7 @@ fn to_upvars_resolved_place_builder<'a, 'tcx>(
 // We must have inferred the capture types since we are building MIR, therefore
 // it's safe to call `tuple_element_ty` and we can unwrap here because
 // we know that the capture exists and is the `capture_index`-th capture.
-let var_ty = substs.tupled_upvars_ty().tuple_element_ty(capture_index).unwrap();
+let var_ty = substs.tupled_upvars_ty().tuple_fields()[capture_index];
 upvar_resolved_place_builder =
 upvar_resolved_place_builder.field(Field::new(capture_index), var_ty);

@@ -1209,7 +1209,7 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
 ) -> Self {
 let ret = match constructor {
 Single | Variant(_) => match ty.kind() {
-ty::Tuple(fs) => Fields::wildcards_from_tys(cx, fs.iter().map(|ty| ty.expect_ty())),
+ty::Tuple(fs) => Fields::wildcards_from_tys(cx, fs.iter()),
 ty::Ref(_, rty, _) => Fields::wildcards_from_tys(cx, once(*rty)),
 ty::Adt(adt, substs) => {
 if adt.is_box() {
@@ -1315,11 +1315,8 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
 match pat.ty.kind() {
 ty::Tuple(fs) => {
 ctor = Single;
-let mut wilds: SmallVec<[_; 2]> = fs
-.iter()
-.map(|ty| ty.expect_ty())
-.map(DeconstructedPat::wildcard)
-.collect();
+let mut wilds: SmallVec<[_; 2]> =
+fs.iter().map(DeconstructedPat::wildcard).collect();
 for pat in subpatterns {
 wilds[pat.field.index()] = mkpat(&pat.pattern);
 }

@@ -866,10 +866,7 @@ where
 let tys: Vec<_> = substs.as_generator().upvar_tys().collect();
 self.open_drop_for_tuple(&tys)
 }
-ty::Tuple(..) => {
-let tys: Vec<_> = ty.tuple_fields().collect();
-self.open_drop_for_tuple(&tys)
-}
+ty::Tuple(fields) => self.open_drop_for_tuple(fields),
 ty::Adt(def, substs) => {
 if def.is_box() {
 self.open_drop_for_box(def, substs)

@@ -843,12 +843,10 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
 // FIXME: enable the general case stated above ^.
 let ty = value.layout.ty;
 // Only do it for tuples
-if let ty::Tuple(substs) = ty.kind() {
+if let ty::Tuple(types) = ty.kind() {
 // Only do it if tuple is also a pair with two scalars
-if substs.len() == 2 {
+if let [ty1, ty2] = types[..] {
 let alloc = self.use_ecx(|this| {
-let ty1 = substs[0].expect_ty();
-let ty2 = substs[1].expect_ty();
 let ty_is_scalar = |ty| {
 this.ecx.layout_of(ty).ok().map(|layout| layout.abi.is_scalar())
 == Some(true)

@@ -692,8 +692,7 @@ impl<'tcx> Inliner<'tcx> {
 // The `tmp0`, `tmp1`, and `tmp2` in our example abonve.
 let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| {
 // This is e.g., `tuple_tmp.0` in our example above.
-let tuple_field =
-Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty.expect_ty()));
+let tuple_field = Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty));
 // Spill to a local to make e.g., `tmp0`.
 self.create_temp_if_necessary(tuple_field, callsite, caller_body)

@@ -148,8 +148,8 @@ fn is_needs_drop_and_init<'tcx>(
 })
 }
-ty::Tuple(_) => ty
-.tuple_fields()
+ty::Tuple(fields) => fields
+.iter()
 .enumerate()
 .map(|(f, f_ty)| (Field::from_usize(f), f_ty, mpi))
 .any(field_needs_drop_and_init),

@@ -461,10 +461,10 @@ impl<'tcx> CloneShimBuilder<'tcx> {
 fn tuple_like_shim<I>(&mut self, dest: Place<'tcx>, src: Place<'tcx>, tys: I)
 where
-I: Iterator<Item = Ty<'tcx>>,
+I: IntoIterator<Item = Ty<'tcx>>,
 {
 let mut previous_field = None;
-for (i, ity) in tys.enumerate() {
+for (i, ity) in tys.into_iter().enumerate() {
 let field = Field::new(i);
 let src_field = self.tcx.mk_place_field(src, field, ity);

@@ -446,7 +446,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
 ty::Tuple(tys) => {
 self.push("T");
-for ty in tys.iter().map(|k| k.expect_ty()) {
+for ty in tys.iter() {
 self = ty.print(self)?;
 }
 self.push("E");

@@ -834,10 +834,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
 let expected_ty = expected_trait_ref.skip_binder().substs.type_at(1);
 let expected = match expected_ty.kind() {
-ty::Tuple(ref tys) => tys
-.iter()
-.map(|t| ArgKind::from_expected_ty(t.expect_ty(), Some(span)))
-.collect(),
+ty::Tuple(ref tys) => {
+tys.iter().map(|t| ArgKind::from_expected_ty(t, Some(span))).collect()
+}
 _ => vec![ArgKind::Arg("_".to_owned(), expected_ty.to_string())],
 };

@@ -1301,7 +1301,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
 if tcx.fn_trait_kind_from_lang_item(trait_ref.def_id()).is_some() =>
 {
 tcx.mk_fn_sig(
-inputs.iter().map(|k| k.expect_ty()),
+inputs.iter(),
 tcx.mk_ty_infer(ty::TyVar(ty::TyVid::from_u32(0))),
 false,
 hir::Unsafety::Normal,

@@ -108,7 +108,7 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
 // (T1..Tn) and closures have same properties as T1..Tn --
 // check if *any* of those are trivial.
-ty::Tuple(ref tys) => tys.iter().all(|t| trivial_dropck_outlives(tcx, t.expect_ty())),
+ty::Tuple(tys) => tys.iter().all(|t| trivial_dropck_outlives(tcx, t)),
 ty::Closure(_, ref substs) => {
 trivial_dropck_outlives(tcx, substs.as_closure().tupled_upvars_ty())
 }

@@ -1015,9 +1015,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 // Check that the source tuple with the target's
 // last element is equal to the target.
-let new_tuple = tcx.mk_tup(
-a_mid.iter().map(|k| k.expect_ty()).chain(iter::once(b_last.expect_ty())),
-);
+let new_tuple = tcx.mk_tup(a_mid.iter().copied().chain(iter::once(b_last)));
 let InferOk { obligations, .. } = self
 .infcx
 .at(&obligation.cause, obligation.param_env)
@@ -1033,8 +1031,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 obligation.cause.clone(),
 obligation.predicate.def_id(),
 obligation.recursion_depth + 1,
-a_last.expect_ty(),
-&[b_last],
+a_last,
+&[b_last.into()],
 )
 }));
 }
@@ -1097,7 +1095,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 stack.push(ty);
 }
 ty::Tuple(tys) => {
-stack.extend(tys.iter().map(|ty| ty.expect_ty()));
+stack.extend(tys.iter());
 }
 ty::Closure(_, substs) => {
 stack.push(substs.as_closure().tupled_upvars_ty());

@@ -1852,9 +1852,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => None,
 ty::Tuple(tys) => Where(
-obligation
-.predicate
-.rebind(tys.last().into_iter().map(|k| k.expect_ty()).collect()),
+obligation.predicate.rebind(tys.last().map_or_else(Vec::new, |&last| vec![last])),
 ),
 ty::Adt(def, substs) => {
@@ -1917,7 +1915,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 ty::Tuple(tys) => {
 // (*) binder moved here
-Where(obligation.predicate.rebind(tys.iter().map(|k| k.expect_ty()).collect()))
+Where(obligation.predicate.rebind(tys.iter().collect()))
 }
 ty::Closure(_, substs) => {
@@ -1997,7 +1995,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 ty::Tuple(ref tys) => {
 // (T1, ..., Tn) -- meets any bound that all of T1...Tn meet
-t.rebind(tys.iter().map(|k| k.expect_ty()).collect())
+t.rebind(tys.iter().collect())
 }
 ty::Closure(_, ref substs) => {

@@ -529,8 +529,8 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> {
 ty::Tuple(ref tys) => {
 if let Some((_last, rest)) = tys.split_last() {
-for elem in rest {
-self.require_sized(elem.expect_ty(), traits::TupleElem);
+for &elem in rest {
+self.require_sized(elem, traits::TupleElem);
 }
 }
 }

@@ -565,7 +565,7 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
 // FIXME(chalk): actually get hidden ty
 self.interner
 .tcx
-.mk_ty(ty::Tuple(self.interner.tcx.intern_substs(&[])))
+.mk_ty(ty::Tuple(self.interner.tcx.intern_type_list(&[])))
 .lower_into(self.interner)
 }

@@ -326,7 +326,9 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Ty<RustInterner<'tcx>>> for Ty<'tcx> {
 ty::Generator(_def_id, _substs, _) => unimplemented!(),
 ty::GeneratorWitness(_) => unimplemented!(),
 ty::Never => chalk_ir::TyKind::Never,
-ty::Tuple(substs) => chalk_ir::TyKind::Tuple(substs.len(), substs.lower_into(interner)),
+ty::Tuple(types) => {
+chalk_ir::TyKind::Tuple(types.len(), types.as_substs().lower_into(interner))
+}
 ty::Projection(proj) => chalk_ir::TyKind::Alias(proj.lower_into(interner)),
 ty::Opaque(def_id, substs) => {
 chalk_ir::TyKind::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy {
@@ -398,7 +400,9 @@ impl<'tcx> LowerInto<'tcx, Ty<'tcx>> for &chalk_ir::Ty<RustInterner<'tcx>> {
 TyKind::Generator(..) => unimplemented!(),
 TyKind::GeneratorWitness(..) => unimplemented!(),
 TyKind::Never => ty::Never,
-TyKind::Tuple(_len, substitution) => ty::Tuple(substitution.lower_into(interner)),
+TyKind::Tuple(_len, substitution) => {
+ty::Tuple(substitution.lower_into(interner).try_as_type_list().unwrap())
+}
 TyKind::Slice(ty) => ty::Slice(ty.lower_into(interner)),
 TyKind::Raw(mutbl, ty) => ty::RawPtr(ty::TypeAndMut {
 ty: ty.lower_into(interner),

@@ -198,14 +198,7 @@ fn dtorck_constraint_for_ty<'tcx>(
 ty::Tuple(tys) => rustc_data_structures::stack::ensure_sufficient_stack(|| {
 for ty in tys.iter() {
-dtorck_constraint_for_ty(
-tcx,
-span,
-for_ty,
-depth + 1,
-ty.expect_ty(),
-constraints,
-)?;
+dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?;
 }
 Ok::<_, NoSolution>(())
 })?,

@@ -70,9 +70,9 @@ fn are_inner_types_recursive<'tcx>(
 ) -> Representability {
 debug!("are_inner_types_recursive({:?}, {:?}, {:?})", ty, seen, shadow_seen);
 match ty.kind() {
-ty::Tuple(..) => {
+ty::Tuple(fields) => {
 // Find non representable
-fold_repr(ty.tuple_fields().map(|ty| {
+fold_repr(fields.iter().map(|ty| {
 is_type_structurally_recursive(
 tcx,
 sp,

@@ -24,7 +24,7 @@ fn sized_constraint_for_ty<'tcx>(
 Tuple(ref tys) => match tys.last() {
 None => vec![],
-Some(ty) => sized_constraint_for_ty(tcx, adtdef, ty.expect_ty()),
+Some(&ty) => sized_constraint_for_ty(tcx, adtdef, ty),
 },
 Adt(adt, substs) => {
@@ -461,9 +461,9 @@ pub fn conservative_is_privately_uninhabited_raw<'tcx>(
 })
 })
 }
-ty::Tuple(..) => {
+ty::Tuple(fields) => {
 debug!("ty::Tuple(..) =>");
-ty.tuple_fields().any(|ty| tcx.conservative_is_privately_uninhabited(param_env.and(ty)))
+fields.iter().any(|ty| tcx.conservative_is_privately_uninhabited(param_env.and(ty)))
 }
 ty::Array(ty, len) => {
 debug!("ty::Array(ty, len) =>");

@@ -111,7 +111,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 },
 ty::Tuple(fields) => match fields.last() {
 None => Some(PointerKind::Thin),
-Some(f) => self.pointer_kind(f.expect_ty(), span)?,
+Some(&f) => self.pointer_kind(f, span)?,
 },
 // Pointers to foreign types are thin, despite being unsized

@@ -270,7 +270,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 debug!("deduce_sig_from_projection: arg_param_ty={:?}", arg_param_ty);
 match arg_param_ty.kind() {
-ty::Tuple(tys) => tys.into_iter().map(|k| k.expect_ty()).collect::<Vec<_>>(),
+&ty::Tuple(tys) => tys,
 _ => return None,
 }
 } else {
@@ -286,7 +286,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 let sig = projection.rebind(self.tcx.mk_fn_sig(
 input_tys.iter(),
-&ret_param_ty,
+ret_param_ty,
 false,
 hir::Unsafety::Normal,
 Abi::Rust,

@@ -1291,7 +1291,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| match flds {
 Some(fs) if i < fs.len() => {
-let ety = fs[i].expect_ty();
+let ety = fs[i];
 self.check_expr_coercable_to_type(&e, ety, None);
 ety
 }
@@ -1877,13 +1877,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 let fstr = field.as_str();
 if let Ok(index) = fstr.parse::<usize>() {
 if fstr == index.to_string() {
-if let Some(field_ty) = tys.get(index) {
+if let Some(&field_ty) = tys.get(index) {
 let adjustments = self.adjust_steps(&autoderef);
 self.apply_adjustments(base, adjustments);
 self.register_predicates(autoderef.into_obligations());
 self.write_field_index(expr.hir_id, index);
-return field_ty.expect_ty();
+return field_ty;
 }
 }
 }

@@ -147,12 +147,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 }
 let expected_input_tys = match expected_input_tys.get(0) {
 Some(&ty) => match ty.kind() {
-ty::Tuple(ref tys) => tys.iter().map(|k| k.expect_ty()).collect(),
+ty::Tuple(tys) => tys.iter().collect(),
 _ => vec![],
 },
 None => vec![],
 };
-(arg_types.iter().map(|k| k.expect_ty()).collect(), expected_input_tys)
+(arg_types.iter().collect(), expected_input_tys)
 }
 _ => {
 // Otherwise, there's a mismatch, so clear out what we're expecting, and set
@@ -495,12 +495,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 expected_input_tys: &[Ty<'tcx>],
 provided_args: &'tcx [hir::Expr<'tcx>],
 ) -> Option<FnArgsAsTuple<'_>> {
-let [expected_arg_type] = &expected_input_tys[..] else { return None };
+let [expected_arg_type] = expected_input_tys[..] else { return None };
-let ty::Tuple(expected_elems) = self.resolve_vars_if_possible(*expected_arg_type).kind()
+let &ty::Tuple(expected_types) = self.resolve_vars_if_possible(expected_arg_type).kind()
 else { return None };
-let expected_types: Vec<_> = expected_elems.iter().map(|k| k.expect_ty()).collect();
 let supplied_types: Vec<_> = provided_args.iter().map(|arg| self.check_expr(arg)).collect();
 let all_match = iter::zip(expected_types, supplied_types)

@@ -539,16 +539,16 @@ pub fn check_must_not_suspend_ty<'tcx>(
 }
 has_emitted
 }
-ty::Tuple(_) => {
+ty::Tuple(fields) => {
 let mut has_emitted = false;
 let comps = match data.expr.map(|e| &e.kind) {
 Some(hir::ExprKind::Tup(comps)) => {
-debug_assert_eq!(comps.len(), ty.tuple_fields().count());
+debug_assert_eq!(comps.len(), fields.len());
 Some(comps)
 }
 _ => None,
 };
-for (i, ty) in ty.tuple_fields().enumerate() {
+for (i, ty) in fields.iter().enumerate() {
 let descr_post = &format!(" in tuple element {}", i);
 let span = comps.and_then(|c| c.get(i)).map(|e| e.span).unwrap_or(data.source_span);
 if check_must_not_suspend_ty(

View file

@ -9,7 +9,6 @@ use rustc_hir::pat_util::EnumerateAndAdjustIterator;
use rustc_hir::{HirId, Pat, PatKind}; use rustc_hir::{HirId, Pat, PatKind};
use rustc_infer::infer; use rustc_infer::infer;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_middle::ty::subst::GenericArg;
use rustc_middle::ty::{self, Adt, BindingMode, Ty, TypeFoldable}; use rustc_middle::ty::{self, Adt, BindingMode, Ty, TypeFoldable};
use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS; use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
use rustc_span::hygiene::DesugaringKind; use rustc_span::hygiene::DesugaringKind;
@ -1072,7 +1071,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
(ty::Adt(_, substs), [field], false) => { (ty::Adt(_, substs), [field], false) => {
let field_ty = self.field_ty(pat_span, field, substs); let field_ty = self.field_ty(pat_span, field, substs);
match field_ty.kind() { match field_ty.kind() {
ty::Tuple(_) => field_ty.tuple_fields().count() == subpats.len(), ty::Tuple(fields) => fields.len() == subpats.len(),
_ => false, _ => false,
} }
} }
@ -1183,13 +1182,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let max_len = cmp::max(expected_len, elements.len()); let max_len = cmp::max(expected_len, elements.len());
let element_tys_iter = (0..max_len).map(|_| { let element_tys_iter = (0..max_len).map(|_| {
GenericArg::from(self.next_ty_var( self.next_ty_var(
// FIXME: `MiscVariable` for now -- obtaining the span and name information // FIXME: `MiscVariable` for now -- obtaining the span and name information
// from all tuple elements isn't trivial. // from all tuple elements isn't trivial.
TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span }, TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span },
)) )
}); });
let element_tys = tcx.mk_substs(element_tys_iter); let element_tys = tcx.mk_type_list(element_tys_iter);
let pat_ty = tcx.mk_ty(ty::Tuple(element_tys)); let pat_ty = tcx.mk_ty(ty::Tuple(element_tys));
if let Some(mut err) = self.demand_eqtype_pat_diag(span, expected, pat_ty, ti) { if let Some(mut err) = self.demand_eqtype_pat_diag(span, expected, pat_ty, ti) {
err.emit(); err.emit();
@ -1202,7 +1201,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
tcx.mk_tup(element_tys_iter) tcx.mk_tup(element_tys_iter)
} else { } else {
for (i, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) { for (i, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) {
self.check_pat(elem, element_tys[i].expect_ty(), def_bm, ti); self.check_pat(elem, element_tys[i], def_bm, ti);
} }
pat_ty pat_ty
} }
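In the hunk above, `tcx.mk_type_list` interns the freshly created element type variables as a type list instead of wrapping them into substs via `mk_substs`. A rough, self-contained model of what an interning constructor provides, using a `HashMap`-backed interner and toy types in place of the arena-based compiler machinery (nothing here is the real rustc API):

```rust
use std::collections::HashMap;
use std::rc::Rc;

// Toy stand-in for an interned, Copy-able type.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Ty {
    Bool,
    I32,
}

#[derive(Default)]
struct Interner {
    lists: HashMap<Vec<Ty>, Rc<[Ty]>>,
}

impl Interner {
    // Rough analogue of `tcx.mk_type_list`: identical inputs yield the same allocation.
    fn mk_type_list(&mut self, tys: impl IntoIterator<Item = Ty>) -> Rc<[Ty]> {
        let key: Vec<Ty> = tys.into_iter().collect();
        self.lists.entry(key.clone()).or_insert_with(|| key.into()).clone()
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.mk_type_list([Ty::I32, Ty::Bool]);
    let b = interner.mk_type_list([Ty::I32, Ty::Bool]);
    // Interned lists are deduplicated, so they can be compared by pointer.
    assert!(Rc::ptr_eq(&a, &b));
    // Elements are plain `Ty` values: index and iterate without an unwrap step.
    assert_eq!(a[0], Ty::I32);
    println!("ok");
}
```

Because interned lists are unique per content, equal lists share one allocation and equality checks reduce to pointer comparisons.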

View file

@ -1448,7 +1448,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) )
} }
ty::Tuple(..) => { ty::Tuple(fields) => {
// Only Field projections can be applied to a tuple. // Only Field projections can be applied to a tuple.
assert!( assert!(
captured_by_move_projs.iter().all(|projs| matches!( captured_by_move_projs.iter().all(|projs| matches!(
@ -1457,7 +1457,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
)) ))
); );
base_path_ty.tuple_fields().enumerate().any(|(i, element_ty)| { fields.iter().enumerate().any(|(i, element_ty)| {
let paths_using_field = captured_by_move_projs let paths_using_field = captured_by_move_projs
.iter() .iter()
.filter_map(|projs| { .filter_map(|projs| {

View file

@ -278,7 +278,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
ty::Tuple(subtys) => { ty::Tuple(subtys) => {
for subty in subtys { for subty in subtys {
self.add_constraints_from_ty(current, subty.expect_ty(), variance); self.add_constraints_from_ty(current, subty, variance);
} }
} }

View file

@ -1543,7 +1543,7 @@ impl<'tcx> Clean<Type> for Ty<'tcx> {
DynTrait(bounds, lifetime) DynTrait(bounds, lifetime)
} }
ty::Tuple(t) => Tuple(t.iter().map(|t| t.expect_ty().clean(cx)).collect()), ty::Tuple(t) => Tuple(t.iter().map(|t| t.clean(cx)).collect()),
ty::Projection(ref data) => data.clean(cx), ty::Projection(ref data) => data.clean(cx),

View file

@ -109,7 +109,7 @@ fn external_generic_args(
if cx.tcx.fn_trait_kind_from_lang_item(did).is_some() { if cx.tcx.fn_trait_kind_from_lang_item(did).is_some() {
let inputs = match ty_kind.unwrap() { let inputs = match ty_kind.unwrap() {
ty::Tuple(tys) => tys.iter().map(|t| t.expect_ty().clean(cx)).collect(), ty::Tuple(tys) => tys.iter().map(|t| t.clean(cx)).collect(),
_ => return GenericArgs::AngleBracketed { args, bindings: bindings.into() }, _ => return GenericArgs::AngleBracketed { args, bindings: bindings.into() },
}; };
let output = None; let output = None;

View file

@ -193,7 +193,7 @@ fn is_mutable_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, span: Span, tys: &m
|| KNOWN_WRAPPER_TYS.iter().any(|path| match_def_path(cx, adt.did, path)) || KNOWN_WRAPPER_TYS.iter().any(|path| match_def_path(cx, adt.did, path))
&& substs.types().any(|ty| is_mutable_ty(cx, ty, span, tys)) && substs.types().any(|ty| is_mutable_ty(cx, ty, span, tys))
}, },
ty::Tuple(substs) => substs.types().any(|ty| is_mutable_ty(cx, ty, span, tys)), ty::Tuple(substs) => substs.iter().any(|ty| is_mutable_ty(cx, ty, span, tys)),
ty::Array(ty, _) | ty::Slice(ty) => is_mutable_ty(cx, ty, span, tys), ty::Array(ty, _) | ty::Slice(ty) => is_mutable_ty(cx, ty, span, tys),
ty::RawPtr(ty::TypeAndMut { ty, mutbl }) | ty::Ref(_, ty, mutbl) => { ty::RawPtr(ty::TypeAndMut { ty, mutbl }) | ty::Ref(_, ty, mutbl) => {
mutbl == hir::Mutability::Mut || is_mutable_ty(cx, ty, span, tys) mutbl == hir::Mutability::Mut || is_mutable_ty(cx, ty, span, tys)

View file

@ -58,8 +58,8 @@ fn type_needs_ordered_drop_inner<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, see
// This type doesn't implement drop, so no side effects here. // This type doesn't implement drop, so no side effects here.
// Check if any component type has any. // Check if any component type has any.
match ty.kind() { match ty.kind() {
ty::Tuple(_) => ty.tuple_fields().any(|ty| type_needs_ordered_drop_inner(cx, ty, seen)), ty::Tuple(fields) => fields.iter().any(|ty| type_needs_ordered_drop_inner(cx, ty, seen)),
ty::Array(ty, _) => type_needs_ordered_drop_inner(cx, *ty, seen), &ty::Array(ty, _) => type_needs_ordered_drop_inner(cx, ty, seen),
ty::Adt(adt, subs) => adt ty::Adt(adt, subs) => adt
.all_fields() .all_fields()
.map(|f| f.ty(cx.tcx, subs)) .map(|f| f.ty(cx.tcx, subs))
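A small recurring pattern in these clippy hunks: since the element types are Copy, the code either puts a `&` on the match arm (as in `&ty::Array(ty, _)` above) or derefs the scrutinee up front (as in `match *ty.kind()` further down) to bind fields by value instead of dereferencing afterwards. A standalone sketch of the two equivalent styles, with toy types standing in for `Ty`/`TyKind`:

```rust
// Toy stand-ins: the real `Ty<'tcx>` is a Copy interned pointer and
// `ty.kind()` hands back a reference to its `TyKind`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct ElemTy(u32);

#[derive(Debug)]
enum TyKind {
    Array(ElemTy, usize),
    Bool,
}

struct Ty {
    kind: TyKind,
}

impl Ty {
    fn kind(&self) -> &TyKind {
        &self.kind
    }
}

// Style 1: a `&`-pattern on the arm copies the Copy field out of the reference.
fn element_a(ty: &Ty) -> Option<ElemTy> {
    match ty.kind() {
        &TyKind::Array(elem, _) => Some(elem),
        _ => None,
    }
}

// Style 2: deref the scrutinee once; only Copy fields are bound by value,
// so the non-Copy `TyKind` itself is never moved.
fn element_b(ty: &Ty) -> Option<ElemTy> {
    match *ty.kind() {
        TyKind::Array(elem, _) => Some(elem),
        _ => None,
    }
}

fn main() {
    let arr = Ty { kind: TyKind::Array(ElemTy(7), 3) };
    let not_arr = Ty { kind: TyKind::Bool };
    assert_eq!(element_a(&arr), element_b(&arr));
    assert_eq!(element_a(&not_arr), None);
    if let &TyKind::Array(_, len) = arr.kind() {
        assert_eq!(len, 3);
    }
    println!("{:?}", element_b(&arr));
}
```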

View file

@ -142,7 +142,7 @@ fn is_interior_mutable_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, span: Sp
size.try_eval_usize(cx.tcx, cx.param_env).map_or(true, |u| u != 0) size.try_eval_usize(cx.tcx, cx.param_env).map_or(true, |u| u != 0)
&& is_interior_mutable_type(cx, inner_ty, span) && is_interior_mutable_type(cx, inner_ty, span)
}, },
Tuple(..) => ty.tuple_fields().any(|ty| is_interior_mutable_type(cx, ty, span)), Tuple(fields) => fields.iter().any(|ty| is_interior_mutable_type(cx, ty, span)),
Adt(def, substs) => { Adt(def, substs) => {
// Special case for collections in `std` who's impl of `Hash` or `Ord` delegates to // Special case for collections in `std` who's impl of `Hash` or `Ord` delegates to
// that of their type parameters. Note: we don't include `HashSet` and `HashMap` // that of their type parameters. Note: we don't include `HashSet` and `HashMap`

View file

@ -202,8 +202,8 @@ fn ty_allowed_with_raw_pointer_heuristic<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'t
// The type is known to be `!Send` and `!Copy` // The type is known to be `!Send` and `!Copy`
match ty.kind() { match ty.kind() {
ty::Tuple(_) => ty ty::Tuple(fields) => fields
.tuple_fields() .iter()
.all(|ty| ty_allowed_with_raw_pointer_heuristic(cx, ty, send_trait)), .all(|ty| ty_allowed_with_raw_pointer_heuristic(cx, ty, send_trait)),
ty::Array(ty, _) | ty::Slice(ty) => ty_allowed_with_raw_pointer_heuristic(cx, *ty, send_trait), ty::Array(ty, _) | ty::Slice(ty) => ty_allowed_with_raw_pointer_heuristic(cx, *ty, send_trait),
ty::Adt(_, substs) => { ty::Adt(_, substs) => {

View file

@ -2,7 +2,7 @@ use super::TRANSMUTE_UNDEFINED_REPR;
use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::diagnostics::span_lint_and_then;
use rustc_hir::Expr; use rustc_hir::Expr;
use rustc_lint::LateContext; use rustc_lint::LateContext;
use rustc_middle::ty::subst::{GenericArg, Subst}; use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::{self, Ty, TypeAndMut}; use rustc_middle::ty::{self, Ty, TypeAndMut};
use rustc_span::Span; use rustc_span::Span;
@ -246,11 +246,10 @@ fn reduce_ty<'tcx>(cx: &LateContext<'tcx>, mut ty: Ty<'tcx>) -> ReducedTy<'tcx>
continue; continue;
}, },
ty::Tuple(args) => { ty::Tuple(args) => {
let mut iter = args.iter().map(GenericArg::expect_ty); let Some(sized_ty) = args.iter().find(|&ty| !is_zero_sized_ty(cx, ty)) else {
let Some(sized_ty) = iter.find(|ty| !is_zero_sized_ty(cx, *ty)) else {
return ReducedTy::OrderedFields(ty); return ReducedTy::OrderedFields(ty);
}; };
if iter.all(|ty| is_zero_sized_ty(cx, ty)) { if args.iter().all(|ty| is_zero_sized_ty(cx, ty)) {
ty = sized_ty; ty = sized_ty;
continue; continue;
} }
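One detail visible in this hunk: the old code kept advancing a single iterator, so the trailing `all` pass only examined the elements after the one `find` stopped at, whereas the new code builds a fresh `args.iter()` for each pass and therefore starts from the front again. A small standalone illustration of that difference using a plain slice:

```rust
fn main() {
    let xs = [0usize, 3, 0, 0];

    // Continuing one iterator: `find` consumes elements up to and including
    // the match, so the later `all` only sees the remaining tail.
    let mut iter = xs.iter();
    let found = iter.find(|&&x| x != 0);
    let rest_all_zero = iter.all(|&x| x == 0);
    println!("found={found:?}, rest_all_zero={rest_all_zero}");

    // Fresh iterators: each pass starts from the beginning again.
    let found_again = xs.iter().find(|&&x| x != 0);
    let all_zero = xs.iter().all(|&x| x == 0);
    println!("found_again={found_again:?}, all_zero={all_zero}");
}
```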

View file

@ -169,7 +169,7 @@ pub fn is_must_use_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
// because we don't want to lint functions returning empty arrays // because we don't want to lint functions returning empty arrays
is_must_use_ty(cx, *ty) is_must_use_ty(cx, *ty)
}, },
ty::Tuple(substs) => substs.types().any(|ty| is_must_use_ty(cx, ty)), ty::Tuple(substs) => substs.iter().any(|ty| is_must_use_ty(cx, ty)),
ty::Opaque(ref def_id, _) => { ty::Opaque(ref def_id, _) => {
for (predicate, _) in cx.tcx.explicit_item_bounds(*def_id) { for (predicate, _) in cx.tcx.explicit_item_bounds(*def_id) {
if let ty::PredicateKind::Trait(trait_predicate) = predicate.kind().skip_binder() { if let ty::PredicateKind::Trait(trait_predicate) = predicate.kind().skip_binder() {
@ -249,11 +249,11 @@ pub fn is_non_aggregate_primitive_type(ty: Ty<'_>) -> bool {
/// Returns `true` if the given type is a primitive (a `bool` or `char`, any integer or /// Returns `true` if the given type is a primitive (a `bool` or `char`, any integer or
/// floating-point number type, a `str`, or an array, slice, or tuple of those types). /// floating-point number type, a `str`, or an array, slice, or tuple of those types).
pub fn is_recursively_primitive_type(ty: Ty<'_>) -> bool { pub fn is_recursively_primitive_type(ty: Ty<'_>) -> bool {
match ty.kind() { match *ty.kind() {
ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str => true, ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str => true,
ty::Ref(_, inner, _) if *inner.kind() == ty::Str => true, ty::Ref(_, inner, _) if *inner.kind() == ty::Str => true,
ty::Array(inner_type, _) | ty::Slice(inner_type) => is_recursively_primitive_type(*inner_type), ty::Array(inner_type, _) | ty::Slice(inner_type) => is_recursively_primitive_type(inner_type),
ty::Tuple(inner_types) => inner_types.types().all(is_recursively_primitive_type), ty::Tuple(inner_types) => inner_types.iter().all(is_recursively_primitive_type),
_ => false, _ => false,
} }
} }
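Throughout this utilities file (and in the `ExprFnSig::input` hunk further down), `tuple_fields()` and `ty::Tuple(..)` now expose the whole list of field types, so callers reach for `len()`, indexing, `get`, and by-value iteration instead of the old `types()`/`expect_ty`/`tuple_element_ty` helpers. A small self-contained model of that list-style interface, with a `Vec`-backed toy standing in for `&'tcx List<Ty<'tcx>>` (the names are illustrative, not rustc APIs):

```rust
// Toy stand-in for a Copy, interned type.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Ty(&'static str);

struct TupleTy {
    fields: Vec<Ty>, // stands in for &'tcx List<Ty<'tcx>>
}

impl TupleTy {
    // Analogue of the new `tuple_fields()`: hand back the whole list.
    fn tuple_fields(&self) -> &[Ty] {
        &self.fields
    }
}

fn main() {
    let t = TupleTy { fields: vec![Ty("u32"), Ty("bool")] };
    let fields = t.tuple_fields();

    // `len()` replaces counting an iterator.
    assert_eq!(fields.len(), 2);
    // Indexing replaces `tuple_element_ty(i).unwrap()`; like the old unwrap,
    // an out-of-range index panics.
    assert_eq!(fields[1], Ty("bool"));
    // Bounds-checked access is still available via `get`.
    assert_eq!(fields.get(5), None);
    // Iteration yields field types directly, with no unwrap step.
    for (i, ty) in fields.iter().enumerate() {
        println!("field {i}: {ty:?}");
    }
}
```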
@ -393,9 +393,9 @@ pub fn same_type_and_consts<'tcx>(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
/// Checks if a given type looks safe to be uninitialized. /// Checks if a given type looks safe to be uninitialized.
pub fn is_uninit_value_valid_for_ty(cx: &LateContext<'_>, ty: Ty<'_>) -> bool { pub fn is_uninit_value_valid_for_ty(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
match ty.kind() { match *ty.kind() {
ty::Array(component, _) => is_uninit_value_valid_for_ty(cx, *component), ty::Array(component, _) => is_uninit_value_valid_for_ty(cx, component),
ty::Tuple(types) => types.types().all(|ty| is_uninit_value_valid_for_ty(cx, ty)), ty::Tuple(types) => types.iter().all(|ty| is_uninit_value_valid_for_ty(cx, ty)),
ty::Adt(adt, _) => cx.tcx.lang_items().maybe_uninit() == Some(adt.did), ty::Adt(adt, _) => cx.tcx.lang_items().maybe_uninit() == Some(adt.did),
_ => false, _ => false,
} }
@ -426,8 +426,8 @@ impl<'tcx> ExprFnSig<'tcx> {
pub fn input(self, i: usize) -> Binder<'tcx, Ty<'tcx>> { pub fn input(self, i: usize) -> Binder<'tcx, Ty<'tcx>> {
match self { match self {
Self::Sig(sig) => sig.input(i), Self::Sig(sig) => sig.input(i),
Self::Closure(sig) => sig.input(0).map_bound(|ty| ty.tuple_element_ty(i).unwrap()), Self::Closure(sig) => sig.input(0).map_bound(|ty| ty.tuple_fields()[i]),
Self::Trait(inputs, _) => inputs.map_bound(|ty| ty.tuple_element_ty(i).unwrap()), Self::Trait(inputs, _) => inputs.map_bound(|ty| ty.tuple_fields()[i]),
} }
} }