iterate List by value

Bastian Kauschke 2020-05-23 11:49:24 +02:00
parent 647ae50ce6
commit da57cedd21
40 changed files with 82 additions and 72 deletions

View file

@ -48,7 +48,7 @@ pub fn push_debuginfo_type_name<'tcx>(
}
ty::Tuple(component_types) => {
output.push('(');
for &component_type in component_types {
for component_type in component_types {
push_debuginfo_type_name(tcx, component_type.expect_ty(), true, output, visited);
output.push_str(", ");
}

View file

@ -87,7 +87,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
) -> CanonicalVarValues<'tcx> {
let var_values: IndexVec<BoundVar, GenericArg<'tcx>> = variables
.iter()
.map(|info| self.instantiate_canonical_var(span, *info, &universe_map))
.map(|info| self.instantiate_canonical_var(span, info, &universe_map))
.collect();
CanonicalVarValues { var_values }

View file

@ -464,12 +464,12 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
if info.is_existential() {
match opt_values[BoundVar::new(index)] {
Some(k) => k,
None => self.instantiate_canonical_var(cause.span, *info, |u| {
None => self.instantiate_canonical_var(cause.span, info, |u| {
universe_map[u.as_usize()]
}),
}
} else {
self.instantiate_canonical_var(cause.span, *info, |u| {
self.instantiate_canonical_var(cause.span, info, |u| {
universe_map[u.as_usize()]
})
}

View file

@ -50,7 +50,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> {
// for further background and discussion.
let mut bounds = substs
.iter()
.filter_map(|&child| match child.unpack() {
.filter_map(|child| match child.unpack() {
GenericArgKind::Type(ty) => Some(self.type_bound(ty)),
GenericArgKind::Lifetime(_) => None,
GenericArgKind::Const(_) => Some(self.recursive_bound(child)),
@ -223,8 +223,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> {
// like `T` and `T::Item`. It may not work as well for things
// like `<T as Foo<'a>>::Item`.
let c_b = self.param_env.caller_bounds;
let param_bounds =
self.collect_outlives_from_predicate_list(&compare_ty, c_b.into_iter().copied());
let param_bounds = self.collect_outlives_from_predicate_list(&compare_ty, c_b.into_iter());
// Next, collect regions we scraped from the well-formedness
// constraints in the fn signature. To do that, we walk the list

View file

@ -2077,10 +2077,10 @@ impl Debug for Place<'_> {
ProjectionElem::ConstantIndex { offset, min_length, from_end: true } => {
write!(fmt, "[-{:?} of {:?}]", offset, min_length)?;
}
ProjectionElem::Subslice { from, to, from_end: true } if *to == 0 => {
ProjectionElem::Subslice { from, to, from_end: true } if to == 0 => {
write!(fmt, "[{:?}:]", from)?;
}
ProjectionElem::Subslice { from, to, from_end: true } if *from == 0 => {
ProjectionElem::Subslice { from, to, from_end: true } if from == 0 => {
write!(fmt, "[:-{:?}]", to)?;
}
ProjectionElem::Subslice { from, to, from_end: true } => {

View file

@ -129,7 +129,7 @@ impl FlagComputation {
&ty::Dynamic(ref obj, r) => {
let mut computation = FlagComputation::new();
for predicate in obj.skip_binder().iter() {
match *predicate {
match predicate {
ty::ExistentialPredicate::Trait(tr) => computation.add_substs(tr.substs),
ty::ExistentialPredicate::Projection(p) => {
let mut proj_computation = FlagComputation::new();

View file

@ -5,6 +5,7 @@ use rustc_serialize::{Encodable, Encoder};
use std::cmp::{self, Ordering};
use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter;
use std::mem;
use std::ops::Deref;
use std::ptr;
@ -21,6 +22,10 @@ extern "C" {
/// the same contents can exist in the same context.
/// This means we can use pointer for both
/// equality comparisons and hashing.
///
/// Unlike slices, the types contained in `List` are expected to be `Copy`
/// and iterating over a `List` returns `T` instead of a reference.
///
/// Note: `Slice` was already taken by the `Ty`.
#[repr(C)]
pub struct List<T> {
@ -61,6 +66,15 @@ impl<T: Copy> List<T> {
result
}
}
// If this method didn't exist, we would use `slice.iter` due to
// deref coercion.
//
// This would be weird, as `self.into_iter` iterates over `T` directly.
#[inline(always)]
pub fn iter(&self) -> <&'_ List<T> as IntoIterator>::IntoIter {
self.into_iter()
}
}
impl<T: fmt::Debug> fmt::Debug for List<T> {
@ -128,12 +142,12 @@ impl<T> AsRef<[T]> for List<T> {
}
}
impl<'a, T> IntoIterator for &'a List<T> {
type Item = &'a T;
type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
impl<'a, T: Copy> IntoIterator for &'a List<T> {
type Item = T;
type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;
#[inline(always)]
fn into_iter(self) -> Self::IntoIter {
self[..].iter()
self[..].iter().copied()
}
}
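
The hunk above is the heart of the commit: `&List<T>` now iterates by value (`T`) through `iter::Copied`, and the inherent `iter` method forwards to `into_iter` so callers do not silently fall back to `<[T]>::iter` via deref coercion and get `&T` back. Below is a minimal, self-contained sketch of the same pattern; the `Vec`-backed wrapper, its `data` field, and the `main` driver are illustrative stand-ins for rustc's interned, arena-allocated `List`, while the two impls mirror the ones shown in the diff.

use std::iter;
use std::ops::Deref;

// Simplified stand-in for rustc's interned, arena-allocated `List<T>`.
struct List<T> {
    data: Vec<T>,
}

impl<T> Deref for List<T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        &self.data
    }
}

impl<T: Copy> List<T> {
    // Without this inherent method, `list.iter()` would resolve to
    // `<[T]>::iter` through deref coercion and yield `&T`, while
    // `for x in &list` would yield `T` -- an inconsistent pair.
    fn iter(&self) -> <&'_ List<T> as IntoIterator>::IntoIter {
        self.into_iter()
    }
}

impl<'a, T: Copy> IntoIterator for &'a List<T> {
    type Item = T;
    type IntoIter = iter::Copied<<&'a [T] as IntoIterator>::IntoIter>;
    fn into_iter(self) -> Self::IntoIter {
        self[..].iter().copied()
    }
}

fn main() {
    let list = List { data: vec![1u32, 2, 3] };
    // Iteration yields `u32` directly, so no `&x`, `*x`, or `.copied()` is needed.
    let mut sum = 0;
    for x in &list {
        sum += x;
    }
    assert_eq!(sum, 6);
    // The inherent method gives the same by-value iterator.
    assert_eq!(list.iter().max(), Some(3));
}

With `Item = T`, the `match *predicate`, `|&x| ...`, `.copied()`, and `.cloned()` noise at the iteration sites in the rest of this diff becomes unnecessary, which is what the remaining hunks mechanically remove (or, where a reference is still required, they add an explicit `&`).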

View file

@ -70,7 +70,7 @@ fn compute_components(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, out: &mut SmallVec<[Compo
// consistent with previous (accidental) behavior.
// See https://github.com/rust-lang/rust/issues/70917
// for further background and discussion.
for &child in substs {
for child in substs {
match child.unpack() {
GenericArgKind::Type(ty) => {
compute_components(tcx, ty, out);

View file

@ -47,7 +47,7 @@ impl DefPathBasedNames<'tcx> {
}
ty::Tuple(component_types) => {
output.push('(');
for &component_type in component_types {
for component_type in component_types {
self.push_type_name(component_type.expect_ty(), output, debug);
output.push_str(", ");
}

View file

@ -495,7 +495,7 @@ pub trait PrettyPrinter<'tcx>:
}
ty::Never => p!(write("!")),
ty::Tuple(ref tys) => {
p!(write("("), comma_sep(tys.iter().copied()));
p!(write("("), comma_sep(tys.iter()));
if tys.len() == 1 {
p!(write(","));
}
@ -560,7 +560,7 @@ pub trait PrettyPrinter<'tcx>:
// FIXME(eddyb) print this with `print_def_path`.
if !substs.is_empty() {
p!(write("::"));
p!(generic_delimiters(|cx| cx.comma_sep(substs.iter().copied())));
p!(generic_delimiters(|cx| cx.comma_sep(substs.iter())));
}
return Ok(self);
}
@ -1935,7 +1935,7 @@ define_print_and_forward_display! {
(self, cx):
&'tcx ty::List<Ty<'tcx>> {
p!(write("{{"), comma_sep(self.iter().copied()), write("}}"))
p!(write("{{"), comma_sep(self.iter()), write("}}"))
}
ty::TypeAndMut<'tcx> {

View file

@ -143,7 +143,7 @@ pub fn relate_substs<R: TypeRelation<'tcx>>(
let params = a_subst.iter().zip(b_subst).enumerate().map(|(i, (a, b))| {
let variance = variances.map_or(ty::Invariant, |v| v[i]);
relation.relate_with_variance(variance, a, b)
relation.relate_with_variance(variance, &a, &b)
});
Ok(tcx.mk_substs(params)?)
@ -319,7 +319,7 @@ impl<'tcx> Relate<'tcx> for GeneratorWitness<'tcx> {
) -> RelateResult<'tcx, GeneratorWitness<'tcx>> {
assert_eq!(a.0.len(), b.0.len());
let tcx = relation.tcx();
let types = tcx.mk_type_list(a.0.iter().zip(b.0).map(|(a, b)| relation.relate(a, b)))?;
let types = tcx.mk_type_list(a.0.iter().zip(b.0).map(|(a, b)| relation.relate(&a, &b)))?;
Ok(GeneratorWitness(types))
}
}
@ -633,7 +633,7 @@ impl<'tcx> Relate<'tcx> for &'tcx ty::List<ty::ExistentialPredicate<'tcx>> {
let tcx = relation.tcx();
let v = a.iter().zip(b.iter()).map(|(ep_a, ep_b)| {
use crate::ty::ExistentialPredicate::*;
match (*ep_a, *ep_b) {
match (ep_a, ep_b) {
(Trait(ref a), Trait(ref b)) => Ok(Trait(relation.relate(a, b)?)),
(Projection(ref a), Projection(ref b)) => Ok(Projection(relation.relate(a, b)?)),
(AutoTrait(ref a), AutoTrait(ref b)) if a == b => Ok(AutoTrait(*a)),

View file

@ -1093,7 +1093,7 @@ where
// Look for the first element that changed
if let Some((i, new_t)) = iter.by_ref().enumerate().find_map(|(i, t)| {
let new_t = t.fold_with(folder);
if new_t == *t { None } else { Some((i, new_t)) }
if new_t == t { None } else { Some((i, new_t)) }
}) {
// An element changed, prepare to intern the resulting list
let mut new_list = SmallVec::<[_; 8]>::with_capacity(list.len());
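
For context, this hunk sits in rustc's generic list-folding helper, which follows a copy-on-write pattern: fold elements one at a time, and only build (and re-intern) a fresh list once the first changed element is found; otherwise return the original interned list untouched. A rough sketch of the same idea over a plain slice follows; the `fold_elem` closure stands in for the real `TypeFolder` and the `Cow` return stands in for re-interning, both assumptions for illustration rather than rustc's actual signature.

use std::borrow::Cow;

// Copy-on-write folding: reuse the input unless some element actually changes.
fn fold_list<'a, T: Clone + PartialEq>(
    list: &'a [T],
    mut fold_elem: impl FnMut(&T) -> T,
) -> Cow<'a, [T]> {
    let mut iter = list.iter();
    // Look for the first element that changed.
    if let Some((i, new_t)) = iter.by_ref().enumerate().find_map(|(i, t)| {
        let new_t = fold_elem(t);
        if new_t == *t { None } else { Some((i, new_t)) }
    }) {
        // An element changed: copy the unchanged prefix, then fold the rest.
        let mut new_list = Vec::with_capacity(list.len());
        new_list.extend_from_slice(&list[..i]);
        new_list.push(new_t);
        new_list.extend(iter.map(|t| fold_elem(t)));
        Cow::Owned(new_list)
    } else {
        // Nothing changed: hand the original slice back without allocating.
        Cow::Borrowed(list)
    }
}

fn main() {
    let unchanged = fold_list(&[1, 2, 3], |&x| x);
    assert!(matches!(unchanged, Cow::Borrowed(_)));

    let doubled = fold_list(&[1, 2, 3], |&x| x * 2);
    assert_eq!(doubled.into_owned(), vec![2, 4, 6]);
}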

View file

@ -670,7 +670,7 @@ impl<'tcx> List<ExistentialPredicate<'tcx>> {
pub fn projection_bounds<'a>(
&'a self,
) -> impl Iterator<Item = ExistentialProjection<'tcx>> + 'a {
self.iter().filter_map(|predicate| match *predicate {
self.iter().filter_map(|predicate| match predicate {
ExistentialPredicate::Projection(projection) => Some(projection),
_ => None,
})
@ -678,7 +678,7 @@ impl<'tcx> List<ExistentialPredicate<'tcx>> {
#[inline]
pub fn auto_traits<'a>(&'a self) -> impl Iterator<Item = DefId> + 'a {
self.iter().filter_map(|predicate| match *predicate {
self.iter().filter_map(|predicate| match predicate {
ExistentialPredicate::AutoTrait(did) => Some(did),
_ => None,
})
@ -709,7 +709,7 @@ impl<'tcx> Binder<&'tcx List<ExistentialPredicate<'tcx>>> {
pub fn iter<'a>(
&'a self,
) -> impl DoubleEndedIterator<Item = Binder<ExistentialPredicate<'tcx>>> + 'tcx {
self.skip_binder().iter().cloned().map(Binder::bind)
self.skip_binder().iter().map(Binder::bind)
}
}

View file

@ -340,11 +340,11 @@ impl<'a, 'tcx> InternalSubsts<'tcx> {
target_substs: SubstsRef<'tcx>,
) -> SubstsRef<'tcx> {
let defs = tcx.generics_of(source_ancestor);
tcx.mk_substs(target_substs.iter().chain(&self[defs.params.len()..]).cloned())
tcx.mk_substs(target_substs.iter().chain(self.iter().skip(defs.params.len())))
}
pub fn truncate_to(&self, tcx: TyCtxt<'tcx>, generics: &ty::Generics) -> SubstsRef<'tcx> {
tcx.mk_substs(self.iter().take(generics.count()).cloned())
tcx.mk_substs(self.iter().take(generics.count()))
}
}

View file

@ -413,7 +413,7 @@ impl<'tcx> TyCtxt<'tcx> {
let result = item_substs
.iter()
.zip(impl_substs.iter())
.filter(|&(_, &k)| {
.filter(|&(_, k)| {
match k.unpack() {
GenericArgKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => {
!impl_generics.region_param(ebr, self).pure_wrt_drop
@ -433,7 +433,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
})
.map(|(&item_param, _)| item_param)
.map(|(item_param, _)| item_param)
.collect();
debug!("destructor_constraint({:?}) = {:?}", def.did, result);
result

View file

@ -128,7 +128,7 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
stack.push(lt.into());
}
ty::Projection(data) => {
stack.extend(data.substs.iter().copied().rev());
stack.extend(data.substs.iter().rev());
}
ty::Dynamic(obj, lt) => {
stack.push(lt.into());
@ -143,7 +143,7 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
}
};
substs.iter().copied().rev().chain(opt_ty.map(|ty| ty.into()))
substs.iter().rev().chain(opt_ty.map(|ty| ty.into()))
}));
}
ty::Adt(_, substs)
@ -152,14 +152,14 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
| ty::Generator(_, substs, _)
| ty::Tuple(substs)
| ty::FnDef(_, substs) => {
stack.extend(substs.iter().copied().rev());
stack.extend(substs.iter().rev());
}
ty::GeneratorWitness(ts) => {
stack.extend(ts.skip_binder().iter().cloned().rev().map(|ty| ty.into()));
stack.extend(ts.skip_binder().iter().rev().map(|ty| ty.into()));
}
ty::FnPtr(sig) => {
stack.push(sig.skip_binder().output().into());
stack.extend(sig.skip_binder().inputs().iter().cloned().rev().map(|ty| ty.into()));
stack.extend(sig.skip_binder().inputs().iter().copied().rev().map(|ty| ty.into()));
}
},
GenericArgKind::Lifetime(_) => {}
@ -174,7 +174,7 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
| ty::ConstKind::Error => {}
ty::ConstKind::Unevaluated(_, substs, _) => {
stack.extend(substs.iter().copied().rev());
stack.extend(substs.iter().rev());
}
}
}

View file

@ -47,7 +47,7 @@ impl<'tcx> PlaceExt<'tcx> for Place<'tcx> {
for (i, elem) in self.projection.iter().enumerate() {
let proj_base = &self.projection[..i];
if *elem == ProjectionElem::Deref {
if elem == ProjectionElem::Deref {
let ty = Place::ty_from(self.local, proj_base, body, tcx).ty;
match ty.kind {
ty::Ref(_, _, hir::Mutability::Not) if i == 0 => {

View file

@ -163,8 +163,8 @@ fn place_components_conflict<'tcx>(
body,
borrow_local,
borrow_proj_base,
borrow_c,
access_c,
&borrow_c,
&access_c,
bias,
) {
Overlap::Arbitrary => {
@ -420,24 +420,24 @@ fn place_projection_conflict<'tcx>(
}
}
(
ProjectionElem::ConstantIndex {
&ProjectionElem::ConstantIndex {
offset: offset_from_begin,
min_length: min_length1,
from_end: false,
},
ProjectionElem::ConstantIndex {
&ProjectionElem::ConstantIndex {
offset: offset_from_end,
min_length: min_length2,
from_end: true,
},
)
| (
ProjectionElem::ConstantIndex {
&ProjectionElem::ConstantIndex {
offset: offset_from_end,
min_length: min_length1,
from_end: true,
},
ProjectionElem::ConstantIndex {
&ProjectionElem::ConstantIndex {
offset: offset_from_begin,
min_length: min_length2,
from_end: false,
@ -449,7 +449,7 @@ fn place_projection_conflict<'tcx>(
// element (like -1 in Python) and `min_length` the first.
// Therefore, `min_length - offset_from_end` gives the minimal possible
// offset from the beginning
if *offset_from_begin >= *min_length - *offset_from_end {
if offset_from_begin >= min_length - offset_from_end {
debug!("place_element_conflict: DISJOINT-OR-EQ-ARRAY-CONSTANT-INDEX-FE");
Overlap::EqualOrDisjoint
} else {
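
A concrete instance of the arithmetic in the comment above, with hypothetical numbers not taken from the diff: for `min_length = 5` and `offset_from_end = 2`, the from-the-end index names element `len - 2`, which is at least `5 - 2 = 3`; so a from-the-beginning offset of 3 or more may coincide with it (equal or disjoint, never partially overlapping), while an offset of 2 or less can never name the same element (provably disjoint). The hypothetical helper below mirrors the shape of the check.

// Hypothetical helper mirroring the check above; `true` means the two constant
// indices are "equal or disjoint", `false` means they are provably disjoint.
fn may_be_equal(offset_from_begin: u64, offset_from_end: u64, min_length: u64) -> bool {
    offset_from_begin >= min_length - offset_from_end
}

fn main() {
    // min_length = 5, from-end offset = 2: earliest element it can name is index 3.
    assert!(may_be_equal(3, 2, 5)); // could be the same element (e.g. when len == 5)
    assert!(may_be_equal(4, 2, 5)); // could be the same element (e.g. when len == 6)
    assert!(!may_be_equal(2, 2, 5)); // index 2 < 3, so the two can never coincide
}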

View file

@ -497,7 +497,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
return PlaceTy::from_ty(self.tcx().types.err);
}
}
place_ty = self.sanitize_projection(place_ty, elem, place, location)
place_ty = self.sanitize_projection(place_ty, &elem, place, location)
}
if let PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) = context {

View file

@ -158,7 +158,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
};
if union_path.is_none() {
base = self.add_move_path(base, elem, |tcx| Place {
base = self.add_move_path(base, &elem, |tcx| Place {
local: place.local,
projection: tcx.intern_place_elems(&place.projection[..i + 1]),
});

View file

@ -466,7 +466,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let op = place
.projection
.iter()
.try_fold(base_op, |op, elem| self.operand_projection(op, elem))?;
.try_fold(base_op, |op, elem| self.operand_projection(op, &elem))?;
trace!("eval_place_to_op: got {:?}", *op);
Ok(op)

View file

@ -634,7 +634,7 @@ where
};
for elem in place.projection.iter() {
place_ty = self.place_projection(place_ty, elem)?
place_ty = self.place_projection(place_ty, &elem)?
}
self.dump_place(place_ty.place);

View file

@ -449,7 +449,7 @@ fn check_type_length_limit<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) {
let type_length = instance
.substs
.iter()
.flat_map(|&arg| arg.walk())
.flat_map(|arg| arg.walk())
.filter(|arg| match arg.unpack() {
GenericArgKind::Type(_) | GenericArgKind::Const(_) => true,
GenericArgKind::Lifetime(_) => false,

View file

@ -132,7 +132,7 @@ impl<'tcx> MutVisitor<'tcx> for DerefArgVisitor<'tcx> {
for elem in place.projection.iter() {
if let PlaceElem::Index(local) = elem {
assert_ne!(*local, SELF_ARG);
assert_ne!(local, SELF_ARG);
}
}
}
@ -171,7 +171,7 @@ impl<'tcx> MutVisitor<'tcx> for PinArgVisitor<'tcx> {
for elem in place.projection.iter() {
if let PlaceElem::Index(local) = elem {
assert_ne!(*local, SELF_ARG);
assert_ne!(local, SELF_ARG);
}
}
}

View file

@ -1042,7 +1042,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
matched_candidates.iter().flat_map(|candidate| &candidate.bindings)
{
if let Some(i) =
source.projection.iter().rposition(|elem| *elem == ProjectionElem::Deref)
source.projection.iter().rposition(|elem| elem == ProjectionElem::Deref)
{
let proj_base = &source.projection[..i];

View file

@ -176,7 +176,7 @@ where
// All traits in the list are considered the "primary" part of the type
// and are visited by shallow visitors.
for predicate in *predicates.skip_binder() {
let trait_ref = match *predicate {
let trait_ref = match predicate {
ty::ExistentialPredicate::Trait(trait_ref) => trait_ref,
ty::ExistentialPredicate::Projection(proj) => proj.trait_ref(tcx),
ty::ExistentialPredicate::AutoTrait(def_id) => {

View file

@ -477,7 +477,7 @@ impl Printer<'tcx> for SymbolMangler<'tcx> {
predicates: &'tcx ty::List<ty::ExistentialPredicate<'tcx>>,
) -> Result<Self::DynExistential, Self::Error> {
for predicate in predicates {
match *predicate {
match predicate {
ty::ExistentialPredicate::Trait(trait_ref) => {
// Use a type that can't appear in defaults of type parameters.
let dummy_self = self.tcx.mk_ty_infer(ty::FreshTy(0));

View file

@ -647,7 +647,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
// shifting.
let id_substs = InternalSubsts::identity_for_item(self.tcx, def_id);
let map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>> =
substs.iter().enumerate().map(|(index, subst)| (*subst, id_substs[index])).collect();
substs.iter().enumerate().map(|(index, subst)| (subst, id_substs[index])).collect();
// Convert the type from the function into a type valid outside
// the function, by replacing invalid regions with 'static,
@ -891,7 +891,7 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> {
// during codegen.
let generics = self.tcx.generics_of(def_id);
let substs = self.tcx.mk_substs(substs.iter().enumerate().map(|(index, &kind)| {
let substs = self.tcx.mk_substs(substs.iter().enumerate().map(|(index, kind)| {
if index < generics.parent_count {
// Accommodate missing regions in the parent kinds...
self.fold_kind_mapping_missing_regions_to_empty(kind)
@ -906,7 +906,7 @@ impl TypeFolder<'tcx> for ReverseMapper<'tcx> {
ty::Generator(def_id, substs, movability) => {
let generics = self.tcx.generics_of(def_id);
let substs = self.tcx.mk_substs(substs.iter().enumerate().map(|(index, &kind)| {
let substs = self.tcx.mk_substs(substs.iter().enumerate().map(|(index, kind)| {
if index < generics.parent_count {
// Accommodate missing regions in the parent kinds...
self.fold_kind_mapping_missing_regions_to_empty(kind)

View file

@ -281,9 +281,8 @@ impl AutoTraitFinder<'tcx> {
},
}));
let computed_preds = param_env.caller_bounds.iter().cloned();
let mut user_computed_preds: FxHashSet<_> =
user_env.caller_bounds.iter().cloned().collect();
let computed_preds = param_env.caller_bounds.iter();
let mut user_computed_preds: FxHashSet<_> = user_env.caller_bounds.iter().collect();
let mut new_env = param_env;
let dummy_cause = ObligationCause::dummy();

View file

@ -87,7 +87,7 @@ fn environment<'tcx>(
NodeKind::TraitImpl => {
let trait_ref = tcx.impl_trait_ref(def_id).expect("not an impl");
inputs.extend(trait_ref.substs.iter().flat_map(|&arg| arg.walk()));
inputs.extend(trait_ref.substs.iter().flat_map(|arg| arg.walk()));
}
// In an inherent impl, we assume that the receiver type and all its

View file

@ -302,7 +302,7 @@ pub fn normalize_param_env_or_error<'tcx>(
);
let mut predicates: Vec<_> =
util::elaborate_predicates(tcx, unnormalized_env.caller_bounds.into_iter().cloned())
util::elaborate_predicates(tcx, unnormalized_env.caller_bounds.into_iter())
.map(|obligation| obligation.predicate)
.collect();

View file

@ -658,7 +658,6 @@ fn receiver_is_dispatchable<'tcx>(
let caller_bounds: Vec<Predicate<'tcx>> = param_env
.caller_bounds
.iter()
.cloned()
.chain(iter::once(unsize_predicate))
.chain(iter::once(trait_predicate))
.collect();

View file

@ -872,7 +872,7 @@ fn assemble_candidates_from_param_env<'cx, 'tcx>(
obligation_trait_ref,
candidate_set,
ProjectionTyCandidate::ParamEnv,
obligation.param_env.caller_bounds.iter().cloned(),
obligation.param_env.caller_bounds.iter(),
);
}

View file

@ -3154,7 +3154,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// Check that the source struct with the target's
// unsizing parameters is equal to the target.
let substs = tcx.mk_substs(substs_a.iter().enumerate().map(|(i, &k)| {
let substs = tcx.mk_substs(substs_a.iter().enumerate().map(|(i, k)| {
if unsizing_params.contains(i as u32) { substs_b[i] } else { k }
}));
let new_struct = tcx.mk_adt(def, substs);

View file

@ -11,7 +11,7 @@ type NeedsDropResult<T> = Result<T, AlwaysRequiresDrop>;
fn needs_drop_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool {
let adt_fields =
move |adt_def: &ty::AdtDef| tcx.adt_drop_tys(adt_def.did).map(|tys| tys.iter().copied());
move |adt_def: &ty::AdtDef| tcx.adt_drop_tys(adt_def.did).map(|tys| tys.iter());
// If we don't know a type doesn't need drop, for example if it's a type
// parameter without a `Copy` bound, then we conservatively return that it
// needs drop.

View file

@ -2769,7 +2769,7 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
ty::GenericPredicates {
parent: None,
predicates: tcx.arena.alloc_from_iter(self.param_env.caller_bounds.iter().filter_map(
|&predicate| match predicate.kind() {
|predicate| match predicate.kind() {
ty::PredicateKind::Trait(ref data, _)
if data.skip_binder().self_ty().is_param(index) =>
{

View file

@ -900,7 +900,7 @@ fn check_opaque_types<'fcx, 'tcx>(
if may_define_opaque_type(tcx, fn_def_id, opaque_hir_id) {
trace!("check_opaque_types: may define, generics={:#?}", generics);
let mut seen_params: FxHashMap<_, Vec<_>> = FxHashMap::default();
for (i, &arg) in substs.iter().enumerate() {
for (i, arg) in substs.iter().enumerate() {
let arg_is_param = match arg.unpack() {
GenericArgKind::Type(ty) => matches!(ty.kind, ty::Param(_)),

View file

@ -229,7 +229,7 @@ fn unconstrained_parent_impl_substs<'tcx>(
.iter()
.enumerate()
.filter(|&(idx, _)| !constrained_params.contains(&(idx as u32)))
.map(|(_, arg)| *arg)
.map(|(_, arg)| arg)
.collect()
}

View file

@ -291,7 +291,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
}
ty::Tuple(subtys) => {
for &subty in subtys {
for subty in subtys {
self.add_constraints_from_ty(current, subty.expect_ty(), variance);
}
}

View file

@ -111,7 +111,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NeedlessPassByValue {
let fn_def_id = cx.tcx.hir().local_def_id(hir_id);
let preds = traits::elaborate_predicates(cx.tcx, cx.param_env.caller_bounds.iter().copied())
let preds = traits::elaborate_predicates(cx.tcx, cx.param_env.caller_bounds.iter())
.filter(|p| !p.is_global())
.filter_map(|obligation| {
if let ty::PredicateKind::Trait(poly_trait_ref, _) = obligation.predicate.kind() {
@ -179,7 +179,6 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NeedlessPassByValue {
.substs
.iter()
.skip(1)
.cloned()
.collect::<Vec<_>>();
implements_trait(cx, ty_empty_region, t.def_id(), ty_params)
})