commit
2a979f8ba0
209 changed files with 3854 additions and 1974 deletions
@@ -4180,6 +4180,7 @@ dependencies = [
"rustc_hir_analysis",
"rustc_hir_typeck",
"rustc_incremental",
"rustc_index",
"rustc_lint",
"rustc_macros",
"rustc_metadata",

@@ -13,7 +13,7 @@ use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
use rustc_hir::PredicateOrigin;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::ty::{DefIdTree, ResolverAstLowering, TyCtxt};
use rustc_span::lev_distance::find_best_match_for_name;
use rustc_span::edit_distance::find_best_match_for_name;
use rustc_span::source_map::DesugaringKind;
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{Span, Symbol};

@@ -534,7 +534,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
return PlaceTy::from_ty(self.tcx().ty_error());
}
}
place_ty = self.sanitize_projection(place_ty, elem, place, location);
place_ty = self.sanitize_projection(place_ty, elem, place, location, context);
}

if let PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) = context {

@@ -630,12 +630,14 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
}
}

#[instrument(skip(self), level = "debug")]
fn sanitize_projection(
&mut self,
base: PlaceTy<'tcx>,
pi: PlaceElem<'tcx>,
place: &Place<'tcx>,
location: Location,
context: PlaceContext,
) -> PlaceTy<'tcx> {
debug!("sanitize_projection: {:?} {:?} {:?}", base, pi, place);
let tcx = self.tcx();

@@ -713,8 +715,11 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
match self.field_ty(place, base, field, location) {
Ok(ty) => {
let ty = self.cx.normalize(ty, location);
if let Err(terr) = self.cx.eq_types(
debug!(?fty, ?ty);

if let Err(terr) = self.cx.relate_types(
ty,
self.get_ambient_variance(context),
fty,
location.to_locations(),
ConstraintCategory::Boring,

@@ -743,9 +748,10 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
let ty = self.sanitize_type(place, ty);
let ty = self.cx.normalize(ty, location);
self.cx
.eq_types(
base.ty,
.relate_types(
ty,
self.get_ambient_variance(context),
base.ty,
location.to_locations(),
ConstraintCategory::TypeAnnotation,
)

@@ -760,6 +766,21 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
self.tcx().ty_error()
}

fn get_ambient_variance(&self, context: PlaceContext) -> ty::Variance {
use rustc_middle::mir::visit::NonMutatingUseContext::*;
use rustc_middle::mir::visit::NonUseContext::*;

match context {
PlaceContext::MutatingUse(_) => ty::Invariant,
PlaceContext::NonUse(StorageDead | StorageLive | VarDebugInfo) => ty::Invariant,
PlaceContext::NonMutatingUse(
Inspect | Copy | Move | SharedBorrow | ShallowBorrow | UniqueBorrow | AddressOf
| Projection,
) => ty::Covariant,
PlaceContext::NonUse(AscribeUserTy) => ty::Covariant,
}
}

fn field_ty(
&mut self,
parent: &dyn fmt::Debug,

@@ -33,6 +33,7 @@ use rustc_target::spec::{RelocModel, Target};
/// <dt>dylib</dt>
/// <dd>The metadata can be found in the `.rustc` section of the shared library.</dd>
/// </dl>
#[derive(Debug)]
pub struct DefaultMetadataLoader;

fn load_metadata_with(

@@ -312,6 +312,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
}

/// `src` is a *pointer to* a `source_ty`, and in `dest` we should store a pointer to th same
/// data at type `cast_ty`.
fn unsize_into_ptr(
&mut self,
src: &OpTy<'tcx, M::Provenance>,

@@ -335,7 +337,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
);
self.write_immediate(val, dest)
}
(ty::Dynamic(data_a, ..), ty::Dynamic(data_b, ..)) => {
(ty::Dynamic(data_a, _, ty::Dyn), ty::Dynamic(data_b, _, ty::Dyn)) => {
let val = self.read_immediate(src)?;
if data_a.principal() == data_b.principal() {
// A NOP cast that doesn't actually change anything, should be allowed even with mismatching vtables.

@@ -359,7 +361,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}

_ => {
span_bug!(self.cur_span(), "invalid unsizing {:?} -> {:?}", src.layout.ty, cast_ty)
span_bug!(
self.cur_span(),
"invalid pointer unsizing {:?} -> {:?}",
src.layout.ty,
cast_ty
)
}
}
}

@@ -632,7 +632,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
Ok(Some((size, align)))
}
ty::Dynamic(..) => {
ty::Dynamic(_, _, ty::Dyn) => {
let vtable = metadata.unwrap_meta().to_pointer(self)?;
// Read size and align from vtable (already checks size).
Ok(Some(self.get_vtable_size_and_align(vtable)?))

@@ -242,7 +242,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
let mplace = self.ecx.ref_to_mplace(&value)?;
assert_eq!(mplace.layout.ty, referenced_ty);
// Handle trait object vtables.
if let ty::Dynamic(..) =
if let ty::Dynamic(_, _, ty::Dyn) =
tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
{
let ptr = mplace.meta.unwrap_meta().to_pointer(&tcx)?;

@@ -255,7 +255,22 @@ impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
}
}

pub fn offset_with_meta(
/// Replace the layout of this operand. There's basically no sanity check that this makes sense,
/// you better know what you are doing! If this is an immediate, applying the wrong layout can
/// not just lead to invalid data, it can actually *shift the data around* since the offsets of
/// a ScalarPair are entirely determined by the layout, not the data.
pub fn transmute(&self, layout: TyAndLayout<'tcx>) -> Self {
assert_eq!(
self.layout.size, layout.size,
"transmuting with a size change, that doesn't seem right"
);
OpTy { layout, ..*self }
}

/// Offset the operand in memory (if possible) and change its metadata.
///
/// This can go wrong very easily if you give the wrong layout for the new place!
pub(super) fn offset_with_meta(
&self,
offset: Size,
meta: MemPlaceMeta<Prov>,

@@ -276,6 +291,9 @@ impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
}
}

/// Offset the operand in memory (if possible).
///
/// This can go wrong very easily if you give the wrong layout for the new place!
pub fn offset(
&self,
offset: Size,

@@ -26,6 +26,7 @@ pub enum MemPlaceMeta<Prov: Provenance = AllocId> {
}

impl<Prov: Provenance> MemPlaceMeta<Prov> {
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn unwrap_meta(self) -> Scalar<Prov> {
match self {
Self::Meta(s) => s,

@@ -147,12 +148,16 @@ impl<Prov: Provenance> MemPlace<Prov> {
}

#[inline]
pub fn offset_with_meta<'tcx>(
pub(super) fn offset_with_meta<'tcx>(
self,
offset: Size,
meta: MemPlaceMeta<Prov>,
cx: &impl HasDataLayout,
) -> InterpResult<'tcx, Self> {
debug_assert!(
!meta.has_meta() || self.meta.has_meta(),
"cannot use `offset_with_meta` to add metadata to a place"
);
Ok(MemPlace { ptr: self.ptr.offset(offset, cx)?, meta })
}
}

@@ -182,8 +187,11 @@ impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
MPlaceTy { mplace: MemPlace { ptr, meta: MemPlaceMeta::None }, layout, align }
}

/// Offset the place in memory and change its metadata.
///
/// This can go wrong very easily if you give the wrong layout for the new place!
#[inline]
pub fn offset_with_meta(
pub(crate) fn offset_with_meta(
&self,
offset: Size,
meta: MemPlaceMeta<Prov>,

@@ -197,6 +205,9 @@ impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
})
}

/// Offset the place in memory.
///
/// This can go wrong very easily if you give the wrong layout for the new place!
pub fn offset(
&self,
offset: Size,

@@ -241,14 +252,6 @@ impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
}
}
}

#[inline]
pub(super) fn vtable(&self) -> Scalar<Prov> {
match self.layout.ty.kind() {
ty::Dynamic(..) => self.mplace.meta.unwrap_meta(),
_ => bug!("vtable not supported on type {:?}", self.layout.ty),
}
}
}

// These are defined here because they produce a place.

@@ -266,7 +269,12 @@ impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
#[inline(always)]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
self.as_mplace_or_imm().left().unwrap()
self.as_mplace_or_imm().left().unwrap_or_else(|| {
bug!(
"OpTy of type {} was immediate when it was expected to be an MPlace",
self.layout.ty
)
})
}
}

@@ -283,7 +291,12 @@ impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> {
#[inline(always)]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
self.as_mplace_or_local().left().unwrap()
self.as_mplace_or_local().left().unwrap_or_else(|| {
bug!(
"PlaceTy of type {} was a local when it was expected to be an MPlace",
self.layout.ty
)
})
}
}

@@ -807,11 +820,16 @@ where
}

/// Turn a place with a `dyn Trait` type into a place with the actual dynamic type.
/// Aso returns the vtable.
pub(super) fn unpack_dyn_trait(
&self,
mplace: &MPlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
let vtable = mplace.vtable().to_pointer(self)?; // also sanity checks the type
) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::Provenance>, Pointer<Option<M::Provenance>>)> {
assert!(
matches!(mplace.layout.ty.kind(), ty::Dynamic(_, _, ty::Dyn)),
"`unpack_dyn_trait` only makes sense on `dyn*` types"
);
let vtable = mplace.meta.unwrap_meta().to_pointer(self)?;
let (ty, _) = self.get_ptr_vtable(vtable)?;
let layout = self.layout_of(ty)?;

@@ -820,7 +838,26 @@ where
layout,
align: layout.align.abi,
};
Ok(mplace)
Ok((mplace, vtable))
}

/// Turn an operand with a `dyn* Trait` type into an operand with the actual dynamic type.
/// Aso returns the vtable.
pub(super) fn unpack_dyn_star(
&self,
op: &OpTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, (OpTy<'tcx, M::Provenance>, Pointer<Option<M::Provenance>>)> {
assert!(
matches!(op.layout.ty.kind(), ty::Dynamic(_, _, ty::DynStar)),
"`unpack_dyn_star` only makes sense on `dyn*` types"
);
let data = self.operand_field(&op, 0)?;
let vtable = self.operand_field(&op, 1)?;
let vtable = self.read_pointer(&vtable)?;
let (ty, _) = self.get_ptr_vtable(vtable)?;
let layout = self.layout_of(ty)?;
let data = data.transmute(layout);
Ok((data, vtable))
}
}

@ -547,7 +547,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
let receiver_place = loop {
|
||||
match receiver.layout.ty.kind() {
|
||||
ty::Ref(..) | ty::RawPtr(..) => break self.deref_operand(&receiver)?,
|
||||
ty::Dynamic(..) => break receiver.assert_mem_place(), // no immediate unsized values
|
||||
ty::Dynamic(.., ty::Dyn) => break receiver.assert_mem_place(), // no immediate unsized values
|
||||
ty::Dynamic(.., ty::DynStar) => {
|
||||
// Not clear how to handle this, so far we assume the receiver is always a pointer.
|
||||
span_bug!(
|
||||
self.cur_span(),
|
||||
"by-value calls on a `dyn*`... are those a thing?"
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
// Not there yet, search for the only non-ZST field.
|
||||
let mut non_zst_field = None;
|
||||
|
@ -573,39 +580,59 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
}
|
||||
}
|
||||
};
|
||||
// Obtain the underlying trait we are working on.
|
||||
let receiver_tail = self
|
||||
.tcx
|
||||
.struct_tail_erasing_lifetimes(receiver_place.layout.ty, self.param_env);
|
||||
let ty::Dynamic(data, ..) = receiver_tail.kind() else {
|
||||
span_bug!(self.cur_span(), "dynamic call on non-`dyn` type {}", receiver_tail)
|
||||
};
|
||||
|
||||
// Get the required information from the vtable.
|
||||
let vptr = receiver_place.meta.unwrap_meta().to_pointer(self)?;
|
||||
let (dyn_ty, dyn_trait) = self.get_ptr_vtable(vptr)?;
|
||||
if dyn_trait != data.principal() {
|
||||
throw_ub_format!(
|
||||
"`dyn` call on a pointer whose vtable does not match its type"
|
||||
);
|
||||
}
|
||||
// Obtain the underlying trait we are working on, and the adjusted receiver argument.
|
||||
let (vptr, dyn_ty, adjusted_receiver) = if let ty::Dynamic(data, _, ty::DynStar) =
|
||||
receiver_place.layout.ty.kind()
|
||||
{
|
||||
let (recv, vptr) = self.unpack_dyn_star(&receiver_place.into())?;
|
||||
let (dyn_ty, dyn_trait) = self.get_ptr_vtable(vptr)?;
|
||||
if dyn_trait != data.principal() {
|
||||
throw_ub_format!(
|
||||
"`dyn*` call on a pointer whose vtable does not match its type"
|
||||
);
|
||||
}
|
||||
let recv = recv.assert_mem_place(); // we passed an MPlaceTy to `unpack_dyn_star` so we definitely still have one
|
||||
|
||||
(vptr, dyn_ty, recv.ptr)
|
||||
} else {
|
||||
// Doesn't have to be a `dyn Trait`, but the unsized tail must be `dyn Trait`.
|
||||
// (For that reason we also cannot use `unpack_dyn_trait`.)
|
||||
let receiver_tail = self
|
||||
.tcx
|
||||
.struct_tail_erasing_lifetimes(receiver_place.layout.ty, self.param_env);
|
||||
let ty::Dynamic(data, _, ty::Dyn) = receiver_tail.kind() else {
|
||||
span_bug!(self.cur_span(), "dynamic call on non-`dyn` type {}", receiver_tail)
|
||||
};
|
||||
assert!(receiver_place.layout.is_unsized());
|
||||
|
||||
// Get the required information from the vtable.
|
||||
let vptr = receiver_place.meta.unwrap_meta().to_pointer(self)?;
|
||||
let (dyn_ty, dyn_trait) = self.get_ptr_vtable(vptr)?;
|
||||
if dyn_trait != data.principal() {
|
||||
throw_ub_format!(
|
||||
"`dyn` call on a pointer whose vtable does not match its type"
|
||||
);
|
||||
}
|
||||
|
||||
// It might be surprising that we use a pointer as the receiver even if this
|
||||
// is a by-val case; this works because by-val passing of an unsized `dyn
|
||||
// Trait` to a function is actually desugared to a pointer.
|
||||
(vptr, dyn_ty, receiver_place.ptr)
|
||||
};
|
||||
|
||||
// Now determine the actual method to call. We can do that in two different ways and
|
||||
// compare them to ensure everything fits.
|
||||
let Some(ty::VtblEntry::Method(fn_inst)) = self.get_vtable_entries(vptr)?.get(idx).copied() else {
|
||||
throw_ub_format!("`dyn` call trying to call something that is not a method")
|
||||
};
|
||||
trace!("Virtual call dispatches to {fn_inst:#?}");
|
||||
if cfg!(debug_assertions) {
|
||||
let tcx = *self.tcx;
|
||||
|
||||
let trait_def_id = tcx.trait_of_item(def_id).unwrap();
|
||||
let virtual_trait_ref =
|
||||
ty::TraitRef::from_method(tcx, trait_def_id, instance.substs);
|
||||
assert_eq!(
|
||||
receiver_tail,
|
||||
virtual_trait_ref.self_ty(),
|
||||
"mismatch in underlying dyn trait computation within Miri and MIR building",
|
||||
);
|
||||
let existential_trait_ref =
|
||||
ty::ExistentialTraitRef::erase_self_ty(tcx, virtual_trait_ref);
|
||||
let concrete_trait_ref = existential_trait_ref.with_self_ty(tcx, dyn_ty);
|
||||
|
@ -620,17 +647,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
assert_eq!(fn_inst, concrete_method);
|
||||
}
|
||||
|
||||
// `*mut receiver_place.layout.ty` is almost the layout that we
|
||||
// want for args[0]: We have to project to field 0 because we want
|
||||
// a thin pointer.
|
||||
assert!(receiver_place.layout.is_unsized());
|
||||
let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
|
||||
let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0);
|
||||
// Adjust receiver argument.
|
||||
args[0] = OpTy::from(ImmTy::from_immediate(
|
||||
Scalar::from_maybe_pointer(receiver_place.ptr, self).into(),
|
||||
this_receiver_ptr,
|
||||
));
|
||||
// Adjust receiver argument. Layout can be any (thin) ptr.
|
||||
args[0] = ImmTy::from_immediate(
|
||||
Scalar::from_maybe_pointer(adjusted_receiver, self).into(),
|
||||
self.layout_of(self.tcx.mk_mut_ptr(dyn_ty))?,
|
||||
)
|
||||
.into();
|
||||
trace!("Patched receiver operand to {:#?}", args[0]);
|
||||
// recurse with concrete function
|
||||
self.eval_fn_call(
|
||||
|
@ -659,15 +681,24 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
// implementation fail -- a problem shared by rustc.
|
||||
let place = self.force_allocation(place)?;
|
||||
|
||||
let (instance, place) = match place.layout.ty.kind() {
|
||||
ty::Dynamic(..) => {
|
||||
let place = match place.layout.ty.kind() {
|
||||
ty::Dynamic(_, _, ty::Dyn) => {
|
||||
// Dropping a trait object. Need to find actual drop fn.
|
||||
let place = self.unpack_dyn_trait(&place)?;
|
||||
let instance = ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty);
|
||||
(instance, place)
|
||||
self.unpack_dyn_trait(&place)?.0
|
||||
}
|
||||
ty::Dynamic(_, _, ty::DynStar) => {
|
||||
// Dropping a `dyn*`. Need to find actual drop fn.
|
||||
self.unpack_dyn_star(&place.into())?.0.assert_mem_place()
|
||||
}
|
||||
_ => {
|
||||
debug_assert_eq!(
|
||||
instance,
|
||||
ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
|
||||
);
|
||||
place
|
||||
}
|
||||
_ => (instance, place),
|
||||
};
|
||||
let instance = ty::Instance::resolve_drop_in_place(*self.tcx, place.layout.ty);
|
||||
let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;
|
||||
|
||||
let arg = ImmTy::from_immediate(
|
||||
|
|
|
@ -23,18 +23,18 @@ use std::hash::Hash;
|
|||
// for the validation errors
|
||||
use super::UndefinedBehaviorInfo::*;
|
||||
use super::{
|
||||
CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, Machine,
|
||||
MemPlaceMeta, OpTy, Scalar, ValueVisitor,
|
||||
AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
|
||||
Machine, MemPlaceMeta, OpTy, Pointer, Scalar, ValueVisitor,
|
||||
};
|
||||
|
||||
macro_rules! throw_validation_failure {
|
||||
($where:expr, { $( $what_fmt:expr ),+ } $( expected { $( $expected_fmt:expr ),+ } )?) => {{
|
||||
($where:expr, { $( $what_fmt:tt )* } $( expected { $( $expected_fmt:tt )* } )?) => {{
|
||||
let mut msg = String::new();
|
||||
msg.push_str("encountered ");
|
||||
write!(&mut msg, $($what_fmt),+).unwrap();
|
||||
write!(&mut msg, $($what_fmt)*).unwrap();
|
||||
$(
|
||||
msg.push_str(", but expected ");
|
||||
write!(&mut msg, $($expected_fmt),+).unwrap();
|
||||
write!(&mut msg, $($expected_fmt)*).unwrap();
|
||||
)?
|
||||
let path = rustc_middle::ty::print::with_no_trimmed_paths!({
|
||||
let where_ = &$where;
|
||||
|
@ -82,7 +82,7 @@ macro_rules! throw_validation_failure {
|
|||
///
|
||||
macro_rules! try_validation {
|
||||
($e:expr, $where:expr,
|
||||
$( $( $p:pat_param )|+ => { $( $what_fmt:expr ),+ } $( expected { $( $expected_fmt:expr ),+ } )? ),+ $(,)?
|
||||
$( $( $p:pat_param )|+ => { $( $what_fmt:tt )* } $( expected { $( $expected_fmt:tt )* } )? ),+ $(,)?
|
||||
) => {{
|
||||
match $e {
|
||||
Ok(x) => x,
|
||||
|
@ -93,7 +93,7 @@ macro_rules! try_validation {
|
|||
InterpError::UndefinedBehavior($($p)|+) =>
|
||||
throw_validation_failure!(
|
||||
$where,
|
||||
{ $( $what_fmt ),+ } $( expected { $( $expected_fmt ),+ } )?
|
||||
{ $( $what_fmt )* } $( expected { $( $expected_fmt )* } )?
|
||||
)
|
||||
),+,
|
||||
#[allow(unreachable_patterns)]
|
||||
|
@ -335,7 +335,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
|
|||
) -> InterpResult<'tcx> {
|
||||
let tail = self.ecx.tcx.struct_tail_erasing_lifetimes(pointee.ty, self.ecx.param_env);
|
||||
match tail.kind() {
|
||||
ty::Dynamic(..) => {
|
||||
ty::Dynamic(_, _, ty::Dyn) => {
|
||||
let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
|
||||
// Make sure it is a genuine vtable pointer.
|
||||
let (_ty, _trait) = try_validation!(
|
||||
|
@ -399,12 +399,15 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
|
|||
{
|
||||
"an unaligned {kind} (required {} byte alignment but found {})",
|
||||
required.bytes(),
|
||||
has.bytes()
|
||||
has.bytes(),
|
||||
},
|
||||
DanglingIntPointer(0, _) =>
|
||||
{ "a null {kind}" },
|
||||
DanglingIntPointer(i, _) =>
|
||||
{ "a dangling {kind} (address {i:#x} is unallocated)" },
|
||||
{
|
||||
"a dangling {kind} ({pointer} has no provenance)",
|
||||
pointer = Pointer::<Option<AllocId>>::from_addr_invalid(*i),
|
||||
},
|
||||
PointerOutOfBounds { .. } =>
|
||||
{ "a dangling {kind} (going beyond the bounds of its allocation)" },
|
||||
// This cannot happen during const-eval (because interning already detects
|
||||
|
|
|
@ -284,7 +284,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueMut<'mir, 'tcx, M>
|
|||
&self,
|
||||
ecx: &InterpCx<'mir, 'tcx, M>,
|
||||
) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
|
||||
// We `force_allocation` here so that `from_op` below can work.
|
||||
// No need for `force_allocation` since we are just going to read from this.
|
||||
ecx.place_to_op(self)
|
||||
}
|
||||
|
||||
|
@ -421,15 +421,25 @@ macro_rules! make_value_visitor {
|
|||
// Special treatment for special types, where the (static) layout is not sufficient.
|
||||
match *ty.kind() {
|
||||
// If it is a trait object, switch to the real type that was used to create it.
|
||||
ty::Dynamic(..) => {
|
||||
ty::Dynamic(_, _, ty::Dyn) => {
|
||||
// Dyn types. This is unsized, and the actual dynamic type of the data is given by the
|
||||
// vtable stored in the place metadata.
|
||||
// unsized values are never immediate, so we can assert_mem_place
|
||||
let op = v.to_op_for_read(self.ecx())?;
|
||||
let dest = op.assert_mem_place();
|
||||
let inner_mplace = self.ecx().unpack_dyn_trait(&dest)?;
|
||||
let inner_mplace = self.ecx().unpack_dyn_trait(&dest)?.0;
|
||||
trace!("walk_value: dyn object layout: {:#?}", inner_mplace.layout);
|
||||
// recurse with the inner type
|
||||
return self.visit_field(&v, 0, &$value_trait::from_op(&inner_mplace.into()));
|
||||
},
|
||||
ty::Dynamic(_, _, ty::DynStar) => {
|
||||
// DynStar types. Very different from a dyn type (but strangely part of the
|
||||
// same variant in `TyKind`): These are pairs where the 2nd component is the
|
||||
// vtable, and the first component is the data (which must be ptr-sized).
|
||||
let op = v.to_op_for_proj(self.ecx())?;
|
||||
let data = self.ecx().unpack_dyn_star(&op)?.0;
|
||||
return self.visit_field(&v, 0, &$value_trait::from_op(&data));
|
||||
}
|
||||
// Slices do not need special handling here: they have `Array` field
|
||||
// placement with length 0, so we enter the `Array` case below which
|
||||
// indirectly uses the metadata to determine the actual length.
|
||||
|
|
|
@ -320,7 +320,7 @@ fn run_compiler(
|
|||
}
|
||||
|
||||
// Make sure name resolution and macro expansion is run.
|
||||
queries.global_ctxt()?;
|
||||
queries.global_ctxt()?.enter(|tcx| tcx.resolver_for_lowering(()));
|
||||
|
||||
if callbacks.after_expansion(compiler, queries) == Compilation::Stop {
|
||||
return early_exit();
|
||||
|
|
|
@ -5,6 +5,7 @@ use crate::expand::{self, AstFragment, Invocation};
|
|||
use crate::module::DirOwnership;
|
||||
|
||||
use rustc_ast::attr::MarkedAttrs;
|
||||
use rustc_ast::mut_visit::DummyAstNode;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::token::{self, Nonterminal};
|
||||
use rustc_ast::tokenstream::TokenStream;
|
||||
|
@ -640,6 +641,10 @@ impl MacResult for DummyResult {
|
|||
fn make_variants(self: Box<DummyResult>) -> Option<SmallVec<[ast::Variant; 1]>> {
|
||||
Some(SmallVec::new())
|
||||
}
|
||||
|
||||
fn make_crate(self: Box<DummyResult>) -> Option<ast::Crate> {
|
||||
Some(DummyAstNode::dummy())
|
||||
}
|
||||
}
|
||||
|
||||
/// A syntax extension kind.
|
||||
|
|
|
@ -16,7 +16,6 @@ use rustc_ast_pretty::pprust;
|
|||
use rustc_attr::{self as attr, TransparencyError};
|
||||
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
|
||||
use rustc_errors::{Applicability, ErrorGuaranteed};
|
||||
use rustc_feature::Features;
|
||||
use rustc_lint_defs::builtin::{
|
||||
RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
|
||||
};
|
||||
|
@ -379,7 +378,6 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
|
|||
/// Converts a macro item into a syntax extension.
|
||||
pub fn compile_declarative_macro(
|
||||
sess: &Session,
|
||||
features: &Features,
|
||||
def: &ast::Item,
|
||||
edition: Edition,
|
||||
) -> (SyntaxExtension, Vec<(usize, Span)>) {
|
||||
|
@ -508,7 +506,7 @@ pub fn compile_declarative_macro(
|
|||
true,
|
||||
&sess.parse_sess,
|
||||
def.id,
|
||||
features,
|
||||
sess.features_untracked(),
|
||||
edition,
|
||||
)
|
||||
.pop()
|
||||
|
@ -532,7 +530,7 @@ pub fn compile_declarative_macro(
|
|||
false,
|
||||
&sess.parse_sess,
|
||||
def.id,
|
||||
features,
|
||||
sess.features_untracked(),
|
||||
edition,
|
||||
)
|
||||
.pop()
|
||||
|
|
|
@ -987,7 +987,6 @@ pub struct Pat<'hir> {
|
|||
}
|
||||
|
||||
impl<'hir> Pat<'hir> {
|
||||
// FIXME(#19596) this is a workaround, but there should be a better way
|
||||
fn walk_short_(&self, it: &mut impl FnMut(&Pat<'hir>) -> bool) -> bool {
|
||||
if !it(self) {
|
||||
return false;
|
||||
|
@ -1015,7 +1014,6 @@ impl<'hir> Pat<'hir> {
|
|||
self.walk_short_(&mut it)
|
||||
}
|
||||
|
||||
// FIXME(#19596) this is a workaround, but there should be a better way
|
||||
fn walk_(&self, it: &mut impl FnMut(&Pat<'hir>) -> bool) {
|
||||
if !it(self) {
|
||||
return;
|
||||
|
|
|
@ -7,7 +7,7 @@ use rustc_hir::def_id::DefId;
|
|||
use rustc_infer::traits::FulfillmentError;
|
||||
use rustc_middle::ty::{self, Ty};
|
||||
use rustc_session::parse::feature_err;
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::symbol::{sym, Ident};
|
||||
use rustc_span::{Span, Symbol, DUMMY_SP};
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ use crate::astconv::{
|
|||
use crate::errors::AssocTypeBindingNotAllowed;
|
||||
use crate::structured_errors::{GenericArgsInfo, StructuredDiagnostic, WrongNumberOfGenericArgs};
|
||||
use rustc_ast::ast::ParamKindOrd;
|
||||
use rustc_errors::{struct_span_err, Applicability, Diagnostic, MultiSpan};
|
||||
use rustc_errors::{struct_span_err, Applicability, Diagnostic, ErrorGuaranteed, MultiSpan};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
use rustc_hir::def_id::DefId;
|
||||
|
@ -26,7 +26,7 @@ fn generic_arg_mismatch_err(
|
|||
param: &GenericParamDef,
|
||||
possible_ordering_error: bool,
|
||||
help: Option<&str>,
|
||||
) {
|
||||
) -> ErrorGuaranteed {
|
||||
let sess = tcx.sess;
|
||||
let mut err = struct_span_err!(
|
||||
sess,
|
||||
|
@ -70,9 +70,9 @@ fn generic_arg_mismatch_err(
|
|||
) => match path.res {
|
||||
Res::Err => {
|
||||
add_braces_suggestion(arg, &mut err);
|
||||
err.set_primary_message("unresolved item provided when a constant was expected")
|
||||
return err
|
||||
.set_primary_message("unresolved item provided when a constant was expected")
|
||||
.emit();
|
||||
return;
|
||||
}
|
||||
Res::Def(DefKind::TyParam, src_def_id) => {
|
||||
if let Some(param_local_id) = param.def_id.as_local() {
|
||||
|
@ -81,7 +81,7 @@ fn generic_arg_mismatch_err(
|
|||
if param_type.is_suggestable(tcx, false) {
|
||||
err.span_suggestion(
|
||||
tcx.def_span(src_def_id),
|
||||
"consider changing this type parameter to be a `const` generic",
|
||||
"consider changing this type parameter to a const parameter",
|
||||
format!("const {}: {}", param_name, param_type),
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
|
@ -137,7 +137,7 @@ fn generic_arg_mismatch_err(
|
|||
}
|
||||
}
|
||||
|
||||
err.emit();
|
||||
err.emit()
|
||||
}
|
||||
|
||||
/// Creates the relevant generic argument substitutions
|
||||
|
|
|
@ -37,8 +37,8 @@ use rustc_middle::ty::DynKind;
|
|||
use rustc_middle::ty::GenericParamDefKind;
|
||||
use rustc_middle::ty::{self, Const, DefIdTree, IsSuggestable, Ty, TyCtxt, TypeVisitable};
|
||||
use rustc_session::lint::builtin::{AMBIGUOUS_ASSOCIATED_ITEMS, BARE_TRAIT_OBJECTS};
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::edition::Edition;
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::symbol::{kw, Ident, Symbol};
|
||||
use rustc_span::{sym, Span, DUMMY_SP};
|
||||
use rustc_target::spec::abi;
|
||||
|
|
|
@ -45,8 +45,8 @@ use rustc_middle::ty::subst::SubstsRef;
|
|||
use rustc_middle::ty::{self, AdtKind, Ty, TypeVisitable};
|
||||
use rustc_session::errors::ExprParenthesesNeeded;
|
||||
use rustc_session::parse::feature_err;
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::hygiene::DesugaringKind;
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::source_map::{Span, Spanned};
|
||||
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||
use rustc_target::spec::abi::Abi::RustIntrinsic;
|
||||
|
|
|
@ -32,7 +32,7 @@ use rustc_session::lint;
|
|||
use rustc_span::def_id::LocalDefId;
|
||||
use rustc_span::hygiene::DesugaringKind;
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
use rustc_span::{Span, DUMMY_SP};
|
||||
use rustc_span::Span;
|
||||
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
|
||||
use rustc_trait_selection::traits::{self, NormalizeExt, ObligationCauseCode, ObligationCtxt};
|
||||
|
||||
|
@ -737,7 +737,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
if let ty::subst::GenericArgKind::Type(ty) = ty.unpack()
|
||||
&& let ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) = *ty.kind()
|
||||
&& let Some(def_id) = def_id.as_local()
|
||||
&& self.opaque_type_origin(def_id, DUMMY_SP).is_some() {
|
||||
&& self.opaque_type_origin(def_id).is_some() {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -549,6 +549,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
return Err(expr);
|
||||
};
|
||||
|
||||
if let (
|
||||
hir::ExprKind::AddrOf(_borrow_kind, _borrow_mutability, borrowed_expr),
|
||||
ty::Ref(_ty_region, ty_ref_type, _ty_mutability),
|
||||
) = (&expr.kind, in_ty.kind())
|
||||
{
|
||||
// We can "drill into" the borrowed expression.
|
||||
return self.blame_specific_part_of_expr_corresponding_to_generic_param(
|
||||
param,
|
||||
borrowed_expr,
|
||||
(*ty_ref_type).into(),
|
||||
);
|
||||
}
|
||||
|
||||
if let (hir::ExprKind::Tup(expr_elements), ty::Tuple(in_ty_elements)) =
|
||||
(&expr.kind, in_ty.kind())
|
||||
{
|
||||
|
|
|
@ -601,7 +601,6 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
// FIXME(#19596) This is a workaround, but there should be a better way to do this
|
||||
fn cat_pattern_<F>(
|
||||
&self,
|
||||
mut place_with_id: PlaceWithHirId<'tcx>,
|
||||
|
|
|
@ -24,8 +24,8 @@ use rustc_middle::ty::{InternalSubsts, SubstsRef};
|
|||
use rustc_session::lint;
|
||||
use rustc_span::def_id::DefId;
|
||||
use rustc_span::def_id::LocalDefId;
|
||||
use rustc_span::lev_distance::{
|
||||
find_best_match_for_name_with_substrings, lev_distance_with_substrings,
|
||||
use rustc_span::edit_distance::{
|
||||
edit_distance_with_substrings, find_best_match_for_name_with_substrings,
|
||||
};
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::{symbol::Ident, Span, Symbol, DUMMY_SP};
|
||||
|
@ -69,7 +69,7 @@ struct ProbeContext<'a, 'tcx> {
|
|||
impl_dups: FxHashSet<DefId>,
|
||||
|
||||
/// When probing for names, include names that are close to the
|
||||
/// requested name (by Levenshtein distance)
|
||||
/// requested name (by edit distance)
|
||||
allow_similar_names: bool,
|
||||
|
||||
/// Some(candidate) if there is a private candidate
|
||||
|
@ -1793,7 +1793,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
|
|||
|
||||
/// Similarly to `probe_for_return_type`, this method attempts to find the best matching
|
||||
/// candidate method where the method name may have been misspelled. Similarly to other
|
||||
/// Levenshtein based suggestions, we provide at most one such suggestion.
|
||||
/// edit distance based suggestions, we provide at most one such suggestion.
|
||||
fn probe_for_similar_candidate(&mut self) -> Result<Option<ty::AssocItem>, MethodError<'tcx>> {
|
||||
debug!("probing for method names similar to {:?}", self.method_name);
|
||||
|
||||
|
@ -2024,8 +2024,11 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
|
|||
if self.matches_by_doc_alias(x.def_id) {
|
||||
return true;
|
||||
}
|
||||
match lev_distance_with_substrings(name.as_str(), x.name.as_str(), max_dist)
|
||||
{
|
||||
match edit_distance_with_substrings(
|
||||
name.as_str(),
|
||||
x.name.as_str(),
|
||||
max_dist,
|
||||
) {
|
||||
Some(d) => d > 0,
|
||||
None => false,
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ use rustc_middle::ty::{self, DefIdTree, GenericArgKind, Ty, TyCtxt, TypeVisitabl
|
|||
use rustc_middle::ty::{IsSuggestable, ToPolyTraitRef};
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
use rustc_span::Symbol;
|
||||
use rustc_span::{lev_distance, source_map, ExpnKind, FileName, MacroKind, Span};
|
||||
use rustc_span::{edit_distance, source_map, ExpnKind, FileName, MacroKind, Span};
|
||||
use rustc_trait_selection::traits::error_reporting::on_unimplemented::OnUnimplementedNote;
|
||||
use rustc_trait_selection::traits::error_reporting::on_unimplemented::TypeErrCtxtExt as _;
|
||||
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
|
||||
|
@ -1014,7 +1014,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
// that had unsatisfied trait bounds
|
||||
if unsatisfied_predicates.is_empty() && rcvr_ty.is_enum() {
|
||||
let adt_def = rcvr_ty.ty_adt_def().expect("enum is not an ADT");
|
||||
if let Some(suggestion) = lev_distance::find_best_match_for_name(
|
||||
if let Some(suggestion) = edit_distance::find_best_match_for_name(
|
||||
&adt_def.variants().iter().map(|s| s.name).collect::<Vec<_>>(),
|
||||
item_name.name,
|
||||
None,
|
||||
|
|
|
@ -14,8 +14,8 @@ use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKi
|
|||
use rustc_middle::middle::stability::EvalResult;
|
||||
use rustc_middle::ty::{self, Adt, BindingMode, Ty, TypeVisitable};
|
||||
use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::hygiene::DesugaringKind;
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::source_map::{Span, Spanned};
|
||||
use rustc_span::symbol::{kw, sym, Ident};
|
||||
use rustc_span::{BytePos, DUMMY_SP};
|
||||
|
|
|
@ -57,9 +57,7 @@ impl<'tcx> InferCtxt<'tcx> {
|
|||
}
|
||||
let mut obligations = vec![];
|
||||
let replace_opaque_type = |def_id: DefId| {
|
||||
def_id
|
||||
.as_local()
|
||||
.map_or(false, |def_id| self.opaque_type_origin(def_id, span).is_some())
|
||||
def_id.as_local().map_or(false, |def_id| self.opaque_type_origin(def_id).is_some())
|
||||
};
|
||||
let value = value.fold_with(&mut BottomUpFolder {
|
||||
tcx: self.tcx,
|
||||
|
@ -144,9 +142,9 @@ impl<'tcx> InferCtxt<'tcx> {
|
|||
// let x = || foo(); // returns the Opaque assoc with `foo`
|
||||
// }
|
||||
// ```
|
||||
self.opaque_type_origin(def_id, cause.span)?
|
||||
self.opaque_type_origin(def_id)?
|
||||
}
|
||||
DefiningAnchor::Bubble => self.opaque_ty_origin_unchecked(def_id, cause.span),
|
||||
DefiningAnchor::Bubble => self.opaque_type_origin_unchecked(def_id),
|
||||
DefiningAnchor::Error => return None,
|
||||
};
|
||||
if let ty::Alias(ty::Opaque, ty::AliasTy { def_id: b_def_id, .. }) = *b.kind() {
|
||||
|
@ -155,9 +153,8 @@ impl<'tcx> InferCtxt<'tcx> {
|
|||
// no one encounters it in practice.
|
||||
// It does occur however in `fn fut() -> impl Future<Output = i32> { async { 42 } }`,
|
||||
// where it is of no concern, so we only check for TAITs.
|
||||
if let Some(OpaqueTyOrigin::TyAlias) = b_def_id
|
||||
.as_local()
|
||||
.and_then(|b_def_id| self.opaque_type_origin(b_def_id, cause.span))
|
||||
if let Some(OpaqueTyOrigin::TyAlias) =
|
||||
b_def_id.as_local().and_then(|b_def_id| self.opaque_type_origin(b_def_id))
|
||||
{
|
||||
self.tcx.sess.emit_err(OpaqueHiddenTypeDiag {
|
||||
span: cause.span,
|
||||
|
@ -371,24 +368,18 @@ impl<'tcx> InferCtxt<'tcx> {
|
|||
});
|
||||
}
|
||||
|
||||
/// Returns the origin of the opaque type `def_id` if we're currently
|
||||
/// in its defining scope.
|
||||
#[instrument(skip(self), level = "trace", ret)]
|
||||
pub fn opaque_type_origin(&self, def_id: LocalDefId, span: Span) -> Option<OpaqueTyOrigin> {
|
||||
pub fn opaque_type_origin(&self, def_id: LocalDefId) -> Option<OpaqueTyOrigin> {
|
||||
let opaque_hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id);
|
||||
let parent_def_id = match self.defining_use_anchor {
|
||||
DefiningAnchor::Bubble | DefiningAnchor::Error => return None,
|
||||
DefiningAnchor::Bind(bind) => bind,
|
||||
};
|
||||
let item_kind = &self.tcx.hir().expect_item(def_id).kind;
|
||||
|
||||
let hir::ItemKind::OpaqueTy(hir::OpaqueTy { origin, .. }) = item_kind else {
|
||||
span_bug!(
|
||||
span,
|
||||
"weird opaque type: {:#?}, {:#?}",
|
||||
def_id,
|
||||
item_kind
|
||||
)
|
||||
};
|
||||
let in_definition_scope = match *origin {
|
||||
let origin = self.opaque_type_origin_unchecked(def_id);
|
||||
let in_definition_scope = match origin {
|
||||
// Async `impl Trait`
|
||||
hir::OpaqueTyOrigin::AsyncFn(parent) => parent == parent_def_id,
|
||||
// Anonymous `impl Trait`
|
||||
|
@ -398,16 +389,17 @@ impl<'tcx> InferCtxt<'tcx> {
|
|||
may_define_opaque_type(self.tcx, parent_def_id, opaque_hir_id)
|
||||
}
|
||||
};
|
||||
trace!(?origin);
|
||||
in_definition_scope.then_some(*origin)
|
||||
in_definition_scope.then_some(origin)
|
||||
}
|
||||
|
||||
/// Returns the origin of the opaque type `def_id` even if we are not in its
|
||||
/// defining scope.
|
||||
#[instrument(skip(self), level = "trace", ret)]
|
||||
fn opaque_ty_origin_unchecked(&self, def_id: LocalDefId, span: Span) -> OpaqueTyOrigin {
|
||||
fn opaque_type_origin_unchecked(&self, def_id: LocalDefId) -> OpaqueTyOrigin {
|
||||
match self.tcx.hir().expect_item(def_id).kind {
|
||||
hir::ItemKind::OpaqueTy(hir::OpaqueTy { origin, .. }) => origin,
|
||||
ref itemkind => {
|
||||
span_bug!(span, "weird opaque type: {:?}, {:#?}", def_id, itemkind)
|
||||
bug!("weird opaque type: {:?}, {:#?}", def_id, itemkind)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,6 +24,7 @@ rustc_middle = { path = "../rustc_middle" }
|
|||
rustc_ast_lowering = { path = "../rustc_ast_lowering" }
|
||||
rustc_ast_passes = { path = "../rustc_ast_passes" }
|
||||
rustc_incremental = { path = "../rustc_incremental" }
|
||||
rustc_index = { path = "../rustc_index" }
|
||||
rustc_traits = { path = "../rustc_traits" }
|
||||
rustc_data_structures = { path = "../rustc_data_structures" }
|
||||
rustc_codegen_ssa = { path = "../rustc_codegen_ssa" }
|
||||
|
|
|
@ -8,11 +8,12 @@ use rustc_ast::{self as ast, visit};
|
|||
use rustc_borrowck as mir_borrowck;
|
||||
use rustc_codegen_ssa::traits::CodegenBackend;
|
||||
use rustc_data_structures::parallel;
|
||||
use rustc_data_structures::steal::Steal;
|
||||
use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
|
||||
use rustc_errors::{ErrorGuaranteed, PResult};
|
||||
use rustc_errors::PResult;
|
||||
use rustc_expand::base::{ExtCtxt, LintStoreExpand, ResolverExpand};
|
||||
use rustc_hir::def_id::{StableCrateId, LOCAL_CRATE};
|
||||
use rustc_lint::{BufferedEarlyLint, EarlyCheckNode, LintStore};
|
||||
use rustc_lint::{unerased_lint_store, BufferedEarlyLint, EarlyCheckNode, LintStore};
|
||||
use rustc_metadata::creader::CStore;
|
||||
use rustc_middle::arena::Arena;
|
||||
use rustc_middle::dep_graph::DepGraph;
|
||||
|
@ -171,14 +172,12 @@ impl LintStoreExpand for LintStoreExpandImpl<'_> {
|
|||
/// syntax expansion, secondary `cfg` expansion, synthesis of a test
|
||||
/// harness if one is to be provided, injection of a dependency on the
|
||||
/// standard library and prelude, and name resolution.
|
||||
pub fn configure_and_expand(
|
||||
sess: &Session,
|
||||
lint_store: &LintStore,
|
||||
mut krate: ast::Crate,
|
||||
crate_name: Symbol,
|
||||
resolver: &mut Resolver<'_, '_>,
|
||||
) -> Result<ast::Crate> {
|
||||
trace!("configure_and_expand");
|
||||
#[instrument(level = "trace", skip(krate, resolver))]
|
||||
fn configure_and_expand(mut krate: ast::Crate, resolver: &mut Resolver<'_, '_>) -> ast::Crate {
|
||||
let tcx = resolver.tcx();
|
||||
let sess = tcx.sess;
|
||||
let lint_store = unerased_lint_store(tcx);
|
||||
let crate_name = tcx.crate_name(LOCAL_CRATE);
|
||||
pre_expansion_lint(sess, lint_store, resolver.registered_tools(), &krate, crate_name);
|
||||
rustc_builtin_macros::register_builtin_macros(resolver);
|
||||
|
||||
|
@ -249,20 +248,19 @@ pub fn configure_and_expand(
|
|||
ecx.check_unused_macros();
|
||||
});
|
||||
|
||||
let recursion_limit_hit = ecx.reduced_recursion_limit.is_some();
|
||||
// If we hit a recursion limit, exit early to avoid later passes getting overwhelmed
|
||||
// with a large AST
|
||||
if ecx.reduced_recursion_limit.is_some() {
|
||||
sess.abort_if_errors();
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
if cfg!(windows) {
|
||||
env::set_var("PATH", &old_path);
|
||||
}
|
||||
|
||||
if recursion_limit_hit {
|
||||
// If we hit a recursion limit, exit early to avoid later passes getting overwhelmed
|
||||
// with a large AST
|
||||
Err(ErrorGuaranteed::unchecked_claim_error_was_emitted())
|
||||
} else {
|
||||
Ok(krate)
|
||||
}
|
||||
})?;
|
||||
krate
|
||||
});
|
||||
|
||||
sess.time("maybe_building_test_harness", || {
|
||||
rustc_builtin_macros::test_harness::inject(sess, resolver, &mut krate)
|
||||
|
@ -365,7 +363,7 @@ pub fn configure_and_expand(
|
|||
)
|
||||
});
|
||||
|
||||
Ok(krate)
|
||||
krate
|
||||
}
|
||||
|
||||
// Returns all the paths that correspond to generated files.
|
||||
|
@ -564,6 +562,28 @@ fn write_out_deps(
|
|||
}
|
||||
}
|
||||
|
||||
fn resolver_for_lowering<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
(): (),
|
||||
) -> &'tcx Steal<(ty::ResolverAstLowering, Lrc<ast::Crate>)> {
|
||||
let arenas = Resolver::arenas();
|
||||
let krate = tcx.crate_for_resolver(()).steal();
|
||||
let mut resolver = Resolver::new(tcx, &krate, &arenas);
|
||||
let krate = configure_and_expand(krate, &mut resolver);
|
||||
|
||||
// Make sure we don't mutate the cstore from here on.
|
||||
tcx.untracked().cstore.leak();
|
||||
|
||||
let ty::ResolverOutputs {
|
||||
global_ctxt: untracked_resolutions,
|
||||
ast_lowering: untracked_resolver_for_lowering,
|
||||
} = resolver.into_outputs();
|
||||
|
||||
let feed = tcx.feed_unit_query();
|
||||
feed.resolutions(tcx.arena.alloc(untracked_resolutions));
|
||||
tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, Lrc::new(krate))))
|
||||
}
|
||||
|
||||
fn output_filenames(tcx: TyCtxt<'_>, (): ()) -> Arc<OutputFilenames> {
|
||||
let sess = tcx.sess;
|
||||
let _timer = sess.timer("prepare_outputs");
|
||||
|
@ -597,7 +617,7 @@ fn output_filenames(tcx: TyCtxt<'_>, (): ()) -> Arc<OutputFilenames> {
|
|||
}
|
||||
}
|
||||
|
||||
write_out_deps(sess, tcx.cstore_untracked(), &outputs, &output_paths);
|
||||
write_out_deps(sess, &*tcx.cstore_untracked(), &outputs, &output_paths);
|
||||
|
||||
let only_dep_info = sess.opts.output_types.contains_key(&OutputType::DepInfo)
|
||||
&& sess.opts.output_types.len() == 1;
|
||||
|
@ -618,6 +638,7 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
|
|||
providers.analysis = analysis;
|
||||
providers.hir_crate = rustc_ast_lowering::lower_to_hir;
|
||||
providers.output_filenames = output_filenames;
|
||||
providers.resolver_for_lowering = resolver_for_lowering;
|
||||
proc_macro_decls::provide(providers);
|
||||
rustc_const_eval::provide(providers);
|
||||
rustc_middle::hir::provide(providers);
|
||||
|
|
|
@ -7,16 +7,19 @@ use rustc_codegen_ssa::traits::CodegenBackend;
|
|||
use rustc_codegen_ssa::CodegenResults;
|
||||
use rustc_data_structures::steal::Steal;
|
||||
use rustc_data_structures::svh::Svh;
|
||||
use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
|
||||
use rustc_hir::def_id::LOCAL_CRATE;
|
||||
use rustc_data_structures::sync::{Lrc, OnceCell, RwLock, WorkerLocal};
|
||||
use rustc_hir::def_id::{CRATE_DEF_ID, LOCAL_CRATE};
|
||||
use rustc_hir::definitions::Definitions;
|
||||
use rustc_incremental::DepGraphFuture;
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_lint::LintStore;
|
||||
use rustc_metadata::creader::CStore;
|
||||
use rustc_middle::arena::Arena;
|
||||
use rustc_middle::dep_graph::DepGraph;
|
||||
use rustc_middle::ty::{self, GlobalCtxt, TyCtxt};
|
||||
use rustc_middle::ty::{GlobalCtxt, TyCtxt};
|
||||
use rustc_query_impl::Queries as TcxQueries;
|
||||
use rustc_resolve::Resolver;
|
||||
use rustc_session::config::{self, OutputFilenames, OutputType};
|
||||
use rustc_session::cstore::Untracked;
|
||||
use rustc_session::{output::find_crate_name, Session};
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::Symbol;
|
||||
|
@ -187,35 +190,18 @@ impl<'tcx> Queries<'tcx> {
|
|||
self.gcx.compute(|| {
|
||||
let crate_name = *self.crate_name()?.borrow();
|
||||
let (krate, lint_store) = self.register_plugins()?.steal();
|
||||
let (krate, resolver_outputs) = {
|
||||
let _timer = self.session().timer("configure_and_expand");
|
||||
let sess = self.session();
|
||||
|
||||
let arenas = Resolver::arenas();
|
||||
let mut resolver = Resolver::new(
|
||||
sess,
|
||||
&krate,
|
||||
crate_name,
|
||||
self.codegen_backend().metadata_loader(),
|
||||
&arenas,
|
||||
);
|
||||
let krate = passes::configure_and_expand(
|
||||
sess,
|
||||
&lint_store,
|
||||
krate,
|
||||
crate_name,
|
||||
&mut resolver,
|
||||
)?;
|
||||
(Lrc::new(krate), resolver.into_outputs())
|
||||
};
|
||||
let sess = self.session();
|
||||
|
||||
let ty::ResolverOutputs {
|
||||
untracked,
|
||||
global_ctxt: untracked_resolutions,
|
||||
ast_lowering: untracked_resolver_for_lowering,
|
||||
} = resolver_outputs;
|
||||
let cstore = RwLock::new(Box::new(CStore::new(sess)) as _);
|
||||
let definitions = RwLock::new(Definitions::new(sess.local_stable_crate_id()));
|
||||
let mut source_span = IndexVec::default();
|
||||
let _id = source_span.push(krate.spans.inner_span);
|
||||
debug_assert_eq!(_id, CRATE_DEF_ID);
|
||||
let source_span = RwLock::new(source_span);
|
||||
let untracked = Untracked { cstore, source_span, definitions };
|
||||
|
||||
let gcx = passes::create_global_ctxt(
|
||||
let qcx = passes::create_global_ctxt(
|
||||
self.compiler,
|
||||
lint_store,
|
||||
self.dep_graph()?.steal(),
|
||||
|
@ -226,17 +212,18 @@ impl<'tcx> Queries<'tcx> {
|
|||
&self.hir_arena,
|
||||
);
|
||||
|
||||
gcx.enter(|tcx| {
|
||||
let feed = tcx.feed_unit_query();
|
||||
feed.resolver_for_lowering(
|
||||
tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, krate))),
|
||||
);
|
||||
feed.resolutions(tcx.arena.alloc(untracked_resolutions));
|
||||
feed.features_query(tcx.sess.features_untracked());
|
||||
qcx.enter(|tcx| {
|
||||
let feed = tcx.feed_local_crate();
|
||||
feed.crate_name(crate_name);
|
||||
|
||||
let feed = tcx.feed_unit_query();
|
||||
feed.crate_for_resolver(tcx.arena.alloc(Steal::new(krate)));
|
||||
feed.metadata_loader(
|
||||
tcx.arena.alloc(Steal::new(self.codegen_backend().metadata_loader())),
|
||||
);
|
||||
feed.features_query(tcx.sess.features_untracked());
|
||||
});
|
||||
Ok(gcx)
|
||||
Ok(qcx)
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -14,8 +14,8 @@ use rustc_session::filesearch::sysroot_candidates;
|
|||
use rustc_session::lint::{self, BuiltinLintDiagnostics, LintBuffer};
|
||||
use rustc_session::parse::CrateConfig;
|
||||
use rustc_session::{early_error, filesearch, output, Session};
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::edition::Edition;
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::source_map::FileLoader;
|
||||
use rustc_span::symbol::{sym, Symbol};
|
||||
use session::CompilerIO;
|
||||
|
|
|
@ -39,7 +39,7 @@ use rustc_middle::ty::{self, print::Printer, subst::GenericArg, RegisteredTools,
|
|||
use rustc_session::lint::{BuiltinLintDiagnostics, LintExpectationId};
|
||||
use rustc_session::lint::{FutureIncompatibleInfo, Level, Lint, LintBuffer, LintId};
|
||||
use rustc_session::Session;
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||
use rustc_span::{BytePos, Span};
|
||||
use rustc_target::abi;
|
||||
|
|
|
@ -8,15 +8,15 @@ use rustc_ast::expand::allocator::AllocatorKind;
|
|||
use rustc_ast::{self as ast, *};
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
||||
use rustc_data_structures::svh::Svh;
|
||||
use rustc_data_structures::sync::ReadGuard;
|
||||
use rustc_data_structures::sync::MappedReadGuard;
|
||||
use rustc_expand::base::SyntaxExtension;
|
||||
use rustc_hir::def_id::{CrateNum, LocalDefId, StableCrateId, LOCAL_CRATE};
|
||||
use rustc_hir::definitions::Definitions;
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_session::config::{self, CrateType, ExternLocation};
|
||||
use rustc_session::cstore::ExternCrateSource;
|
||||
use rustc_session::cstore::{CrateDepKind, CrateSource, ExternCrate};
|
||||
use rustc_session::cstore::{ExternCrateSource, MetadataLoaderDyn};
|
||||
use rustc_session::lint;
|
||||
use rustc_session::output::validate_crate_name;
|
||||
use rustc_session::search_paths::PathKind;
|
||||
|
@ -60,17 +60,22 @@ impl std::fmt::Debug for CStore {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct CrateLoader<'a> {
|
||||
pub struct CrateLoader<'a, 'tcx: 'a> {
|
||||
// Immutable configuration.
|
||||
sess: &'a Session,
|
||||
metadata_loader: &'a MetadataLoaderDyn,
|
||||
definitions: ReadGuard<'a, Definitions>,
|
||||
local_crate_name: Symbol,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
// Mutable output.
|
||||
cstore: &'a mut CStore,
|
||||
used_extern_options: &'a mut FxHashSet<Symbol>,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> std::ops::Deref for CrateLoader<'a, 'tcx> {
|
||||
type Target = TyCtxt<'tcx>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.tcx
|
||||
}
|
||||
}
|
||||
|
||||
pub enum LoadedMacro {
|
||||
MacroDef(ast::Item, Edition),
|
||||
ProcMacro(SyntaxExtension),
|
||||
|
@ -127,11 +132,10 @@ impl<'a> std::fmt::Debug for CrateDump<'a> {
|
|||
}
|
||||
|
||||
impl CStore {
|
||||
pub fn from_tcx(tcx: TyCtxt<'_>) -> &CStore {
|
||||
tcx.cstore_untracked()
|
||||
.as_any()
|
||||
.downcast_ref::<CStore>()
|
||||
.expect("`tcx.cstore` is not a `CStore`")
|
||||
pub fn from_tcx(tcx: TyCtxt<'_>) -> MappedReadGuard<'_, CStore> {
|
||||
MappedReadGuard::map(tcx.cstore_untracked(), |c| {
|
||||
c.as_any().downcast_ref::<CStore>().expect("`tcx.cstore` is not a `CStore`")
|
||||
})
|
||||
}
|
||||
|
||||
fn alloc_new_crate_num(&mut self) -> CrateNum {
|
||||
|
@ -256,23 +260,13 @@ impl CStore {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> CrateLoader<'a> {
|
||||
impl<'a, 'tcx> CrateLoader<'a, 'tcx> {
|
||||
pub fn new(
|
||||
sess: &'a Session,
|
||||
metadata_loader: &'a MetadataLoaderDyn,
|
||||
local_crate_name: Symbol,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
cstore: &'a mut CStore,
|
||||
definitions: ReadGuard<'a, Definitions>,
|
||||
used_extern_options: &'a mut FxHashSet<Symbol>,
|
||||
) -> Self {
|
||||
CrateLoader {
|
||||
sess,
|
||||
metadata_loader,
|
||||
local_crate_name,
|
||||
cstore,
|
||||
used_extern_options,
|
||||
definitions,
|
||||
}
|
||||
CrateLoader { tcx, cstore, used_extern_options }
|
||||
}
|
||||
pub fn cstore(&self) -> &CStore {
|
||||
&self.cstore
|
||||
|
@ -563,9 +557,10 @@ impl<'a> CrateLoader<'a> {
|
|||
(LoadResult::Previous(cnum), None)
|
||||
} else {
|
||||
info!("falling back to a load");
|
||||
let metadata_loader = self.tcx.metadata_loader(()).borrow();
|
||||
let mut locator = CrateLocator::new(
|
||||
self.sess,
|
||||
&*self.metadata_loader,
|
||||
&**metadata_loader,
|
||||
name,
|
||||
hash,
|
||||
extra_filename,
|
||||
|
@ -970,7 +965,7 @@ impl<'a> CrateLoader<'a> {
|
|||
&format!(
|
||||
"external crate `{}` unused in `{}`: remove the dependency or add `use {} as _;`",
|
||||
name,
|
||||
self.local_crate_name,
|
||||
self.tcx.crate_name(LOCAL_CRATE),
|
||||
name),
|
||||
);
|
||||
}
|
||||
|
@ -990,6 +985,7 @@ impl<'a> CrateLoader<'a> {
|
|||
&mut self,
|
||||
item: &ast::Item,
|
||||
def_id: LocalDefId,
|
||||
definitions: &Definitions,
|
||||
) -> Option<CrateNum> {
|
||||
match item.kind {
|
||||
ast::ItemKind::ExternCrate(orig_name) => {
|
||||
|
@ -1012,7 +1008,7 @@ impl<'a> CrateLoader<'a> {
|
|||
|
||||
let cnum = self.resolve_crate(name, item.span, dep_kind)?;
|
||||
|
||||
let path_len = self.definitions.def_path(def_id).data.len();
|
||||
let path_len = definitions.def_path(def_id).data.len();
|
||||
self.update_extern_crate(
|
||||
cnum,
|
||||
ExternCrate {
|
||||
|
|
|
@ -130,7 +130,13 @@ macro_rules! provide_one {
|
|||
$tcx.ensure().crate_hash($def_id.krate);
|
||||
}
|
||||
|
||||
let $cdata = CStore::from_tcx($tcx).get_crate_data($def_id.krate);
|
||||
let cdata = rustc_data_structures::sync::MappedReadGuard::map(CStore::from_tcx($tcx), |c| {
|
||||
c.get_crate_data($def_id.krate).cdata
|
||||
});
|
||||
let $cdata = crate::creader::CrateMetadataRef {
|
||||
cdata: &cdata,
|
||||
cstore: &CStore::from_tcx($tcx),
|
||||
};
|
||||
|
||||
$compute
|
||||
}
|
||||
|
|
|
@ -35,6 +35,8 @@ macro_rules! arena_types {
|
|||
rustc_data_structures::sync::Lrc<rustc_ast::Crate>,
|
||||
)>,
|
||||
[] output_filenames: std::sync::Arc<rustc_session::config::OutputFilenames>,
|
||||
[] metadata_loader: rustc_data_structures::steal::Steal<Box<rustc_session::cstore::MetadataLoaderDyn>>,
|
||||
[] crate_for_resolver: rustc_data_structures::steal::Steal<rustc_ast::ast::Crate>,
|
||||
[] resolutions: rustc_middle::ty::ResolverGlobalCtxt,
|
||||
[decode] unsafety_check_result: rustc_middle::mir::UnsafetyCheckResult,
|
||||
[decode] code_region: rustc_middle::mir::coverage::CodeRegion,
|
||||
|
|
|
@ -194,11 +194,6 @@ impl EffectiveVisibilities {
|
|||
}
|
||||
}
|
||||
|
||||
pub trait IntoDefIdTree {
|
||||
type Tree: DefIdTree;
|
||||
fn tree(self) -> Self::Tree;
|
||||
}
|
||||
|
||||
impl<Id: Eq + Hash> EffectiveVisibilities<Id> {
|
||||
pub fn iter(&self) -> impl Iterator<Item = (&Id, &EffectiveVisibility)> {
|
||||
self.map.iter()
|
||||
|
@ -217,25 +212,21 @@ impl<Id: Eq + Hash> EffectiveVisibilities<Id> {
|
|||
self.map.entry(id).or_insert_with(|| EffectiveVisibility::from_vis(lazy_private_vis()))
|
||||
}
|
||||
|
||||
pub fn update<T: IntoDefIdTree>(
|
||||
pub fn update(
|
||||
&mut self,
|
||||
id: Id,
|
||||
nominal_vis: Visibility,
|
||||
lazy_private_vis: impl FnOnce(T) -> (Visibility, T),
|
||||
lazy_private_vis: impl FnOnce() -> Visibility,
|
||||
inherited_effective_vis: EffectiveVisibility,
|
||||
level: Level,
|
||||
mut into_tree: T,
|
||||
tree: impl DefIdTree,
|
||||
) -> bool {
|
||||
let mut changed = false;
|
||||
let mut current_effective_vis = match self.map.get(&id).copied() {
|
||||
Some(eff_vis) => eff_vis,
|
||||
None => {
|
||||
let private_vis;
|
||||
(private_vis, into_tree) = lazy_private_vis(into_tree);
|
||||
EffectiveVisibility::from_vis(private_vis)
|
||||
}
|
||||
};
|
||||
let tree = into_tree.tree();
|
||||
let mut current_effective_vis = self
|
||||
.map
|
||||
.get(&id)
|
||||
.copied()
|
||||
.unwrap_or_else(|| EffectiveVisibility::from_vis(lazy_private_vis()));
|
||||
|
||||
let mut inherited_effective_vis_at_prev_level = *inherited_effective_vis.at_level(level);
|
||||
let mut calculated_effective_vis = inherited_effective_vis_at_prev_level;
|
||||
|
|
|
@ -33,7 +33,7 @@ rustc_queries! {
|
|||
}
|
||||
|
||||
query resolver_for_lowering(_: ()) -> &'tcx Steal<(ty::ResolverAstLowering, Lrc<ast::Crate>)> {
|
||||
feedable
|
||||
eval_always
|
||||
no_hash
|
||||
desc { "getting the resolver for lowering" }
|
||||
}
|
||||
|
@ -2077,6 +2077,18 @@ rustc_queries! {
|
|||
desc { "looking up enabled feature gates" }
|
||||
}
|
||||
|
||||
query metadata_loader((): ()) -> &'tcx Steal<Box<rustc_session::cstore::MetadataLoaderDyn>> {
|
||||
feedable
|
||||
no_hash
|
||||
desc { "raw operations for metadata file access" }
|
||||
}
|
||||
|
||||
query crate_for_resolver((): ()) -> &'tcx Steal<rustc_ast::ast::Crate> {
|
||||
feedable
|
||||
no_hash
|
||||
desc { "the ast before macro expansion and name resolution" }
|
||||
}
|
||||
|
||||
/// Attempt to resolve the given `DefId` to an `Instance`, for the
|
||||
/// given generics args (`SubstsRef`), returning one of:
|
||||
/// * `Ok(Some(instance))` on success
|
||||
|
|
|
@ -36,7 +36,7 @@ use rustc_data_structures::profiling::SelfProfilerRef;
|
|||
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||
use rustc_data_structures::steal::Steal;
|
||||
use rustc_data_structures::sync::{self, Lock, Lrc, ReadGuard, WorkerLocal};
|
||||
use rustc_data_structures::sync::{self, Lock, Lrc, MappedReadGuard, ReadGuard, WorkerLocal};
|
||||
use rustc_errors::{
|
||||
DecorateLint, DiagnosticBuilder, DiagnosticMessage, ErrorGuaranteed, MultiSpan,
|
||||
};
|
||||
|
@ -836,7 +836,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
if let Some(id) = id.as_local() {
|
||||
self.definitions_untracked().def_key(id)
|
||||
} else {
|
||||
self.untracked.cstore.def_key(id)
|
||||
self.cstore_untracked().def_key(id)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -850,7 +850,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
if let Some(id) = id.as_local() {
|
||||
self.definitions_untracked().def_path(id)
|
||||
} else {
|
||||
self.untracked.cstore.def_path(id)
|
||||
self.cstore_untracked().def_path(id)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -860,7 +860,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
if let Some(def_id) = def_id.as_local() {
|
||||
self.definitions_untracked().def_path_hash(def_id)
|
||||
} else {
|
||||
self.untracked.cstore.def_path_hash(def_id)
|
||||
self.cstore_untracked().def_path_hash(def_id)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -869,7 +869,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
if crate_num == LOCAL_CRATE {
|
||||
self.sess.local_stable_crate_id()
|
||||
} else {
|
||||
self.untracked.cstore.stable_crate_id(crate_num)
|
||||
self.cstore_untracked().stable_crate_id(crate_num)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -880,7 +880,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
if stable_crate_id == self.sess.local_stable_crate_id() {
|
||||
LOCAL_CRATE
|
||||
} else {
|
||||
self.untracked.cstore.stable_crate_id_to_crate_num(stable_crate_id)
|
||||
self.cstore_untracked().stable_crate_id_to_crate_num(stable_crate_id)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -899,7 +899,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
} else {
|
||||
// If this is a DefPathHash from an upstream crate, let the CrateStore map
|
||||
// it to a DefId.
|
||||
let cstore = &*self.untracked.cstore;
|
||||
let cstore = &*self.cstore_untracked();
|
||||
let cnum = cstore.stable_crate_id_to_crate_num(stable_crate_id);
|
||||
cstore.def_path_hash_to_def_id(cnum, hash)
|
||||
}
|
||||
|
@ -913,7 +913,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
let (crate_name, stable_crate_id) = if def_id.is_local() {
|
||||
(self.crate_name(LOCAL_CRATE), self.sess.local_stable_crate_id())
|
||||
} else {
|
||||
let cstore = &*self.untracked.cstore;
|
||||
let cstore = &*self.cstore_untracked();
|
||||
(cstore.crate_name(def_id.krate), cstore.stable_crate_id(def_id.krate))
|
||||
};
|
||||
|
||||
|
@ -1011,10 +1011,14 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
|
||||
/// Note that this is *untracked* and should only be used within the query
|
||||
/// system if the result is otherwise tracked through queries
|
||||
pub fn cstore_untracked(self) -> &'tcx CrateStoreDyn {
|
||||
&*self.untracked.cstore
|
||||
pub fn cstore_untracked(self) -> MappedReadGuard<'tcx, CrateStoreDyn> {
|
||||
ReadGuard::map(self.untracked.cstore.read(), |c| &**c)
|
||||
}
|
||||
|
||||
/// Give out access to the untracked data without any sanity checks.
|
||||
pub fn untracked(self) -> &'tcx Untracked {
|
||||
&self.untracked
|
||||
}
|
||||
/// Note that this is *untracked* and should only be used within the query
|
||||
/// system if the result is otherwise tracked through queries
|
||||
#[inline]
|
||||
|
@ -1026,7 +1030,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
/// system if the result is otherwise tracked through queries
|
||||
#[inline]
|
||||
pub fn source_span_untracked(self, def_id: LocalDefId) -> Span {
|
||||
self.untracked.source_span.get(def_id).copied().unwrap_or(DUMMY_SP)
|
||||
self.untracked.source_span.read().get(def_id).copied().unwrap_or(DUMMY_SP)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
|
@ -2518,5 +2522,5 @@ pub fn provide(providers: &mut ty::query::Providers) {
|
|||
tcx.lang_items().panic_impl().map_or(false, |did| did.is_local())
|
||||
};
|
||||
providers.source_span =
|
||||
|tcx, def_id| tcx.untracked.source_span.get(def_id).copied().unwrap_or(DUMMY_SP);
|
||||
|tcx, def_id| tcx.untracked.source_span.read().get(def_id).copied().unwrap_or(DUMMY_SP);
|
||||
}
|
||||
|
|
|
@ -43,7 +43,6 @@ use rustc_index::vec::IndexVec;
|
|||
use rustc_macros::HashStable;
|
||||
use rustc_query_system::ich::StableHashingContext;
|
||||
use rustc_serialize::{Decodable, Encodable};
|
||||
use rustc_session::cstore::Untracked;
|
||||
use rustc_span::hygiene::MacroKind;
|
||||
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||
use rustc_span::{ExpnId, ExpnKind, Span};
|
||||
|
@ -157,7 +156,6 @@ pub type RegisteredTools = FxHashSet<Ident>;
|
|||
pub struct ResolverOutputs {
|
||||
pub global_ctxt: ResolverGlobalCtxt,
|
||||
pub ast_lowering: ResolverAstLowering,
|
||||
pub untracked: Untracked,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
|
|
|
@ -19,8 +19,8 @@ use rustc_errors::{
|
|||
struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
|
||||
StashKey,
|
||||
};
|
||||
use rustc_span::edit_distance::edit_distance;
|
||||
use rustc_span::edition::Edition;
|
||||
use rustc_span::lev_distance::lev_distance;
|
||||
use rustc_span::source_map::{self, Span};
|
||||
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||
use rustc_span::DUMMY_SP;
|
||||
|
@ -459,7 +459,8 @@ impl<'a> Parser<'a> {
|
|||
// Maybe the user misspelled `macro_rules` (issue #91227)
|
||||
if self.token.is_ident()
|
||||
&& path.segments.len() == 1
|
||||
&& lev_distance("macro_rules", &path.segments[0].ident.to_string(), 3).is_some()
|
||||
&& edit_distance("macro_rules", &path.segments[0].ident.to_string(), 2)
|
||||
.is_some()
|
||||
{
|
||||
err.span_suggestion(
|
||||
path.span,
|
||||
|
|
|
@ -90,7 +90,7 @@ impl<'a> StableHashingContext<'a> {
|
|||
if let Some(def_id) = def_id.as_local() {
|
||||
self.local_def_path_hash(def_id)
|
||||
} else {
|
||||
self.untracked.cstore.def_path_hash(def_id)
|
||||
self.untracked.cstore.read().def_path_hash(def_id)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -146,7 +146,7 @@ impl<'a> rustc_span::HashStableContext for StableHashingContext<'a> {
|
|||
|
||||
#[inline]
|
||||
fn def_span(&self, def_id: LocalDefId) -> Span {
|
||||
*self.untracked.source_span.get(def_id).unwrap_or(&DUMMY_SP)
|
||||
*self.untracked.source_span.read().get(def_id).unwrap_or(&DUMMY_SP)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
|
|
|
@ -130,11 +130,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
def_key.disambiguated_data.data.get_opt_name().expect("module without name")
|
||||
};
|
||||
|
||||
let expn_id = self.cstore().module_expansion_untracked(def_id, &self.tcx.sess);
|
||||
let span = self.cstore().get_span_untracked(def_id, &self.tcx.sess);
|
||||
Some(self.new_module(
|
||||
parent,
|
||||
ModuleKind::Def(def_kind, def_id, name),
|
||||
self.cstore().module_expansion_untracked(def_id, &self.session),
|
||||
self.cstore().get_span_untracked(def_id, &self.session),
|
||||
expn_id,
|
||||
span,
|
||||
// FIXME: Account for `#[no_implicit_prelude]` attributes.
|
||||
parent.map_or(false, |module| module.no_implicit_prelude),
|
||||
))
|
||||
|
@ -179,7 +181,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
return macro_data.clone();
|
||||
}
|
||||
|
||||
let (ext, macro_rules) = match self.cstore().load_macro_untracked(def_id, &self.session) {
|
||||
let load_macro_untracked = self.cstore().load_macro_untracked(def_id, &self.tcx.sess);
|
||||
let (ext, macro_rules) = match load_macro_untracked {
|
||||
LoadedMacro::MacroDef(item, edition) => (
|
||||
Lrc::new(self.compile_macro(&item, edition).0),
|
||||
matches!(item.kind, ItemKind::MacroDef(def) if def.macro_rules),
|
||||
|
@ -204,9 +207,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
}
|
||||
|
||||
pub(crate) fn build_reduced_graph_external(&mut self, module: Module<'a>) {
|
||||
for child in
|
||||
Vec::from_iter(self.cstore().module_children_untracked(module.def_id(), self.session))
|
||||
{
|
||||
let children =
|
||||
Vec::from_iter(self.cstore().module_children_untracked(module.def_id(), self.tcx.sess));
|
||||
for child in children {
|
||||
let parent_scope = ParentScope::module(module, self);
|
||||
BuildReducedGraphVisitor { r: self, parent_scope }
|
||||
.build_reduced_graph_for_external_crate_res(child);
|
||||
|
@ -346,7 +349,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
|
||||
fn insert_field_names_extern(&mut self, def_id: DefId) {
|
||||
let field_names =
|
||||
self.r.cstore().struct_field_names_untracked(def_id, self.r.session).collect();
|
||||
self.r.cstore().struct_field_names_untracked(def_id, self.r.tcx.sess).collect();
|
||||
self.r.field_names.insert(def_id, field_names);
|
||||
}
|
||||
|
||||
|
@ -539,14 +542,15 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
}
|
||||
|
||||
self.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.struct_span_err(item.span, "`$crate` may not be imported")
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
|
||||
if ident.name == kw::Crate {
|
||||
self.r.session.span_err(
|
||||
self.r.tcx.sess.span_err(
|
||||
ident.span,
|
||||
"crate root imports need to be explicitly named: \
|
||||
`use crate as name;`",
|
||||
|
@ -575,7 +579,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
}
|
||||
ast::UseTreeKind::Glob => {
|
||||
let kind = ImportKind::Glob {
|
||||
is_prelude: self.r.session.contains_name(&item.attrs, sym::prelude_import),
|
||||
is_prelude: self.r.tcx.sess.contains_name(&item.attrs, sym::prelude_import),
|
||||
max_vis: Cell::new(None),
|
||||
id,
|
||||
};
|
||||
|
@ -690,7 +694,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
expansion.to_expn_id(),
|
||||
item.span,
|
||||
parent.no_implicit_prelude
|
||||
|| self.r.session.contains_name(&item.attrs, sym::no_implicit_prelude),
|
||||
|| self.r.tcx.sess.contains_name(&item.attrs, sym::no_implicit_prelude),
|
||||
);
|
||||
self.r.define(parent, ident, TypeNS, (module, vis, sp, expansion));
|
||||
|
||||
|
@ -755,7 +759,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
// If the structure is marked as non_exhaustive then lower the visibility
|
||||
// to within the crate.
|
||||
let mut ctor_vis = if vis.is_public()
|
||||
&& self.r.session.contains_name(&item.attrs, sym::non_exhaustive)
|
||||
&& self.r.tcx.sess.contains_name(&item.attrs, sym::non_exhaustive)
|
||||
{
|
||||
ty::Visibility::Restricted(CRATE_DEF_ID)
|
||||
} else {
|
||||
|
@ -837,7 +841,8 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
|
||||
let (used, module, binding) = if orig_name.is_none() && ident.name == kw::SelfLower {
|
||||
self.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.struct_span_err(item.span, "`extern crate self;` requires renaming")
|
||||
.span_suggestion(
|
||||
item.span,
|
||||
|
@ -850,7 +855,10 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
} else if orig_name == Some(kw::SelfLower) {
|
||||
Some(self.r.graph_root)
|
||||
} else {
|
||||
let crate_id = self.r.crate_loader().process_extern_crate(item, local_def_id);
|
||||
let tcx = self.r.tcx;
|
||||
let crate_id = self.r.crate_loader(|c| {
|
||||
c.process_extern_crate(item, local_def_id, &tcx.definitions_untracked())
|
||||
});
|
||||
crate_id.map(|crate_id| {
|
||||
self.r.extern_crate_map.insert(local_def_id, crate_id);
|
||||
self.r.expect_module(crate_id.as_def_id())
|
||||
|
@ -887,7 +895,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
{
|
||||
let msg = "macro-expanded `extern crate` items cannot \
|
||||
shadow names passed with `--extern`";
|
||||
self.r.session.span_err(item.span, msg);
|
||||
self.r.tcx.sess.span_err(item.span, msg);
|
||||
}
|
||||
}
|
||||
let entry = self.r.extern_prelude.entry(ident.normalize_to_macros_2_0()).or_insert(
|
||||
|
@ -998,23 +1006,26 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
| Res::Err => bug!("unexpected resolution: {:?}", res),
|
||||
}
|
||||
// Record some extra data for better diagnostics.
|
||||
let cstore = self.r.cstore();
|
||||
match res {
|
||||
Res::Def(DefKind::Struct, def_id) => {
|
||||
let cstore = self.r.cstore();
|
||||
if let Some((ctor_kind, ctor_def_id)) = cstore.ctor_untracked(def_id) {
|
||||
let ctor_res = Res::Def(DefKind::Ctor(CtorOf::Struct, ctor_kind), ctor_def_id);
|
||||
let ctor_vis = cstore.visibility_untracked(ctor_def_id);
|
||||
let field_visibilities =
|
||||
cstore.struct_field_visibilities_untracked(def_id).collect();
|
||||
drop(cstore);
|
||||
self.r
|
||||
.struct_constructors
|
||||
.insert(def_id, (ctor_res, ctor_vis, field_visibilities));
|
||||
} else {
|
||||
drop(cstore);
|
||||
}
|
||||
self.insert_field_names_extern(def_id)
|
||||
}
|
||||
Res::Def(DefKind::Union, def_id) => self.insert_field_names_extern(def_id),
|
||||
Res::Def(DefKind::AssocFn, def_id) => {
|
||||
if cstore.fn_has_self_parameter_untracked(def_id, self.r.session) {
|
||||
if self.r.cstore().fn_has_self_parameter_untracked(def_id, self.r.tcx.sess) {
|
||||
self.r.has_self.insert(def_id);
|
||||
}
|
||||
}
|
||||
|
@ -1033,7 +1044,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
let msg = format!("`{}` is already in scope", name);
|
||||
let note =
|
||||
"macro-expanded `#[macro_use]`s may not shadow existing macros (see RFC 1560)";
|
||||
self.r.session.struct_span_err(span, &msg).note(note).emit();
|
||||
self.r.tcx.sess.struct_span_err(span, &msg).note(note).emit();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1045,7 +1056,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
if attr.has_name(sym::macro_use) {
|
||||
if self.parent_scope.module.parent.is_some() {
|
||||
struct_span_err!(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
item.span,
|
||||
E0468,
|
||||
"an `extern crate` loading macros must be at the crate root"
|
||||
|
@ -1055,7 +1066,8 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
if let ItemKind::ExternCrate(Some(orig_name)) = item.kind {
|
||||
if orig_name == kw::SelfLower {
|
||||
self.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.struct_span_err(
|
||||
attr.span,
|
||||
"`#[macro_use]` is not supported on `extern crate self`",
|
||||
|
@ -1064,7 +1076,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
}
|
||||
}
|
||||
let ill_formed = |span| {
|
||||
struct_span_err!(self.r.session, span, E0466, "bad macro import").emit();
|
||||
struct_span_err!(self.r.tcx.sess, span, E0466, "bad macro import").emit();
|
||||
};
|
||||
match attr.meta() {
|
||||
Some(meta) => match meta.kind {
|
||||
|
@ -1135,8 +1147,13 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
allow_shadowing,
|
||||
);
|
||||
} else {
|
||||
struct_span_err!(self.r.session, ident.span, E0469, "imported macro not found")
|
||||
.emit();
|
||||
struct_span_err!(
|
||||
self.r.tcx.sess,
|
||||
ident.span,
|
||||
E0469,
|
||||
"imported macro not found"
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1148,7 +1165,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
for attr in attrs {
|
||||
if attr.has_name(sym::macro_escape) {
|
||||
let msg = "`#[macro_escape]` is a deprecated synonym for `#[macro_use]`";
|
||||
let mut err = self.r.session.struct_span_warn(attr.span, msg);
|
||||
let mut err = self.r.tcx.sess.struct_span_warn(attr.span, msg);
|
||||
if let ast::AttrStyle::Inner = attr.style {
|
||||
err.help("try an outer attribute: `#[macro_use]`").emit();
|
||||
} else {
|
||||
|
@ -1159,7 +1176,10 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
}
|
||||
|
||||
if !attr.is_word() {
|
||||
self.r.session.span_err(attr.span, "arguments to `macro_use` are not allowed here");
|
||||
self.r
|
||||
.tcx
|
||||
.sess
|
||||
.span_err(attr.span, "arguments to `macro_use` are not allowed here");
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
@ -1183,11 +1203,11 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
}
|
||||
|
||||
fn proc_macro_stub(&self, item: &ast::Item) -> Option<(MacroKind, Ident, Span)> {
|
||||
if self.r.session.contains_name(&item.attrs, sym::proc_macro) {
|
||||
if self.r.tcx.sess.contains_name(&item.attrs, sym::proc_macro) {
|
||||
return Some((MacroKind::Bang, item.ident, item.span));
|
||||
} else if self.r.session.contains_name(&item.attrs, sym::proc_macro_attribute) {
|
||||
} else if self.r.tcx.sess.contains_name(&item.attrs, sym::proc_macro_attribute) {
|
||||
return Some((MacroKind::Attr, item.ident, item.span));
|
||||
} else if let Some(attr) = self.r.session.find_by_name(&item.attrs, sym::proc_macro_derive)
|
||||
} else if let Some(attr) = self.r.tcx.sess.find_by_name(&item.attrs, sym::proc_macro_derive)
|
||||
{
|
||||
if let Some(nested_meta) = attr.meta_item_list().and_then(|list| list.get(0).cloned()) {
|
||||
if let Some(ident) = nested_meta.ident() {
|
||||
|
@ -1222,7 +1242,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
let def_id = self.r.local_def_id(item.id);
|
||||
let (ext, ident, span, macro_rules, rule_spans) = match &item.kind {
|
||||
ItemKind::MacroDef(def) => {
|
||||
let (ext, rule_spans) = self.r.compile_macro(item, self.r.session.edition());
|
||||
let (ext, rule_spans) = self.r.compile_macro(item, self.r.tcx.sess.edition());
|
||||
let ext = Lrc::new(ext);
|
||||
(ext, item.ident, item.span, def.macro_rules, rule_spans)
|
||||
}
|
||||
|
@ -1243,7 +1263,7 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
if macro_rules {
|
||||
let ident = ident.normalize_to_macros_2_0();
|
||||
self.r.macro_names.insert(ident);
|
||||
let is_macro_export = self.r.session.contains_name(&item.attrs, sym::macro_export);
|
||||
let is_macro_export = self.r.tcx.sess.contains_name(&item.attrs, sym::macro_export);
|
||||
let vis = if is_macro_export {
|
||||
ty::Visibility::Public
|
||||
} else {
|
||||
|
@ -1507,7 +1527,7 @@ impl<'a, 'b, 'tcx> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
|||
|
||||
// If the variant is marked as non_exhaustive then lower the visibility to within the crate.
|
||||
let ctor_vis = if vis.is_public()
|
||||
&& self.r.session.contains_name(&variant.attrs, sym::non_exhaustive)
|
||||
&& self.r.tcx.sess.contains_name(&variant.attrs, sym::non_exhaustive)
|
||||
{
|
||||
ty::Visibility::Restricted(CRATE_DEF_ID)
|
||||
} else {
|
||||
|
|
|
@ -290,7 +290,7 @@ impl Resolver<'_, '_> {
|
|||
let ms = MultiSpan::from_spans(spans.clone());
|
||||
let mut span_snippets = spans
|
||||
.iter()
|
||||
.filter_map(|s| match visitor.r.session.source_map().span_to_snippet(*s) {
|
||||
.filter_map(|s| match visitor.r.tcx.sess.source_map().span_to_snippet(*s) {
|
||||
Ok(s) => Some(format!("`{}`", s)),
|
||||
_ => None,
|
||||
})
|
||||
|
@ -317,7 +317,7 @@ impl Resolver<'_, '_> {
|
|||
// If we are in the `--test` mode, suppress a help that adds the `#[cfg(test)]`
|
||||
// attribute; however, if not, suggest adding the attribute. There is no way to
|
||||
// retrieve attributes here because we do not have a `TyCtxt` yet.
|
||||
let test_module_span = if visitor.r.session.opts.test {
|
||||
let test_module_span = if visitor.r.tcx.sess.opts.test {
|
||||
None
|
||||
} else {
|
||||
let parent_module = visitor.r.get_nearest_non_block_module(
|
||||
|
|
|
@ -21,9 +21,9 @@ use rustc_session::lint::builtin::ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE;
|
|||
use rustc_session::lint::builtin::MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS;
|
||||
use rustc_session::lint::BuiltinLintDiagnostics;
|
||||
use rustc_session::Session;
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::edition::Edition;
|
||||
use rustc_span::hygiene::MacroKind;
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::source_map::SourceMap;
|
||||
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||
use rustc_span::{BytePos, Span, SyntaxContext};
|
||||
|
@ -154,8 +154,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
|
||||
if !candidates.is_empty() {
|
||||
show_candidates(
|
||||
&self.session,
|
||||
&self.untracked.source_span,
|
||||
&self.tcx.sess,
|
||||
&self.tcx.untracked().source_span.read(),
|
||||
&mut err,
|
||||
span,
|
||||
&candidates,
|
||||
|
@ -206,7 +206,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
};
|
||||
|
||||
let (name, span) =
|
||||
(ident.name, self.session.source_map().guess_head_span(new_binding.span));
|
||||
(ident.name, self.tcx.sess.source_map().guess_head_span(new_binding.span));
|
||||
|
||||
if let Some(s) = self.name_already_seen.get(&name) {
|
||||
if s == &span {
|
||||
|
@ -226,15 +226,15 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
let msg = format!("the name `{}` is defined multiple times", name);
|
||||
|
||||
let mut err = match (old_binding.is_extern_crate(), new_binding.is_extern_crate()) {
|
||||
(true, true) => struct_span_err!(self.session, span, E0259, "{}", msg),
|
||||
(true, true) => struct_span_err!(self.tcx.sess, span, E0259, "{}", msg),
|
||||
(true, _) | (_, true) => match new_binding.is_import() && old_binding.is_import() {
|
||||
true => struct_span_err!(self.session, span, E0254, "{}", msg),
|
||||
false => struct_span_err!(self.session, span, E0260, "{}", msg),
|
||||
true => struct_span_err!(self.tcx.sess, span, E0254, "{}", msg),
|
||||
false => struct_span_err!(self.tcx.sess, span, E0260, "{}", msg),
|
||||
},
|
||||
_ => match (old_binding.is_import_user_facing(), new_binding.is_import_user_facing()) {
|
||||
(false, false) => struct_span_err!(self.session, span, E0428, "{}", msg),
|
||||
(true, true) => struct_span_err!(self.session, span, E0252, "{}", msg),
|
||||
_ => struct_span_err!(self.session, span, E0255, "{}", msg),
|
||||
(false, false) => struct_span_err!(self.tcx.sess, span, E0428, "{}", msg),
|
||||
(true, true) => struct_span_err!(self.tcx.sess, span, E0252, "{}", msg),
|
||||
_ => struct_span_err!(self.tcx.sess, span, E0255, "{}", msg),
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -248,7 +248,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
err.span_label(span, format!("`{}` re{} here", name, new_participle));
|
||||
if !old_binding.span.is_dummy() && old_binding.span != span {
|
||||
err.span_label(
|
||||
self.session.source_map().guess_head_span(old_binding.span),
|
||||
self.tcx.sess.source_map().guess_head_span(old_binding.span),
|
||||
format!("previous {} of the {} `{}` here", old_noun, old_kind, name),
|
||||
);
|
||||
}
|
||||
|
@ -352,7 +352,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
if let Some(pos) =
|
||||
source.span.hi().0.checked_sub(binding_span.lo().0).map(|pos| pos as usize)
|
||||
{
|
||||
if let Ok(snippet) = self.session.source_map().span_to_snippet(binding_span) {
|
||||
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(binding_span) {
|
||||
if pos <= snippet.len() {
|
||||
suggestion = Some(format!(
|
||||
"{} as {}{}",
|
||||
|
@ -426,12 +426,12 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// `a` and `import.use_span` is `issue_52891::{d, e, a};`.
|
||||
|
||||
let (found_closing_brace, span) =
|
||||
find_span_of_binding_until_next_binding(self.session, binding_span, import.use_span);
|
||||
find_span_of_binding_until_next_binding(self.tcx.sess, binding_span, import.use_span);
|
||||
|
||||
// If there was a closing brace then identify the span to remove any trailing commas from
|
||||
// previous imports.
|
||||
if found_closing_brace {
|
||||
if let Some(span) = extend_span_to_previous_binding(self.session, span) {
|
||||
if let Some(span) = extend_span_to_previous_binding(self.tcx.sess, span) {
|
||||
err.tool_only_span_suggestion(span, message, "", Applicability::MaybeIncorrect);
|
||||
} else {
|
||||
// Remove the entire line if we cannot extend the span back, this indicates an
|
||||
|
@ -462,7 +462,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
|
||||
let first_name = match path.get(0) {
|
||||
// In the 2018 edition this lint is a hard error, so nothing to do
|
||||
Some(seg) if seg.ident.span.is_rust_2015() && self.session.is_rust_2015() => {
|
||||
Some(seg) if seg.ident.span.is_rust_2015() && self.tcx.sess.is_rust_2015() => {
|
||||
seg.ident.name
|
||||
}
|
||||
_ => return,
|
||||
|
@ -541,14 +541,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
match resolution_error {
|
||||
ResolutionError::GenericParamsFromOuterFunction(outer_res, has_generic_params) => {
|
||||
let mut err = struct_span_err!(
|
||||
self.session,
|
||||
self.tcx.sess,
|
||||
span,
|
||||
E0401,
|
||||
"can't use generic parameters from outer function",
|
||||
);
|
||||
err.span_label(span, "use of generic parameter from outer function");
|
||||
|
||||
let sm = self.session.source_map();
|
||||
let sm = self.tcx.sess.source_map();
|
||||
let def_id = match outer_res {
|
||||
Res::SelfTyParam { .. } => {
|
||||
err.span_label(span, "can't use `Self` here");
|
||||
|
@ -605,10 +605,11 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
err
|
||||
}
|
||||
ResolutionError::NameAlreadyUsedInParameterList(name, first_use_span) => self
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.create_err(errs::NameAlreadyUsedInParameterList { span, first_use_span, name }),
|
||||
ResolutionError::MethodNotMemberOfTrait(method, trait_, candidate) => {
|
||||
self.session.create_err(errs::MethodNotMemberOfTrait {
|
||||
self.tcx.sess.create_err(errs::MethodNotMemberOfTrait {
|
||||
span,
|
||||
method,
|
||||
trait_,
|
||||
|
@ -619,7 +620,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
})
|
||||
}
|
||||
ResolutionError::TypeNotMemberOfTrait(type_, trait_, candidate) => {
|
||||
self.session.create_err(errs::TypeNotMemberOfTrait {
|
||||
self.tcx.sess.create_err(errs::TypeNotMemberOfTrait {
|
||||
span,
|
||||
type_,
|
||||
trait_,
|
||||
|
@ -630,7 +631,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
})
|
||||
}
|
||||
ResolutionError::ConstNotMemberOfTrait(const_, trait_, candidate) => {
|
||||
self.session.create_err(errs::ConstNotMemberOfTrait {
|
||||
self.tcx.sess.create_err(errs::ConstNotMemberOfTrait {
|
||||
span,
|
||||
const_,
|
||||
trait_,
|
||||
|
@ -648,7 +649,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
|
||||
let msp = MultiSpan::from_spans(target_sp.clone());
|
||||
let mut err = struct_span_err!(
|
||||
self.session,
|
||||
self.tcx.sess,
|
||||
msp,
|
||||
E0408,
|
||||
"variable `{}` is not bound in all patterns",
|
||||
|
@ -686,8 +687,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
err.span_help(span, &help_msg);
|
||||
}
|
||||
show_candidates(
|
||||
&self.session,
|
||||
&self.untracked.source_span,
|
||||
&self.tcx.sess,
|
||||
&self.tcx.untracked().source_span.read(),
|
||||
&mut err,
|
||||
Some(span),
|
||||
&import_suggestions,
|
||||
|
@ -701,17 +702,19 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
err
|
||||
}
|
||||
ResolutionError::VariableBoundWithDifferentMode(variable_name, first_binding_span) => {
|
||||
self.session.create_err(errs::VariableBoundWithDifferentMode {
|
||||
self.tcx.sess.create_err(errs::VariableBoundWithDifferentMode {
|
||||
span,
|
||||
first_binding_span,
|
||||
variable_name,
|
||||
})
|
||||
}
|
||||
ResolutionError::IdentifierBoundMoreThanOnceInParameterList(identifier) => self
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.create_err(errs::IdentifierBoundMoreThanOnceInParameterList { span, identifier }),
|
||||
ResolutionError::IdentifierBoundMoreThanOnceInSamePattern(identifier) => self
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.create_err(errs::IdentifierBoundMoreThanOnceInSamePattern { span, identifier }),
|
||||
ResolutionError::UndeclaredLabel { name, suggestion } => {
|
||||
let ((sub_reachable, sub_reachable_suggestion), sub_unreachable) = match suggestion
|
||||
|
@ -737,7 +740,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// No similarly-named labels exist.
|
||||
None => ((None, None), None),
|
||||
};
|
||||
self.session.create_err(errs::UndeclaredLabel {
|
||||
self.tcx.sess.create_err(errs::UndeclaredLabel {
|
||||
span,
|
||||
name,
|
||||
sub_reachable,
|
||||
|
@ -762,21 +765,22 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
};
|
||||
(Some(suggestion), Some(mpart_suggestion))
|
||||
};
|
||||
self.session.create_err(errs::SelfImportsOnlyAllowedWithin {
|
||||
self.tcx.sess.create_err(errs::SelfImportsOnlyAllowedWithin {
|
||||
span,
|
||||
suggestion,
|
||||
mpart_suggestion,
|
||||
})
|
||||
}
|
||||
ResolutionError::SelfImportCanOnlyAppearOnceInTheList => {
|
||||
self.session.create_err(errs::SelfImportCanOnlyAppearOnceInTheList { span })
|
||||
}
|
||||
ResolutionError::SelfImportOnlyInImportListWithNonEmptyPrefix => {
|
||||
self.session.create_err(errs::SelfImportOnlyInImportListWithNonEmptyPrefix { span })
|
||||
self.tcx.sess.create_err(errs::SelfImportCanOnlyAppearOnceInTheList { span })
|
||||
}
|
||||
ResolutionError::SelfImportOnlyInImportListWithNonEmptyPrefix => self
|
||||
.tcx
|
||||
.sess
|
||||
.create_err(errs::SelfImportOnlyInImportListWithNonEmptyPrefix { span }),
|
||||
ResolutionError::FailedToResolve { label, suggestion } => {
|
||||
let mut err =
|
||||
struct_span_err!(self.session, span, E0433, "failed to resolve: {}", &label);
|
||||
struct_span_err!(self.tcx.sess, span, E0433, "failed to resolve: {}", &label);
|
||||
err.span_label(span, label);
|
||||
|
||||
if let Some((suggestions, msg, applicability)) = suggestion {
|
||||
|
@ -790,7 +794,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
err
|
||||
}
|
||||
ResolutionError::CannotCaptureDynamicEnvironmentInFnItem => {
|
||||
self.session.create_err(errs::CannotCaptureDynamicEnvironmentInFnItem { span })
|
||||
self.tcx.sess.create_err(errs::CannotCaptureDynamicEnvironmentInFnItem { span })
|
||||
}
|
||||
ResolutionError::AttemptToUseNonConstantValueInConstant(ident, suggestion, current) => {
|
||||
// let foo =...
|
||||
|
@ -802,12 +806,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// the further the two are apart, the higher the chance of the suggestion being wrong
|
||||
|
||||
let sp = self
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.span_extend_to_prev_str(ident.span, current, true, false);
|
||||
|
||||
let ((with, with_label), without) = match sp {
|
||||
Some(sp) if !self.session.source_map().is_multiline(sp) => {
|
||||
Some(sp) if !self.tcx.sess.source_map().is_multiline(sp) => {
|
||||
let sp = sp.with_lo(BytePos(sp.lo().0 - (current.len() as u32)));
|
||||
(
|
||||
(Some(errs::AttemptToUseNonConstantValueInConstantWithSuggestion {
|
||||
|
@ -828,7 +833,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
),
|
||||
};
|
||||
|
||||
self.session.create_err(errs::AttemptToUseNonConstantValueInConstant {
|
||||
self.tcx.sess.create_err(errs::AttemptToUseNonConstantValueInConstant {
|
||||
span,
|
||||
with,
|
||||
with_label,
|
||||
|
@ -842,7 +847,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
article,
|
||||
shadowed_binding,
|
||||
shadowed_binding_span,
|
||||
} => self.session.create_err(errs::BindingShadowsSomethingUnacceptable {
|
||||
} => self.tcx.sess.create_err(errs::BindingShadowsSomethingUnacceptable {
|
||||
span,
|
||||
shadowing_binding,
|
||||
shadowed_binding,
|
||||
|
@ -859,13 +864,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
name,
|
||||
}),
|
||||
ResolutionError::ForwardDeclaredGenericParam => {
|
||||
self.session.create_err(errs::ForwardDeclaredGenericParam { span })
|
||||
self.tcx.sess.create_err(errs::ForwardDeclaredGenericParam { span })
|
||||
}
|
||||
ResolutionError::ParamInTyOfConstParam(name) => {
|
||||
self.session.create_err(errs::ParamInTyOfConstParam { span, name })
|
||||
self.tcx.sess.create_err(errs::ParamInTyOfConstParam { span, name })
|
||||
}
|
||||
ResolutionError::ParamInNonTrivialAnonConst { name, is_type } => {
|
||||
self.session.create_err(errs::ParamInNonTrivialAnonConst {
|
||||
self.tcx.sess.create_err(errs::ParamInNonTrivialAnonConst {
|
||||
span,
|
||||
name,
|
||||
sub_is_type: if is_type {
|
||||
|
@ -874,13 +879,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
errs::ParamInNonTrivialAnonConstIsType::NotAType { name }
|
||||
},
|
||||
help: self
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.is_nightly_build()
|
||||
.then_some(errs::ParamInNonTrivialAnonConstHelp),
|
||||
})
|
||||
}
|
||||
ResolutionError::SelfInGenericParamDefault => {
|
||||
self.session.create_err(errs::SelfInGenericParamDefault { span })
|
||||
self.tcx.sess.create_err(errs::SelfInGenericParamDefault { span })
|
||||
}
|
||||
ResolutionError::UnreachableLabel { name, definition_span, suggestion } => {
|
||||
let ((sub_suggestion_label, sub_suggestion), sub_unreachable_label) =
|
||||
|
@ -908,7 +914,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// No similarly-named labels exist.
|
||||
None => ((None, None), None),
|
||||
};
|
||||
self.session.create_err(errs::UnreachableLabel {
|
||||
self.tcx.sess.create_err(errs::UnreachableLabel {
|
||||
span,
|
||||
name,
|
||||
definition_span,
|
||||
|
@ -924,7 +930,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
trait_item_span,
|
||||
trait_path,
|
||||
} => {
|
||||
let mut err = self.session.struct_span_err_with_code(
|
||||
let mut err = self.tcx.sess.struct_span_err_with_code(
|
||||
span,
|
||||
&format!(
|
||||
"item `{}` is an associated {}, which doesn't match its trait `{}`",
|
||||
|
@ -937,9 +943,12 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
err
|
||||
}
|
||||
ResolutionError::TraitImplDuplicate { name, trait_item_span, old_span } => self
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.create_err(errs::TraitImplDuplicate { span, name, trait_item_span, old_span }),
|
||||
ResolutionError::InvalidAsmSym => self.session.create_err(errs::InvalidAsmSym { span }),
|
||||
ResolutionError::InvalidAsmSym => {
|
||||
self.tcx.sess.create_err(errs::InvalidAsmSym { span })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -949,7 +958,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
) -> ErrorGuaranteed {
|
||||
match vis_resolution_error {
|
||||
VisResolutionError::Relative2018(span, path) => {
|
||||
self.session.create_err(errs::Relative2018 {
|
||||
self.tcx.sess.create_err(errs::Relative2018 {
|
||||
span,
|
||||
path_span: path.span,
|
||||
// intentionally converting to String, as the text would also be used as
|
||||
|
@ -958,18 +967,20 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
})
|
||||
}
|
||||
VisResolutionError::AncestorOnly(span) => {
|
||||
self.session.create_err(errs::AncestorOnly(span))
|
||||
self.tcx.sess.create_err(errs::AncestorOnly(span))
|
||||
}
|
||||
VisResolutionError::FailedToResolve(span, label, suggestion) => {
|
||||
self.into_struct_error(span, ResolutionError::FailedToResolve { label, suggestion })
|
||||
}
|
||||
VisResolutionError::ExpectedFound(span, path_str, res) => {
|
||||
self.session.create_err(errs::ExpectedFound { span, res, path_str })
|
||||
self.tcx.sess.create_err(errs::ExpectedFound { span, res, path_str })
|
||||
}
|
||||
VisResolutionError::Indeterminate(span) => {
|
||||
self.session.create_err(errs::Indeterminate(span))
|
||||
self.tcx.sess.create_err(errs::Indeterminate(span))
|
||||
}
|
||||
VisResolutionError::ModuleOnly(span) => {
|
||||
self.tcx.sess.create_err(errs::ModuleOnly(span))
|
||||
}
|
||||
VisResolutionError::ModuleOnly(span) => self.session.create_err(errs::ModuleOnly(span)),
|
||||
}
|
||||
.emit()
|
||||
}
|
||||
|
@ -1206,7 +1217,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// a note about editions
|
||||
let note = if let Some(did) = did {
|
||||
let requires_note = !did.is_local()
|
||||
&& this.cstore().item_attrs_untracked(did, this.session).any(
|
||||
&& this.cstore().item_attrs_untracked(did, this.tcx.sess).any(
|
||||
|attr| {
|
||||
if attr.has_name(sym::rustc_diagnostic_item) {
|
||||
[sym::TryInto, sym::TryFrom, sym::FromIterator]
|
||||
|
@ -1304,7 +1315,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// otherwise cause duplicate suggestions.
|
||||
continue;
|
||||
}
|
||||
let crate_id = self.crate_loader().maybe_process_path_extern(ident.name);
|
||||
let crate_id = self.crate_loader(|c| c.maybe_process_path_extern(ident.name));
|
||||
if let Some(crate_id) = crate_id {
|
||||
let crate_root = self.expect_module(crate_id.as_def_id());
|
||||
suggestions.extend(self.lookup_import_candidates_from_module(
|
||||
|
@ -1341,8 +1352,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
let import_suggestions =
|
||||
self.lookup_import_candidates(ident, Namespace::MacroNS, parent_scope, is_expected);
|
||||
show_candidates(
|
||||
&self.session,
|
||||
&self.untracked.source_span,
|
||||
&self.tcx.sess,
|
||||
&self.tcx.untracked().source_span.read(),
|
||||
err,
|
||||
None,
|
||||
&import_suggestions,
|
||||
|
@ -1366,7 +1377,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
&& let ModuleKind::Def(DefKind::Enum, def_id, _) = parent_scope.module.kind
|
||||
&& let Some(span) = self.opt_span(def_id)
|
||||
{
|
||||
let source_map = self.session.source_map();
|
||||
let source_map = self.tcx.sess.source_map();
|
||||
let head_span = source_map.guess_head_span(span);
|
||||
if let Ok(head) = source_map.span_to_snippet(head_span) {
|
||||
err.span_suggestion(head_span, "consider adding a derive", format!("#[derive(Default)]\n{head}"), Applicability::MaybeIncorrect);
|
||||
|
@ -1443,7 +1454,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
};
|
||||
let def_span = suggestion.res.opt_def_id().and_then(|def_id| match def_id.krate {
|
||||
LOCAL_CRATE => self.opt_span(def_id),
|
||||
_ => Some(self.cstore().get_span_untracked(def_id, self.session)),
|
||||
_ => Some(self.cstore().get_span_untracked(def_id, self.tcx.sess)),
|
||||
});
|
||||
if let Some(def_span) = def_span {
|
||||
if span.overlaps(def_span) {
|
||||
|
@ -1473,7 +1484,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
};
|
||||
|
||||
err.span_label(
|
||||
self.session.source_map().guess_head_span(def_span),
|
||||
self.tcx.sess.source_map().guess_head_span(def_span),
|
||||
&format!(
|
||||
"{}{} `{}` defined here",
|
||||
prefix,
|
||||
|
@ -1498,7 +1509,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
|
||||
fn binding_description(&self, b: &NameBinding<'_>, ident: Ident, from_prelude: bool) -> String {
|
||||
let res = b.res();
|
||||
if b.span.is_dummy() || !self.session.source_map().is_span_accessible(b.span) {
|
||||
if b.span.is_dummy() || !self.tcx.sess.source_map().is_span_accessible(b.span) {
|
||||
// These already contain the "built-in" prefix or look bad with it.
|
||||
let add_built_in =
|
||||
!matches!(b.res(), Res::NonMacroAttr(..) | Res::PrimTy(..) | Res::ToolMod);
|
||||
|
@ -1506,7 +1517,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
("", " from prelude")
|
||||
} else if b.is_extern_crate()
|
||||
&& !b.is_import()
|
||||
&& self.session.opts.externs.get(ident.as_str()).is_some()
|
||||
&& self.tcx.sess.opts.externs.get(ident.as_str()).is_some()
|
||||
{
|
||||
("", " passed with `--extern`")
|
||||
} else if add_built_in {
|
||||
|
@ -1532,7 +1543,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
(b1, b2, misc1, misc2, false)
|
||||
};
|
||||
|
||||
let mut err = struct_span_err!(self.session, ident.span, E0659, "`{ident}` is ambiguous");
|
||||
let mut err = struct_span_err!(self.tcx.sess, ident.span, E0659, "`{ident}` is ambiguous");
|
||||
err.span_label(ident.span, "ambiguous name");
|
||||
err.note(&format!("ambiguous because of {}", kind.descr()));
|
||||
|
||||
|
@ -1604,7 +1615,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// Print the primary message.
|
||||
let descr = get_descr(binding);
|
||||
let mut err =
|
||||
struct_span_err!(self.session, ident.span, E0603, "{} `{}` is private", descr, ident);
|
||||
struct_span_err!(self.tcx.sess, ident.span, E0603, "{} `{}` is private", descr, ident);
|
||||
err.span_label(ident.span, &format!("private {}", descr));
|
||||
if let Some(span) = ctor_fields_span {
|
||||
err.span_label(span, "a constructor is private if any of the fields is private");
|
||||
|
@ -1650,7 +1661,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
which = if first { "" } else { " which" },
|
||||
dots = if next_binding.is_some() { "..." } else { "" },
|
||||
);
|
||||
let def_span = self.session.source_map().guess_head_span(binding.span);
|
||||
let def_span = self.tcx.sess.source_map().guess_head_span(binding.span);
|
||||
let mut note_span = MultiSpan::from_span(def_span);
|
||||
if !first && binding.vis.is_public() {
|
||||
note_span.push_span_label(def_span, "consider importing it directly");
|
||||
|
@ -1719,7 +1730,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
Applicability::MaybeIncorrect,
|
||||
)),
|
||||
)
|
||||
} else if self.session.is_rust_2015() {
|
||||
} else if self.tcx.sess.is_rust_2015() {
|
||||
(
|
||||
format!("maybe a missing crate `{ident}`?"),
|
||||
Some((
|
||||
|
@ -1738,7 +1749,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
let parent = match parent {
|
||||
// ::foo is mounted at the crate root for 2015, and is the extern
|
||||
// prelude for 2018+
|
||||
kw::PathRoot if self.session.edition() > Edition::Edition2015 => {
|
||||
kw::PathRoot if self.tcx.sess.edition() > Edition::Edition2015 => {
|
||||
"the list of imported crates".to_owned()
|
||||
}
|
||||
kw::PathRoot | kw::Crate => "the crate root".to_owned(),
|
||||
|
@ -2079,7 +2090,7 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
// ie. `use a::b::{c, d, e};`
|
||||
// ^^^
|
||||
let (found_closing_brace, binding_span) = find_span_of_binding_until_next_binding(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
import.span,
|
||||
import.use_span,
|
||||
);
|
||||
|
@ -2098,7 +2109,7 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
// ie. `use a::b::{c, d};`
|
||||
// ^^^
|
||||
if let Some(previous_span) =
|
||||
extend_span_to_previous_binding(self.r.session, binding_span)
|
||||
extend_span_to_previous_binding(self.r.tcx.sess, binding_span)
|
||||
{
|
||||
debug!("check_for_module_export_macro: previous_span={:?}", previous_span);
|
||||
removal_span = removal_span.with_lo(previous_span.lo());
|
||||
|
@ -2116,7 +2127,7 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
// or `use a::{b, c, d}};`
|
||||
// ^^^^^^^^^^^
|
||||
let (has_nested, after_crate_name) = find_span_immediately_after_crate_name(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
module_name,
|
||||
import.use_span,
|
||||
);
|
||||
|
@ -2125,7 +2136,7 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
has_nested, after_crate_name
|
||||
);
|
||||
|
||||
let source_map = self.r.session.source_map();
|
||||
let source_map = self.r.tcx.sess.source_map();
|
||||
|
||||
// Make sure this is actually crate-relative.
|
||||
let is_definitely_crate = import
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use crate::{NameBinding, NameBindingKind, Resolver, ResolverTree};
|
||||
use crate::{NameBinding, NameBindingKind, Resolver};
|
||||
use rustc_ast::ast;
|
||||
use rustc_ast::visit;
|
||||
use rustc_ast::visit::Visitor;
|
||||
|
@ -7,8 +7,8 @@ use rustc_ast::EnumDef;
|
|||
use rustc_data_structures::intern::Interned;
|
||||
use rustc_hir::def_id::LocalDefId;
|
||||
use rustc_hir::def_id::CRATE_DEF_ID;
|
||||
use rustc_middle::middle::privacy::Level;
|
||||
use rustc_middle::middle::privacy::{EffectiveVisibilities, EffectiveVisibility};
|
||||
use rustc_middle::middle::privacy::{IntoDefIdTree, Level};
|
||||
use rustc_middle::ty::{DefIdTree, Visibility};
|
||||
use std::mem;
|
||||
|
||||
|
@ -67,13 +67,6 @@ impl Resolver<'_, '_> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b, 'tcx> IntoDefIdTree for &'b mut Resolver<'a, 'tcx> {
|
||||
type Tree = &'b Resolver<'a, 'tcx>;
|
||||
fn tree(self) -> Self::Tree {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<'r, 'a, 'tcx> EffectiveVisibilitiesVisitor<'r, 'a, 'tcx> {
|
||||
/// Fills the `Resolver::effective_visibilities` table with public & exported items
|
||||
/// For now, this doesn't resolve macros (FIXME) and cannot resolve Impl, as we
|
||||
|
@ -107,11 +100,7 @@ impl<'r, 'a, 'tcx> EffectiveVisibilitiesVisitor<'r, 'a, 'tcx> {
|
|||
for (binding, eff_vis) in visitor.import_effective_visibilities.iter() {
|
||||
let NameBindingKind::Import { import, .. } = binding.kind else { unreachable!() };
|
||||
if let Some(node_id) = import.id() {
|
||||
r.effective_visibilities.update_eff_vis(
|
||||
r.local_def_id(node_id),
|
||||
eff_vis,
|
||||
ResolverTree(&r.untracked),
|
||||
)
|
||||
r.effective_visibilities.update_eff_vis(r.local_def_id(node_id), eff_vis, r.tcx)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -167,26 +156,28 @@ impl<'r, 'a, 'tcx> EffectiveVisibilitiesVisitor<'r, 'a, 'tcx> {
|
|||
let nominal_vis = binding.vis.expect_local();
|
||||
let private_vis = self.cheap_private_vis(parent_id);
|
||||
let inherited_eff_vis = self.effective_vis_or_private(parent_id);
|
||||
let tcx = self.r.tcx;
|
||||
self.changed |= self.import_effective_visibilities.update(
|
||||
binding,
|
||||
nominal_vis,
|
||||
|r| (private_vis.unwrap_or_else(|| r.private_vis_import(binding)), r),
|
||||
|| private_vis.unwrap_or_else(|| self.r.private_vis_import(binding)),
|
||||
inherited_eff_vis,
|
||||
parent_id.level(),
|
||||
&mut *self.r,
|
||||
tcx,
|
||||
);
|
||||
}
|
||||
|
||||
fn update_def(&mut self, def_id: LocalDefId, nominal_vis: Visibility, parent_id: ParentId<'a>) {
|
||||
let private_vis = self.cheap_private_vis(parent_id);
|
||||
let inherited_eff_vis = self.effective_vis_or_private(parent_id);
|
||||
let tcx = self.r.tcx;
|
||||
self.changed |= self.def_effective_visibilities.update(
|
||||
def_id,
|
||||
nominal_vis,
|
||||
|r| (private_vis.unwrap_or_else(|| r.private_vis_def(def_id)), r),
|
||||
|| private_vis.unwrap_or_else(|| self.r.private_vis_def(def_id)),
|
||||
inherited_eff_vis,
|
||||
parent_id.level(),
|
||||
&mut *self.r,
|
||||
tcx,
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -1179,7 +1179,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
}
|
||||
|
||||
ConstantItemRibKind(trivial, _) => {
|
||||
let features = self.session.features_untracked();
|
||||
let features = self.tcx.sess.features_untracked();
|
||||
// HACK(min_const_generics): We currently only allow `N` or `{ N }`.
|
||||
if !(trivial == ConstantHasGenerics::Yes
|
||||
|| features.generic_const_exprs)
|
||||
|
@ -1208,7 +1208,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
is_type: true,
|
||||
},
|
||||
);
|
||||
self.session.delay_span_bug(span, CG_BUG_STR);
|
||||
self.tcx.sess.delay_span_bug(span, CG_BUG_STR);
|
||||
}
|
||||
|
||||
return Res::Err;
|
||||
|
@ -1255,7 +1255,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
| ForwardGenericParamBanRibKind => continue,
|
||||
|
||||
ConstantItemRibKind(trivial, _) => {
|
||||
let features = self.session.features_untracked();
|
||||
let features = self.tcx.sess.features_untracked();
|
||||
// HACK(min_const_generics): We currently only allow `N` or `{ N }`.
|
||||
if !(trivial == ConstantHasGenerics::Yes
|
||||
|| features.generic_const_exprs)
|
||||
|
@ -1268,7 +1268,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
is_type: false,
|
||||
},
|
||||
);
|
||||
self.session.delay_span_bug(span, CG_BUG_STR);
|
||||
self.tcx.sess.delay_span_bug(span, CG_BUG_STR);
|
||||
}
|
||||
|
||||
return Res::Err;
|
||||
|
@ -1397,7 +1397,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
module = Some(ModuleOrUniformRoot::ExternPrelude);
|
||||
continue;
|
||||
}
|
||||
if name == kw::PathRoot && ident.span.is_rust_2015() && self.session.rust_2018()
|
||||
if name == kw::PathRoot
|
||||
&& ident.span.is_rust_2015()
|
||||
&& self.tcx.sess.rust_2018()
|
||||
{
|
||||
// `::a::b` from 2015 macro on 2018 global edition
|
||||
module = Some(ModuleOrUniformRoot::CrateRootAndExternPrelude);
|
||||
|
@ -1494,7 +1496,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
record_segment_res(self, res);
|
||||
} else if res == Res::ToolMod && i + 1 != path.len() {
|
||||
if binding.is_import() {
|
||||
self.session
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(
|
||||
ident.span,
|
||||
"cannot use a tool module through an import",
|
||||
|
|
|
@ -21,8 +21,8 @@ use rustc_middle::span_bug;
|
|||
use rustc_middle::ty;
|
||||
use rustc_session::lint::builtin::{PUB_USE_OF_PRIVATE_EXTERN_CRATE, UNUSED_IMPORTS};
|
||||
use rustc_session::lint::BuiltinLintDiagnostics;
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::hygiene::LocalExpnId;
|
||||
use rustc_span::lev_distance::find_best_match_for_name;
|
||||
use rustc_span::symbol::{kw, Ident, Symbol};
|
||||
use rustc_span::Span;
|
||||
|
||||
|
@ -526,7 +526,7 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
.collect::<Vec<_>>();
|
||||
let msg = format!("unresolved import{} {}", pluralize!(paths.len()), paths.join(", "),);
|
||||
|
||||
let mut diag = struct_span_err!(self.r.session, span, E0432, "{}", &msg);
|
||||
let mut diag = struct_span_err!(self.r.tcx.sess, span, E0432, "{}", &msg);
|
||||
|
||||
if let Some((_, UnresolvedImportError { note: Some(note), .. })) = errors.iter().last() {
|
||||
diag.note(note);
|
||||
|
@ -548,8 +548,8 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
if let Some(candidates) = &err.candidates {
|
||||
match &import.kind {
|
||||
ImportKind::Single { nested: false, source, target, .. } => import_candidates(
|
||||
self.r.session,
|
||||
&self.r.untracked.source_span,
|
||||
self.r.tcx.sess,
|
||||
&self.r.tcx.untracked().source_span.read(),
|
||||
&mut diag,
|
||||
Some(err.span),
|
||||
&candidates,
|
||||
|
@ -561,8 +561,8 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
),
|
||||
ImportKind::Single { nested: true, source, target, .. } => {
|
||||
import_candidates(
|
||||
self.r.session,
|
||||
&self.r.untracked.source_span,
|
||||
self.r.tcx.sess,
|
||||
&self.r.tcx.untracked().source_span.read(),
|
||||
&mut diag,
|
||||
None,
|
||||
&candidates,
|
||||
|
@ -658,7 +658,7 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
source_binding @ (Ok(..) | Err(Determined)) => {
|
||||
if source_binding.is_ok() {
|
||||
let msg = format!("`{}` is not directly importable", target);
|
||||
struct_span_err!(this.session, import.span, E0253, "{}", &msg)
|
||||
struct_span_err!(this.tcx.sess, import.span, E0253, "{}", &msg)
|
||||
.span_label(import.span, "cannot be imported directly")
|
||||
.emit();
|
||||
}
|
||||
|
@ -706,7 +706,7 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
} else if self.r.privacy_errors.is_empty() {
|
||||
let msg = "cannot determine resolution for the import";
|
||||
let msg_note = "import resolution is stuck, try simplifying other imports";
|
||||
self.r.session.struct_span_err(import.span, msg).note(msg_note).emit();
|
||||
self.r.tcx.sess.struct_span_err(import.span, msg).note(msg_note).emit();
|
||||
}
|
||||
|
||||
module
|
||||
|
@ -859,7 +859,7 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
let msg = "cannot determine resolution for the import";
|
||||
let msg_note =
|
||||
"import resolution is stuck, try simplifying other imports";
|
||||
this.session.struct_span_err(import.span, msg).note(msg_note).emit();
|
||||
this.tcx.sess.struct_span_err(import.span, msg).note(msg_note).emit();
|
||||
}
|
||||
}
|
||||
Err(..) => {
|
||||
|
@ -1035,13 +1035,13 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
format!("re-export of private `{}`", ident)
|
||||
};
|
||||
|
||||
struct_span_err!(self.r.session, import.span, E0365, "{}", error_msg)
|
||||
struct_span_err!(self.r.tcx.sess, import.span, E0365, "{}", error_msg)
|
||||
.span_label(import.span, label_msg)
|
||||
.note(&format!("consider declaring type or module `{}` with `pub`", ident))
|
||||
.emit();
|
||||
} else {
|
||||
let mut err =
|
||||
struct_span_err!(self.r.session, import.span, E0364, "{error_msg}");
|
||||
struct_span_err!(self.r.tcx.sess, import.span, E0364, "{error_msg}");
|
||||
match binding.kind {
|
||||
NameBindingKind::Res(Res::Def(DefKind::Macro(_), def_id))
|
||||
// exclude decl_macro
|
||||
|
@ -1164,12 +1164,12 @@ impl<'a, 'b, 'tcx> ImportResolver<'a, 'b, 'tcx> {
|
|||
let ImportKind::Glob { id, is_prelude, .. } = import.kind else { unreachable!() };
|
||||
|
||||
let ModuleOrUniformRoot::Module(module) = import.imported_module.get().unwrap() else {
|
||||
self.r.session.span_err(import.span, "cannot glob-import all possible crates");
|
||||
self.r.tcx.sess.span_err(import.span, "cannot glob-import all possible crates");
|
||||
return;
|
||||
};
|
||||
|
||||
if module.is_trait() {
|
||||
self.r.session.span_err(import.span, "items in traits are not importable");
|
||||
self.r.tcx.sess.span_err(import.span, "items in traits are not importable");
|
||||
return;
|
||||
} else if ptr::eq(module, import.parent_scope.module) {
|
||||
return;
|
||||
|
|
|
@ -682,7 +682,7 @@ impl<'a: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast,
|
|||
// Elided lifetime in reference: we resolve as if there was some lifetime `'_` with
|
||||
// NodeId `ty.id`.
|
||||
// This span will be used in case of elision failure.
|
||||
let span = self.r.session.source_map().start_point(ty.span);
|
||||
let span = self.r.tcx.sess.source_map().start_point(ty.span);
|
||||
self.resolve_elided_lifetime(ty.id, span);
|
||||
visit::walk_ty(self, ty);
|
||||
}
|
||||
|
@ -1571,7 +1571,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
("`'_` cannot be used here", "`'_` is a reserved lifetime name")
|
||||
};
|
||||
let mut diag = rustc_errors::struct_span_err!(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
lifetime.ident.span,
|
||||
E0637,
|
||||
"{}",
|
||||
|
@ -1748,7 +1748,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
// impl Foo for std::cell::Ref<u32> // note lack of '_
|
||||
// async fn foo(_: std::cell::Ref<u32>) { ... }
|
||||
LifetimeRibKind::AnonymousCreateParameter { report_in_path: true, .. } => {
|
||||
let sess = self.r.session;
|
||||
let sess = self.r.tcx.sess;
|
||||
let mut err = rustc_errors::struct_span_err!(
|
||||
sess,
|
||||
path_span,
|
||||
|
@ -2194,7 +2194,8 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
let what = if ns == TypeNS { "type parameters" } else { "local variables" };
|
||||
if this.should_report_errs() {
|
||||
this.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.span_err(ident.span, &format!("imports cannot refer to {}", what));
|
||||
}
|
||||
};
|
||||
|
@ -2438,7 +2439,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
if let GenericParamKind::Lifetime = param.kind
|
||||
&& let Some(&original) = seen_lifetimes.get(&ident)
|
||||
{
|
||||
diagnostics::signal_lifetime_shadowing(self.r.session, original, param.ident);
|
||||
diagnostics::signal_lifetime_shadowing(self.r.tcx.sess, original, param.ident);
|
||||
// Record lifetime res, so lowering knows there is something fishy.
|
||||
self.record_lifetime_param(param.id, LifetimeRes::Error);
|
||||
continue;
|
||||
|
@ -2462,7 +2463,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
|
||||
if param.ident.name == kw::UnderscoreLifetime {
|
||||
rustc_errors::struct_span_err!(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
param.ident.span,
|
||||
E0637,
|
||||
"`'_` cannot be used here"
|
||||
|
@ -2476,7 +2477,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
|
||||
if param.ident.name == kw::StaticLifetime {
|
||||
rustc_errors::struct_span_err!(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
param.ident.span,
|
||||
E0262,
|
||||
"invalid lifetime parameter name: `{}`",
|
||||
|
@ -2506,7 +2507,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
let res = match kind {
|
||||
ItemRibKind(..) | AssocItemRibKind => Res::Def(def_kind, def_id.to_def_id()),
|
||||
NormalRibKind => {
|
||||
if self.r.session.features_untracked().non_lifetime_binders {
|
||||
if self.r.tcx.sess.features_untracked().non_lifetime_binders {
|
||||
Res::Def(def_kind, def_id.to_def_id())
|
||||
} else {
|
||||
Res::Err
|
||||
|
@ -3384,7 +3385,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
Res::SelfCtor(_) => {
|
||||
// We resolve `Self` in pattern position as an ident sometimes during recovery,
|
||||
// so delay a bug instead of ICEing.
|
||||
self.r.session.delay_span_bug(
|
||||
self.r.tcx.sess.delay_span_bug(
|
||||
ident.span,
|
||||
"unexpected `SelfCtor` in pattern, expected identifier"
|
||||
);
|
||||
|
@ -3664,7 +3665,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
#[inline]
|
||||
/// If we're actually rustdoc then avoid giving a name resolution error for `cfg()` items.
|
||||
fn should_report_errs(&self) -> bool {
|
||||
!(self.r.session.opts.actually_rustdoc && self.in_func_body)
|
||||
!(self.r.tcx.sess.opts.actually_rustdoc && self.in_func_body)
|
||||
}
|
||||
|
||||
// Resolve in alternative namespaces if resolution in the primary namespace fails.
|
||||
|
@ -3829,7 +3830,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
}
|
||||
|
||||
if let Ok((_, orig_span)) = self.resolve_label(label.ident) {
|
||||
diagnostics::signal_label_shadowing(self.r.session, orig_span, label.ident)
|
||||
diagnostics::signal_label_shadowing(self.r.tcx.sess, orig_span, label.ident)
|
||||
}
|
||||
|
||||
self.with_label_rib(NormalRibKind, |this| {
|
||||
|
@ -4135,9 +4136,9 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
fn record_candidate_traits_for_expr_if_necessary(&mut self, expr: &'ast Expr) {
|
||||
match expr.kind {
|
||||
ExprKind::Field(_, ident) => {
|
||||
// FIXME(#6890): Even though you can't treat a method like a
|
||||
// field, we need to add any trait methods we find that match
|
||||
// the field name so that we can do some nice error reporting
|
||||
// #6890: Even though you can't treat a method like a field,
|
||||
// we need to add any trait methods we find that match the
|
||||
// field name so that we can do some nice error reporting
|
||||
// later on in typeck.
|
||||
let traits = self.traits_in_scope(ident, ValueNS);
|
||||
self.r.trait_map.insert(expr.id, traits);
|
||||
|
@ -4211,8 +4212,8 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
if let Some(res) = res
|
||||
&& let Some(def_id) = res.opt_def_id()
|
||||
&& !def_id.is_local()
|
||||
&& self.r.session.crate_types().contains(&CrateType::ProcMacro)
|
||||
&& matches!(self.r.session.opts.resolve_doc_links, ResolveDocLinks::ExportedMetadata) {
|
||||
&& self.r.tcx.sess.crate_types().contains(&CrateType::ProcMacro)
|
||||
&& matches!(self.r.tcx.sess.opts.resolve_doc_links, ResolveDocLinks::ExportedMetadata) {
|
||||
// Encoding foreign def ids in proc macro crate metadata will ICE.
|
||||
return None;
|
||||
}
|
||||
|
@ -4224,10 +4225,10 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
}
|
||||
|
||||
fn resolve_doc_links(&mut self, attrs: &[Attribute], maybe_exported: MaybeExported<'_>) {
|
||||
match self.r.session.opts.resolve_doc_links {
|
||||
match self.r.tcx.sess.opts.resolve_doc_links {
|
||||
ResolveDocLinks::None => return,
|
||||
ResolveDocLinks::ExportedMetadata
|
||||
if !self.r.session.crate_types().iter().copied().any(CrateType::has_metadata)
|
||||
if !self.r.tcx.sess.crate_types().iter().copied().any(CrateType::has_metadata)
|
||||
|| !maybe_exported.eval(self.r) =>
|
||||
{
|
||||
return;
|
||||
|
@ -4281,9 +4282,9 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
|||
.into_iter()
|
||||
.filter_map(|tr| {
|
||||
if !tr.def_id.is_local()
|
||||
&& self.r.session.crate_types().contains(&CrateType::ProcMacro)
|
||||
&& self.r.tcx.sess.crate_types().contains(&CrateType::ProcMacro)
|
||||
&& matches!(
|
||||
self.r.session.opts.resolve_doc_links,
|
||||
self.r.tcx.sess.opts.resolve_doc_links,
|
||||
ResolveDocLinks::ExportedMetadata
|
||||
)
|
||||
{
|
||||
|
|
|
@ -25,9 +25,9 @@ use rustc_middle::ty::DefIdTree;
 use rustc_session::lint;
 use rustc_session::parse::feature_err;
 use rustc_session::Session;
+use rustc_span::edit_distance::find_best_match_for_name;
 use rustc_span::edition::Edition;
 use rustc_span::hygiene::MacroKind;
-use rustc_span::lev_distance::find_best_match_for_name;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::{BytePos, Span};

@ -170,7 +170,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
fn def_span(&self, def_id: DefId) -> Option<Span> {
|
||||
match def_id.krate {
|
||||
LOCAL_CRATE => self.r.opt_span(def_id),
|
||||
_ => Some(self.r.cstore().get_span_untracked(def_id, self.r.session)),
|
||||
_ => Some(self.r.cstore().get_span_untracked(def_id, self.r.tcx.sess)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -200,7 +200,8 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
Res::Def(DefKind::Fn, _) => {
|
||||
// Verify whether this is a fn call or an Fn used as a type.
|
||||
self.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.span_to_snippet(span)
|
||||
.map(|snippet| snippet.ends_with(')'))
|
||||
|
@ -255,7 +256,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
};
|
||||
(String::new(), "this scope".to_string(), suggestion)
|
||||
} else if path.len() == 2 && path[0].ident.name == kw::PathRoot {
|
||||
if self.r.session.edition() > Edition::Edition2015 {
|
||||
if self.r.tcx.sess.edition() > Edition::Edition2015 {
|
||||
// In edition 2018 onwards, the `::foo` syntax may only pull from the extern prelude
|
||||
// which overrides all other expectations of item type
|
||||
expected = "crate";
|
||||
|
@ -323,7 +324,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
let base_error = self.make_base_error(path, span, source, res);
|
||||
let code = source.error_code(res.is_some());
|
||||
let mut err =
|
||||
self.r.session.struct_span_err_with_code(base_error.span, &base_error.msg, code);
|
||||
self.r.tcx.sess.struct_span_err_with_code(base_error.span, &base_error.msg, code);
|
||||
|
||||
self.suggest_swapping_misplaced_self_ty_and_trait(&mut err, source, res, base_error.span);
|
||||
|
||||
|
@ -432,7 +433,8 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
} else {
|
||||
(
|
||||
self.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.span_through_char(*fn_span, '(')
|
||||
.shrink_to_hi(),
|
||||
|
@ -505,7 +507,8 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
{
|
||||
if self
|
||||
.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.parse_sess
|
||||
.type_ascription_path_suggestions
|
||||
.borrow()
|
||||
|
@ -542,7 +545,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
// Try Levenshtein algorithm.
|
||||
// Try finding a suitable replacement.
|
||||
let typo_sugg =
|
||||
self.lookup_typo_candidate(path, source.namespace(), is_expected).to_opt_suggestion();
|
||||
if path.len() == 1 && self.self_type_is_available() {
|
||||
|
@ -596,7 +599,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
if let Some((call_span, args_span)) = self.call_has_self_arg(source) {
|
||||
let mut args_snippet = String::new();
|
||||
if let Some(args_span) = args_span {
|
||||
if let Ok(snippet) = self.r.session.source_map().span_to_snippet(args_span) {
|
||||
if let Ok(snippet) = self.r.tcx.sess.source_map().span_to_snippet(args_span) {
|
||||
args_snippet = snippet;
|
||||
}
|
||||
}
|
||||
|
@ -732,7 +735,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
let ident_span = path.last().map_or(span, |ident| ident.ident.span);
|
||||
let typo_sugg = self.lookup_typo_candidate(path, source.namespace(), is_expected);
|
||||
let is_in_same_file = &|sp1, sp2| {
|
||||
let source_map = self.r.session.source_map();
|
||||
let source_map = self.r.tcx.sess.source_map();
|
||||
let file1 = source_map.span_to_filename(sp1);
|
||||
let file2 = source_map.span_to_filename(sp2);
|
||||
file1 == file2
|
||||
|
@ -770,7 +773,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
_ => {}
|
||||
}
|
||||
|
||||
// If the trait has a single item (which wasn't matched by Levenshtein), suggest it
|
||||
// If the trait has a single item (which wasn't matched by the algorithm), suggest it
|
||||
let suggestion = self.get_single_associated_item(&path, &source, is_expected);
|
||||
if !self.r.add_typo_suggestion(err, suggestion, ident_span) {
|
||||
fallback = !self.let_binding_suggestion(err, ident_span);
|
||||
|
@ -895,7 +898,8 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
.map_or(*span, |ident| span.with_lo(ident.span.hi()));
|
||||
(
|
||||
self.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.span_through_char(span, '(')
|
||||
.shrink_to_hi(),
|
||||
|
@ -949,9 +953,9 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
&& let PathSource::Trait(_) = source
|
||||
&& let Some(Res::Def(DefKind::Struct | DefKind::Enum | DefKind::Union, _)) = res
|
||||
&& let Ok(self_ty_str) =
|
||||
self.r.session.source_map().span_to_snippet(self_ty.span)
|
||||
self.r.tcx.sess.source_map().span_to_snippet(self_ty.span)
|
||||
&& let Ok(trait_ref_str) =
|
||||
self.r.session.source_map().span_to_snippet(trait_ref.path.span)
|
||||
self.r.tcx.sess.source_map().span_to_snippet(trait_ref.path.span)
|
||||
{
|
||||
err.multipart_suggestion(
|
||||
"`impl` items mention the trait being implemented first and the type it is being implemented for second",
|
||||
|
@ -1095,7 +1099,8 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
format!(
|
||||
"{}: {}<{} = {}>",
|
||||
self.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.span_to_snippet(ty.span) // Account for `<&'a T as Foo>::Bar`.
|
||||
.unwrap_or_else(|_| constrain_ident.to_string()),
|
||||
|
@ -1164,7 +1169,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
// parser issue where a struct literal is being used on an expression
|
||||
// where a brace being opened means a block is being started. Look
|
||||
// ahead for the next text to see if `span` is followed by a `{`.
|
||||
let sm = self.r.session.source_map();
|
||||
let sm = self.r.tcx.sess.source_map();
|
||||
let sp = sm.span_look_ahead(span, None, Some(50));
|
||||
let followed_by_brace = matches!(sm.span_to_snippet(sp), Ok(ref snippet) if snippet == "{");
|
||||
// In case this could be a struct literal that needs to be surrounded
|
||||
|
@ -1212,7 +1217,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
true
|
||||
} else if kind == DefKind::Struct
|
||||
&& let Some(lhs_source_span) = lhs_span.find_ancestor_inside(expr.span)
|
||||
&& let Ok(snippet) = self.r.session.source_map().span_to_snippet(lhs_source_span)
|
||||
&& let Ok(snippet) = self.r.tcx.sess.source_map().span_to_snippet(lhs_source_span)
|
||||
{
|
||||
// The LHS is a type that originates from a macro call.
|
||||
// We have to add angle brackets around it.
|
||||
|
@ -1352,11 +1357,11 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
}
|
||||
(Res::Def(DefKind::TyAlias, def_id), PathSource::Trait(_)) => {
|
||||
err.span_label(span, "type aliases cannot be used as traits");
|
||||
if self.r.session.is_nightly_build() {
|
||||
if self.r.tcx.sess.is_nightly_build() {
|
||||
let msg = "you might have meant to use `#![feature(trait_alias)]` instead of a \
|
||||
`type` alias";
|
||||
if let Some(span) = self.def_span(def_id) {
|
||||
if let Ok(snip) = self.r.session.source_map().span_to_snippet(span) {
|
||||
if let Ok(snip) = self.r.tcx.sess.source_map().span_to_snippet(span) {
|
||||
// The span contains a type alias so we should be able to
|
||||
// replace `type` with `trait`.
|
||||
let snip = snip.replacen("type", "trait", 1);
|
||||
|
@ -1387,7 +1392,8 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
.last()
|
||||
.map(|sp| {
|
||||
self.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.parse_sess
|
||||
.type_ascription_path_suggestions
|
||||
.borrow()
|
||||
|
@ -1694,8 +1700,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
let extern_prelude = self.r.extern_prelude.clone();
|
||||
names.extend(extern_prelude.iter().flat_map(|(ident, _)| {
|
||||
self.r
|
||||
.crate_loader()
|
||||
.maybe_process_path_extern(ident.name)
|
||||
.crate_loader(|c| c.maybe_process_path_extern(ident.name))
|
||||
.and_then(|crate_id| {
|
||||
let crate_mod =
|
||||
Res::Def(DefKind::Mod, crate_id.as_def_id());
|
||||
|
@ -1774,12 +1779,12 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
|
||||
/// Only used in a specific case of type ascription suggestions
|
||||
fn get_colon_suggestion_span(&self, start: Span) -> Span {
|
||||
let sm = self.r.session.source_map();
|
||||
let sm = self.r.tcx.sess.source_map();
|
||||
start.to(sm.next_point(start))
|
||||
}
|
||||
|
||||
fn type_ascription_suggestion(&self, err: &mut Diagnostic, base_span: Span) -> bool {
|
||||
let sm = self.r.session.source_map();
|
||||
let sm = self.r.tcx.sess.source_map();
|
||||
let base_snippet = sm.span_to_snippet(base_span);
|
||||
if let Some(&sp) = self.diagnostic_metadata.current_type_ascription.last() {
|
||||
if let Ok(snippet) = sm.span_to_snippet(sp) {
|
||||
|
@ -1809,7 +1814,8 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
show_label = false;
|
||||
if !self
|
||||
.r
|
||||
.session
|
||||
.tcx
|
||||
.sess
|
||||
.parse_sess
|
||||
.type_ascription_path_suggestions
|
||||
.borrow_mut()
|
||||
|
@ -2272,7 +2278,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
debug_assert_ne!(lifetime_ref.ident.name, kw::UnderscoreLifetime);
|
||||
let mut err = if let Some(outer) = outer_lifetime_ref {
|
||||
let mut err = struct_span_err!(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
lifetime_ref.ident.span,
|
||||
E0401,
|
||||
"can't use generic parameters from outer item",
|
||||
|
@ -2282,7 +2288,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
err
|
||||
} else {
|
||||
let mut err = struct_span_err!(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
lifetime_ref.ident.span,
|
||||
E0261,
|
||||
"use of undeclared lifetime name `{}`",
|
||||
|
@ -2340,8 +2346,13 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
);
|
||||
(span, sugg)
|
||||
} else {
|
||||
let span =
|
||||
self.r.session.source_map().span_through_char(span, '<').shrink_to_hi();
|
||||
let span = self
|
||||
.r
|
||||
.tcx
|
||||
.sess
|
||||
.source_map()
|
||||
.span_through_char(span, '<')
|
||||
.shrink_to_hi();
|
||||
let sugg = format!("{}, ", name.unwrap_or("'a"));
|
||||
(span, sugg)
|
||||
};
|
||||
|
@ -2375,7 +2386,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
|
||||
pub(crate) fn emit_non_static_lt_in_const_generic_error(&self, lifetime_ref: &ast::Lifetime) {
|
||||
struct_span_err!(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
lifetime_ref.ident.span,
|
||||
E0771,
|
||||
"use of non-static lifetime `{}` in const generic",
|
||||
|
@ -2395,10 +2406,10 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
&self,
|
||||
lifetime_ref: &ast::Lifetime,
|
||||
) {
|
||||
let feature_active = self.r.session.features_untracked().generic_const_exprs;
|
||||
let feature_active = self.r.tcx.sess.features_untracked().generic_const_exprs;
|
||||
if !feature_active {
|
||||
feature_err(
|
||||
&self.r.session.parse_sess,
|
||||
&self.r.tcx.sess.parse_sess,
|
||||
sym::generic_const_exprs,
|
||||
lifetime_ref.ident.span,
|
||||
"a non-static lifetime is not allowed in a `const`",
|
||||
|
@ -2416,7 +2427,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
|
|||
let spans: Vec<_> = lifetime_refs.iter().map(|lt| lt.span).collect();
|
||||
|
||||
let mut err = struct_span_err!(
|
||||
self.r.session,
|
||||
self.r.tcx.sess,
|
||||
spans,
|
||||
E0106,
|
||||
"missing lifetime specifier{}",
|
||||
|
|
|
@ -27,26 +27,25 @@ use rustc_ast::{self as ast, NodeId, CRATE_NODE_ID};
|
|||
use rustc_ast::{AngleBracketedArg, Crate, Expr, ExprKind, GenericArg, GenericArgs, LitKind, Path};
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
|
||||
use rustc_data_structures::intern::Interned;
|
||||
use rustc_data_structures::sync::{Lrc, RwLock};
|
||||
use rustc_data_structures::sync::{Lrc, MappedReadGuard};
|
||||
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed};
|
||||
use rustc_expand::base::{DeriveResolutions, SyntaxExtension, SyntaxExtensionKind};
|
||||
use rustc_hir::def::Namespace::{self, *};
|
||||
use rustc_hir::def::{self, CtorOf, DefKind, DocLinkResMap, LifetimeRes, PartialRes, PerNS};
|
||||
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId};
|
||||
use rustc_hir::def_id::{CRATE_DEF_ID, LOCAL_CRATE};
|
||||
use rustc_hir::definitions::{DefPathData, Definitions};
|
||||
use rustc_hir::definitions::DefPathData;
|
||||
use rustc_hir::TraitCandidate;
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_metadata::creader::{CStore, CrateLoader};
|
||||
use rustc_middle::metadata::ModChild;
|
||||
use rustc_middle::middle::privacy::EffectiveVisibilities;
|
||||
use rustc_middle::span_bug;
|
||||
use rustc_middle::ty::{self, DefIdTree, MainDefinition, RegisteredTools};
|
||||
use rustc_middle::ty::{self, DefIdTree, MainDefinition, RegisteredTools, TyCtxt};
|
||||
use rustc_middle::ty::{ResolverGlobalCtxt, ResolverOutputs};
|
||||
use rustc_query_system::ich::StableHashingContext;
|
||||
use rustc_session::cstore::{CrateStore, MetadataLoaderDyn, Untracked};
|
||||
use rustc_session::cstore::CrateStore;
|
||||
use rustc_session::lint::LintBuffer;
|
||||
use rustc_session::Session;
|
||||
use rustc_span::hygiene::{ExpnId, LocalExpnId, MacroKind, SyntaxContext, Transparency};
|
||||
use rustc_span::source_map::Spanned;
|
||||
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||
|
@ -865,7 +864,7 @@ struct MacroData {
 ///
 /// This is the visitor that walks the whole crate.
 pub struct Resolver<'a, 'tcx> {
-    session: &'tcx Session,
+    tcx: TyCtxt<'tcx>,

     /// Item with a given `LocalDefId` was defined during macro expansion with ID `ExpnId`.
     expn_that_defined: FxHashMap<LocalDefId, ExpnId>,
@ -956,9 +955,6 @@ pub struct Resolver<'a, 'tcx> {
     arenas: &'a ResolverArenas<'a>,
     dummy_binding: &'a NameBinding<'a>,

-    local_crate_name: Symbol,
-    metadata_loader: Box<MetadataLoaderDyn>,
-    untracked: Untracked,
     used_extern_options: FxHashSet<Symbol>,
     macro_names: FxHashSet<Ident>,
     builtin_macros: FxHashMap<Symbol, BuiltinMacroState>,
@ -1117,27 +1113,10 @@ impl<'a, 'tcx> AsMut<Resolver<'a, 'tcx>> for Resolver<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// A minimal subset of resolver that can implemenent `DefIdTree`, sometimes
|
||||
/// required to satisfy borrow checker by avoiding borrowing the whole resolver.
|
||||
#[derive(Clone, Copy)]
|
||||
struct ResolverTree<'a>(&'a Untracked);
|
||||
|
||||
impl DefIdTree for ResolverTree<'_> {
|
||||
#[inline]
|
||||
fn opt_parent(self, id: DefId) -> Option<DefId> {
|
||||
let ResolverTree(Untracked { definitions, cstore, .. }) = self;
|
||||
match id.as_local() {
|
||||
Some(id) => definitions.read().def_key(id).parent,
|
||||
None => cstore.as_any().downcast_ref::<CStore>().unwrap().def_key(id).parent,
|
||||
}
|
||||
.map(|index| DefId { index, ..id })
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b, 'tcx> DefIdTree for &'a Resolver<'b, 'tcx> {
|
||||
#[inline]
|
||||
fn opt_parent(self, id: DefId) -> Option<DefId> {
|
||||
ResolverTree(&self.untracked).opt_parent(id)
|
||||
self.tcx.opt_parent(id)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1164,10 +1143,11 @@ impl<'tcx> Resolver<'_, 'tcx> {
|
|||
"adding a def'n for node-id {:?} and data {:?} but a previous def'n exists: {:?}",
|
||||
node_id,
|
||||
data,
|
||||
self.untracked.definitions.read().def_key(self.node_id_to_def_id[&node_id]),
|
||||
self.tcx.definitions_untracked().def_key(self.node_id_to_def_id[&node_id]),
|
||||
);
|
||||
|
||||
let def_id = self.untracked.definitions.write().create_def(parent, data);
|
||||
// FIXME: remove `def_span` body, pass in the right spans here and call `tcx.at().create_def()`
|
||||
let def_id = self.tcx.untracked().definitions.write().create_def(parent, data);
|
||||
|
||||
// Create the definition.
|
||||
if expn_id != ExpnId::root() {
|
||||
|
@ -1176,7 +1156,7 @@ impl<'tcx> Resolver<'_, 'tcx> {
|
|||
|
||||
// A relative span's parent must be an absolute span.
|
||||
debug_assert_eq!(span.data_untracked().parent, None);
|
||||
let _id = self.untracked.source_span.push(span);
|
||||
let _id = self.tcx.untracked().source_span.write().push(span);
|
||||
debug_assert_eq!(_id, def_id);
|
||||
|
||||
// Some things for which we allocate `LocalDefId`s don't correspond to
|
||||
|
@ -1195,17 +1175,19 @@ impl<'tcx> Resolver<'_, 'tcx> {
|
|||
if let Some(def_id) = def_id.as_local() {
|
||||
self.item_generics_num_lifetimes[&def_id]
|
||||
} else {
|
||||
self.cstore().item_generics_num_lifetimes(def_id, self.session)
|
||||
self.cstore().item_generics_num_lifetimes(def_id, self.tcx.sess)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tcx(&self) -> TyCtxt<'tcx> {
|
||||
self.tcx
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
pub fn new(
|
||||
session: &'tcx Session,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
krate: &Crate,
|
||||
crate_name: Symbol,
|
||||
metadata_loader: Box<MetadataLoaderDyn>,
|
||||
arenas: &'a ResolverArenas<'a>,
|
||||
) -> Resolver<'a, 'tcx> {
|
||||
let root_def_id = CRATE_DEF_ID.to_def_id();
|
||||
|
@ -1215,7 +1197,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
ModuleKind::Def(DefKind::Mod, root_def_id, kw::Empty),
|
||||
ExpnId::root(),
|
||||
krate.spans.inner_span,
|
||||
session.contains_name(&krate.attrs, sym::no_implicit_prelude),
|
||||
tcx.sess.contains_name(&krate.attrs, sym::no_implicit_prelude),
|
||||
&mut module_map,
|
||||
);
|
||||
let empty_module = arenas.new_module(
|
||||
|
@ -1227,8 +1209,6 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
&mut FxHashMap::default(),
|
||||
);
|
||||
|
||||
let definitions = Definitions::new(session.local_stable_crate_id());
|
||||
|
||||
let mut visibilities = FxHashMap::default();
|
||||
visibilities.insert(CRATE_DEF_ID, ty::Visibility::Public);
|
||||
|
||||
|
@ -1240,11 +1220,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
let mut invocation_parents = FxHashMap::default();
|
||||
invocation_parents.insert(LocalExpnId::ROOT, (CRATE_DEF_ID, ImplTraitContext::Existential));
|
||||
|
||||
let mut source_span = IndexVec::default();
|
||||
let _id = source_span.push(krate.spans.inner_span);
|
||||
debug_assert_eq!(_id, CRATE_DEF_ID);
|
||||
|
||||
let mut extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'_>> = session
|
||||
let mut extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'_>> = tcx
|
||||
.sess
|
||||
.opts
|
||||
.externs
|
||||
.iter()
|
||||
|
@ -1252,19 +1229,19 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
.map(|(name, _)| (Ident::from_str(name), Default::default()))
|
||||
.collect();
|
||||
|
||||
if !session.contains_name(&krate.attrs, sym::no_core) {
|
||||
if !tcx.sess.contains_name(&krate.attrs, sym::no_core) {
|
||||
extern_prelude.insert(Ident::with_dummy_span(sym::core), Default::default());
|
||||
if !session.contains_name(&krate.attrs, sym::no_std) {
|
||||
if !tcx.sess.contains_name(&krate.attrs, sym::no_std) {
|
||||
extern_prelude.insert(Ident::with_dummy_span(sym::std), Default::default());
|
||||
}
|
||||
}
|
||||
|
||||
let registered_tools = macros::registered_tools(session, &krate.attrs);
|
||||
let registered_tools = macros::registered_tools(tcx.sess, &krate.attrs);
|
||||
|
||||
let features = session.features_untracked();
|
||||
let features = tcx.sess.features_untracked();
|
||||
|
||||
let mut resolver = Resolver {
|
||||
session,
|
||||
tcx,
|
||||
|
||||
expn_that_defined: Default::default(),
|
||||
|
||||
|
@ -1318,23 +1295,16 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
vis: ty::Visibility::Public,
|
||||
}),
|
||||
|
||||
metadata_loader,
|
||||
local_crate_name: crate_name,
|
||||
used_extern_options: Default::default(),
|
||||
untracked: Untracked {
|
||||
cstore: Box::new(CStore::new(session)),
|
||||
source_span,
|
||||
definitions: RwLock::new(definitions),
|
||||
},
|
||||
macro_names: FxHashSet::default(),
|
||||
builtin_macros: Default::default(),
|
||||
builtin_macro_kinds: Default::default(),
|
||||
registered_tools,
|
||||
macro_use_prelude: FxHashMap::default(),
|
||||
macro_map: FxHashMap::default(),
|
||||
dummy_ext_bang: Lrc::new(SyntaxExtension::dummy_bang(session.edition())),
|
||||
dummy_ext_derive: Lrc::new(SyntaxExtension::dummy_derive(session.edition())),
|
||||
non_macro_attr: Lrc::new(SyntaxExtension::non_macro_attr(session.edition())),
|
||||
dummy_ext_bang: Lrc::new(SyntaxExtension::dummy_bang(tcx.sess.edition())),
|
||||
dummy_ext_derive: Lrc::new(SyntaxExtension::dummy_derive(tcx.sess.edition())),
|
||||
non_macro_attr: Lrc::new(SyntaxExtension::non_macro_attr(tcx.sess.edition())),
|
||||
invocation_parent_scopes: Default::default(),
|
||||
output_macro_rules_scopes: Default::default(),
|
||||
macro_rules_scopes: Default::default(),
|
||||
|
@ -1430,7 +1400,6 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
let main_def = self.main_def;
|
||||
let confused_type_with_std_module = self.confused_type_with_std_module;
|
||||
let effective_visibilities = self.effective_visibilities;
|
||||
let untracked = self.untracked;
|
||||
let global_ctxt = ResolverGlobalCtxt {
|
||||
expn_that_defined,
|
||||
visibilities,
|
||||
|
@ -1469,26 +1438,21 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
             builtin_macro_kinds: self.builtin_macro_kinds,
             lifetime_elision_allowed: self.lifetime_elision_allowed,
         };
-        ResolverOutputs { global_ctxt, ast_lowering, untracked }
+        ResolverOutputs { global_ctxt, ast_lowering }
     }

     fn create_stable_hashing_context(&self) -> StableHashingContext<'_> {
-        StableHashingContext::new(self.session, &self.untracked)
+        StableHashingContext::new(self.tcx.sess, self.tcx.untracked())
     }

-    fn crate_loader(&mut self) -> CrateLoader<'_> {
-        CrateLoader::new(
-            &self.session,
-            &*self.metadata_loader,
-            self.local_crate_name,
-            &mut *self.untracked.cstore.untracked_as_any().downcast_mut().unwrap(),
-            self.untracked.definitions.read(),
-            &mut self.used_extern_options,
-        )
+    fn crate_loader<T>(&mut self, f: impl FnOnce(&mut CrateLoader<'_, '_>) -> T) -> T {
+        let mut cstore = self.tcx.untracked().cstore.write();
+        let cstore = cstore.untracked_as_any().downcast_mut().unwrap();
+        f(&mut CrateLoader::new(self.tcx, &mut *cstore, &mut self.used_extern_options))
     }

-    fn cstore(&self) -> &CStore {
-        self.untracked.cstore.as_any().downcast_ref().unwrap()
+    fn cstore(&self) -> MappedReadGuard<'_, CStore> {
+        CStore::from_tcx(self.tcx)
     }

     fn dummy_ext(&self, macro_kind: MacroKind) -> Lrc<SyntaxExtension> {
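The hunk above replaces the borrowing accessor with a closure-based one: instead of returning a `CrateLoader` that borrows several resolver fields, the resolver now builds the loader inside the call, so the write lock on the untracked `cstore` is held only for the duration of the closure. A minimal standalone sketch of that shape, using hypothetical `Store`/`Loader` types rather than the real rustc ones:

use std::sync::RwLock;

struct Store {
    crates: Vec<String>,
}

struct Loader<'a> {
    store: &'a mut Store,
}

impl<'a> Loader<'a> {
    fn process(&mut self, name: &str) -> usize {
        self.store.crates.push(name.to_string());
        self.store.crates.len()
    }
}

struct Resolver {
    untracked_store: RwLock<Store>,
}

impl Resolver {
    // The accessor takes a closure instead of returning a `Loader`, so the
    // write guard on the shared store lives only as long as the call to `f`.
    fn crate_loader<T>(&self, f: impl FnOnce(&mut Loader<'_>) -> T) -> T {
        let mut store = self.untracked_store.write().unwrap();
        f(&mut Loader { store: &mut *store })
    }
}

fn main() {
    let r = Resolver { untracked_store: RwLock::new(Store { crates: Vec::new() }) };
    assert_eq!(r.crate_loader(|l| l.process("core")), 1);
    assert_eq!(r.crate_loader(|l| l.process("std")), 2);
}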
@ -1521,18 +1485,25 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {

     /// Entry point to crate resolution.
     pub fn resolve_crate(&mut self, krate: &Crate) {
-        self.session.time("resolve_crate", || {
-            self.session.time("finalize_imports", || ImportResolver { r: self }.finalize_imports());
-            self.session.time("compute_effective_visibilities", || {
+        self.tcx.sess.time("resolve_crate", || {
+            self.tcx
+                .sess
+                .time("finalize_imports", || ImportResolver { r: self }.finalize_imports());
+            self.tcx.sess.time("compute_effective_visibilities", || {
                 EffectiveVisibilitiesVisitor::compute_effective_visibilities(self, krate)
             });
-            self.session.time("finalize_macro_resolutions", || self.finalize_macro_resolutions());
-            self.session.time("late_resolve_crate", || self.late_resolve_crate(krate));
-            self.session.time("resolve_main", || self.resolve_main());
-            self.session.time("resolve_check_unused", || self.check_unused(krate));
-            self.session.time("resolve_report_errors", || self.report_errors(krate));
-            self.session.time("resolve_postprocess", || self.crate_loader().postprocess(krate));
+            self.tcx.sess.time("finalize_macro_resolutions", || self.finalize_macro_resolutions());
+            self.tcx.sess.time("late_resolve_crate", || self.late_resolve_crate(krate));
+            self.tcx.sess.time("resolve_main", || self.resolve_main());
+            self.tcx.sess.time("resolve_check_unused", || self.check_unused(krate));
+            self.tcx.sess.time("resolve_report_errors", || self.report_errors(krate));
+            self.tcx
+                .sess
+                .time("resolve_postprocess", || self.crate_loader(|c| c.postprocess(krate)));
         });
+
+        // Make sure we don't mutate the cstore from here on.
+        self.tcx.untracked().cstore.leak();
     }

     fn traits_in_scope(
@ -1871,10 +1842,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
} else {
|
||||
let crate_id = if finalize {
|
||||
let Some(crate_id) =
|
||||
self.crate_loader().process_path_extern(ident.name, ident.span) else { return Some(self.dummy_binding); };
|
||||
self.crate_loader(|c| c.process_path_extern(ident.name, ident.span)) else { return Some(self.dummy_binding); };
|
||||
crate_id
|
||||
} else {
|
||||
self.crate_loader().maybe_process_path_extern(ident.name)?
|
||||
self.crate_loader(|c| c.maybe_process_path_extern(ident.name))?
|
||||
};
|
||||
let crate_root = self.expect_module(crate_id.as_def_id());
|
||||
let vis = ty::Visibility::<LocalDefId>::Public;
|
||||
|
@ -1922,14 +1893,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
/// Retrieves the span of the given `DefId` if `DefId` is in the local crate.
|
||||
#[inline]
|
||||
fn opt_span(&self, def_id: DefId) -> Option<Span> {
|
||||
def_id.as_local().map(|def_id| self.untracked.source_span[def_id])
|
||||
def_id.as_local().map(|def_id| self.tcx.source_span(def_id))
|
||||
}
|
||||
|
||||
/// Retrieves the name of the given `DefId`.
|
||||
#[inline]
|
||||
fn opt_name(&self, def_id: DefId) -> Option<Symbol> {
|
||||
let def_key = match def_id.as_local() {
|
||||
Some(def_id) => self.untracked.definitions.read().def_key(def_id),
|
||||
Some(def_id) => self.tcx.definitions_untracked().def_key(def_id),
|
||||
None => self.cstore().def_key(def_id),
|
||||
};
|
||||
def_key.get_opt_name()
|
||||
|
@ -1961,7 +1932,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
|
||||
let attr = self
|
||||
.cstore()
|
||||
.item_attrs_untracked(def_id, self.session)
|
||||
.item_attrs_untracked(def_id, self.tcx.sess)
|
||||
.find(|a| a.has_name(sym::rustc_legacy_const_generics))?;
|
||||
let mut ret = Vec::new();
|
||||
for meta in attr.meta_item_list()? {
|
||||
|
|
|
@ -195,7 +195,8 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> {
|
|||
|
||||
fn register_builtin_macro(&mut self, name: Symbol, ext: SyntaxExtensionKind) {
|
||||
if self.builtin_macros.insert(name, BuiltinMacroState::NotYetSeen(ext)).is_some() {
|
||||
self.session
|
||||
self.tcx
|
||||
.sess
|
||||
.diagnostic()
|
||||
.bug(&format!("built-in macro `{}` was already registered", name));
|
||||
}
|
||||
|
@ -216,7 +217,7 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> {
|
|||
ExpnData::allow_unstable(
|
||||
ExpnKind::AstPass(pass),
|
||||
call_site,
|
||||
self.session.edition(),
|
||||
self.tcx.sess.edition(),
|
||||
features.into(),
|
||||
None,
|
||||
parent_module,
|
||||
|
@ -430,7 +431,7 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> {
|
|||
PathResult::NonModule(..) |
|
||||
// HACK(Urgau): This shouldn't be necessary
|
||||
PathResult::Failed { is_error_from_last_segment: false, .. } => {
|
||||
self.session
|
||||
self.tcx.sess
|
||||
.struct_span_err(span, "not sure whether the path is accessible or not")
|
||||
.note("the type may have associated items, but we are currently not checking them")
|
||||
.emit();
|
||||
|
@ -455,7 +456,7 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> {
|
|||
}
|
||||
|
||||
fn get_proc_macro_quoted_span(&self, krate: CrateNum, id: usize) -> Span {
|
||||
self.cstore().get_proc_macro_quoted_span_untracked(krate, id, self.session)
|
||||
self.cstore().get_proc_macro_quoted_span_untracked(krate, id, self.tcx.sess)
|
||||
}
|
||||
|
||||
fn declare_proc_macro(&mut self, id: NodeId) {
|
||||
|
@ -493,10 +494,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// Report errors for the resolved macro.
|
||||
for segment in &path.segments {
|
||||
if let Some(args) = &segment.args {
|
||||
self.session.span_err(args.span(), "generic arguments in macro path");
|
||||
self.tcx.sess.span_err(args.span(), "generic arguments in macro path");
|
||||
}
|
||||
if kind == MacroKind::Attr && segment.ident.as_str().starts_with("rustc") {
|
||||
self.session.span_err(
|
||||
self.tcx.sess.span_err(
|
||||
segment.ident.span,
|
||||
"attributes starting with `rustc` are reserved for use by the `rustc` compiler",
|
||||
);
|
||||
|
@ -508,7 +509,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
if let Some(def_id) = def_id.as_local() {
|
||||
self.unused_macros.remove(&def_id);
|
||||
if self.proc_macro_stubs.contains(&def_id) {
|
||||
self.session.span_err(
|
||||
self.tcx.sess.span_err(
|
||||
path.span,
|
||||
"can't use a procedural macro from the same crate that defines it",
|
||||
);
|
||||
|
@ -540,7 +541,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
if let Some((article, expected)) = unexpected_res {
|
||||
let path_str = pprust::path_to_string(path);
|
||||
let msg = format!("expected {}, found {} `{}`", expected, res.descr(), path_str);
|
||||
self.session
|
||||
self.tcx
|
||||
.sess
|
||||
.struct_span_err(path.span, &msg)
|
||||
.span_label(path.span, format!("not {} {}", article, expected))
|
||||
.emit();
|
||||
|
@ -550,7 +552,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// We are trying to avoid reporting this error if other related errors were reported.
|
||||
if res != Res::Err
|
||||
&& inner_attr
|
||||
&& !self.session.features_untracked().custom_inner_attributes
|
||||
&& !self.tcx.sess.features_untracked().custom_inner_attributes
|
||||
{
|
||||
let msg = match res {
|
||||
Res::Def(..) => "inner macro attributes are unstable",
|
||||
|
@ -558,10 +560,15 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
_ => unreachable!(),
|
||||
};
|
||||
if soft_custom_inner_attributes_gate {
|
||||
self.session.parse_sess.buffer_lint(SOFT_UNSTABLE, path.span, node_id, msg);
|
||||
self.tcx.sess.parse_sess.buffer_lint(SOFT_UNSTABLE, path.span, node_id, msg);
|
||||
} else {
|
||||
feature_err(&self.session.parse_sess, sym::custom_inner_attributes, path.span, msg)
|
||||
.emit();
|
||||
feature_err(
|
||||
&self.tcx.sess.parse_sess,
|
||||
sym::custom_inner_attributes,
|
||||
path.span,
|
||||
msg,
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -655,7 +662,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// Make sure compilation does not succeed if preferred macro resolution
|
||||
// has changed after the macro had been expanded. In theory all such
|
||||
// situations should be reported as errors, so this is a bug.
|
||||
this.session.delay_span_bug(span, "inconsistent resolution for a macro");
|
||||
this.tcx.sess.delay_span_bug(span, "inconsistent resolution for a macro");
|
||||
}
|
||||
} else {
|
||||
// It's possible that the macro was unresolved (indeterminate) and silently
|
||||
|
@ -672,7 +679,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
Segment::names_to_string(path)
|
||||
);
|
||||
let msg_note = "import resolution is stuck, try simplifying macro imports";
|
||||
this.session.struct_span_err(span, &msg).note(msg_note).emit();
|
||||
this.tcx.sess.struct_span_err(span, &msg).note(msg_note).emit();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -699,7 +706,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
// try to suggest if it's not a macro, maybe a function
|
||||
if let PathResult::NonModule(partial_res) = self.maybe_resolve_path(&path, Some(ValueNS), &parent_scope)
|
||||
&& partial_res.unresolved_segments() == 0 {
|
||||
let sm = self.session.source_map();
|
||||
let sm = self.tcx.sess.source_map();
|
||||
let exclamation_span = sm.next_point(span);
|
||||
suggestion = Some((
|
||||
vec![(exclamation_span, "".to_string())],
|
||||
|
@ -762,7 +769,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
Err(..) => {
|
||||
let expected = kind.descr_expected();
|
||||
let msg = format!("cannot find {} `{}` in this scope", expected, ident);
|
||||
let mut err = self.session.struct_span_err(ident.span, &msg);
|
||||
let mut err = self.tcx.sess.struct_span_err(ident.span, &msg);
|
||||
self.unresolved_macro_suggestions(&mut err, kind, &parent_scope, ident);
|
||||
err.emit();
|
||||
}
|
||||
|
@ -804,7 +811,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
let soft_handler =
|
||||
|lint, span, msg: &_| lint_buffer.buffer_lint(lint, node_id, span, msg);
|
||||
stability::report_unstable(
|
||||
self.session,
|
||||
self.tcx.sess,
|
||||
feature,
|
||||
reason.to_opt_reason(),
|
||||
issue,
|
||||
|
@ -840,7 +847,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
if kind != NonMacroAttrKind::Tool && binding.map_or(true, |b| b.is_import()) {
|
||||
let msg =
|
||||
format!("cannot use {} {} through an import", kind.article(), kind.descr());
|
||||
let mut err = self.session.struct_span_err(span, &msg);
|
||||
let mut err = self.tcx.sess.struct_span_err(span, &msg);
|
||||
if let Some(binding) = binding {
|
||||
err.span_note(binding.span, &format!("the {} imported here", kind.descr()));
|
||||
}
|
||||
|
@ -855,7 +862,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
if ident.name == sym::cfg || ident.name == sym::cfg_attr {
|
||||
let macro_kind = self.get_macro(res).map(|macro_data| macro_data.ext.macro_kind());
|
||||
if macro_kind.is_some() && sub_namespace_match(macro_kind, Some(MacroKind::Attr)) {
|
||||
self.session.span_err(
|
||||
self.tcx.sess.span_err(
|
||||
ident.span,
|
||||
&format!("name `{}` is reserved in attribute namespace", ident),
|
||||
);
|
||||
|
@ -871,12 +878,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
item: &ast::Item,
|
||||
edition: Edition,
|
||||
) -> (SyntaxExtension, Vec<(usize, Span)>) {
|
||||
let (mut result, mut rule_spans) = compile_declarative_macro(
|
||||
&self.session,
|
||||
self.session.features_untracked(),
|
||||
item,
|
||||
edition,
|
||||
);
|
||||
let (mut result, mut rule_spans) = compile_declarative_macro(self.tcx.sess, item, edition);
|
||||
|
||||
if let Some(builtin_name) = result.builtin_name {
|
||||
// The macro was marked with `#[rustc_builtin_macro]`.
|
||||
|
@ -895,7 +897,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
}
|
||||
BuiltinMacroState::AlreadySeen(span) => {
|
||||
struct_span_err!(
|
||||
self.session,
|
||||
self.tcx.sess,
|
||||
item.span,
|
||||
E0773,
|
||||
"attempted to define built-in macro more than once"
|
||||
|
@ -906,7 +908,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
|||
}
|
||||
} else {
|
||||
let msg = format!("cannot find a built-in macro with name `{}`", item.ident);
|
||||
self.session.span_err(item.span, &msg);
|
||||
self.tcx.sess.span_err(item.span, &msg);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -430,11 +430,6 @@ impl<D: Decoder, T: Decodable<D> + Copy> Decodable<D> for Cell<T> {
|
|||
}
|
||||
}
|
||||
|
||||
// FIXME: #15036
|
||||
// Should use `try_borrow`, returning an
|
||||
// `encoder.error("attempting to Encode borrowed RefCell")`
|
||||
// from `encode` when `try_borrow` returns `None`.
|
||||
|
||||
impl<S: Encoder, T: Encodable<S>> Encodable<S> for RefCell<T> {
|
||||
fn encode(&self, s: &mut S) {
|
||||
self.borrow().encode(s);
|
||||
|
|
|
@ -200,12 +200,12 @@ pub enum ExternCrateSource {
 /// At the time of this writing, there is only one backend and one way to store
 /// metadata in library -- this trait just serves to decouple rustc_metadata from
 /// the archive reader, which depends on LLVM.
-pub trait MetadataLoader {
+pub trait MetadataLoader: std::fmt::Debug {
     fn get_rlib_metadata(&self, target: &Target, filename: &Path) -> Result<MetadataRef, String>;
     fn get_dylib_metadata(&self, target: &Target, filename: &Path) -> Result<MetadataRef, String>;
 }

-pub type MetadataLoaderDyn = dyn MetadataLoader + Sync;
+pub type MetadataLoaderDyn = dyn MetadataLoader + Send + Sync;

 /// A store of Rust crates, through which their metadata can be accessed.
 ///
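The `Debug` supertrait and the extra `Send` bound added above let the boxed loader live inside shared, thread-safe compiler state. A standalone sketch with a hypothetical `Loader` trait (not the real rustc one) showing what the added bounds on the trait-object alias buy:

use std::fmt::Debug;
use std::thread;

// Hypothetical stand-in for a metadata-loader trait.
trait Loader: Debug {
    fn load(&self, name: &str) -> Vec<u8>;
}

#[derive(Debug)]
struct DummyLoader;

impl Loader for DummyLoader {
    fn load(&self, name: &str) -> Vec<u8> {
        name.as_bytes().to_vec()
    }
}

// Without `+ Send + Sync`, this boxed trait object could not be moved into
// another thread or stored in state shared between threads.
type LoaderDyn = dyn Loader + Send + Sync;

fn main() {
    let loader: Box<LoaderDyn> = Box::new(DummyLoader);
    let handle = thread::spawn(move || loader.load("core").len());
    assert_eq!(handle.join().unwrap(), 4);
}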
@ -250,12 +250,12 @@ pub trait CrateStore: std::fmt::Debug {
     fn import_source_files(&self, sess: &Session, cnum: CrateNum);
 }

-pub type CrateStoreDyn = dyn CrateStore + sync::Sync;
+pub type CrateStoreDyn = dyn CrateStore + sync::Sync + sync::Send;

 #[derive(Debug)]
 pub struct Untracked {
-    pub cstore: Box<CrateStoreDyn>,
+    pub cstore: RwLock<Box<CrateStoreDyn>>,
     /// Reference span for definitions.
-    pub source_span: IndexVec<LocalDefId, Span>,
+    pub source_span: RwLock<IndexVec<LocalDefId, Span>>,
     pub definitions: RwLock<Definitions>,
 }
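With `cstore` and `source_span` now behind `RwLock`, callers mutate them through scoped guards, as the resolver hunks earlier in this diff do (`definitions.write().create_def(..)`, `source_span.write().push(span)`, `definitions.read().def_key(..)`). A toy sketch of that access pattern using `std::sync::RwLock` and stand-in types (rustc uses its own `rustc_data_structures::sync::RwLock`):

use std::sync::RwLock;

#[derive(Default)]
struct Definitions {
    keys: Vec<String>,
}

#[derive(Default)]
struct Untracked {
    source_span: RwLock<Vec<(usize, usize)>>,
    definitions: RwLock<Definitions>,
}

fn main() {
    let untracked = Untracked::default();

    // Writers take a scoped write guard, mirroring `definitions.write().create_def(..)`.
    untracked.definitions.write().unwrap().keys.push("crate".to_string());
    untracked.source_span.write().unwrap().push((0, 10));

    // Readers take a read guard, mirroring `definitions.read().def_key(..)`.
    let n = untracked.definitions.read().unwrap().keys.len();
    assert_eq!(n, 1);
}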
compiler/rustc_span/src/edit_distance.rs (new file, 229 lines)
@ -0,0 +1,229 @@
//! Edit distances.
//!
//! The [edit distance] is a metric for measuring the difference between two strings.
//!
//! [edit distance]: https://en.wikipedia.org/wiki/Edit_distance

// The current implementation is the restricted Damerau-Levenshtein algorithm. It is restricted
// because it does not permit modifying characters that have already been transposed. The specific
// algorithm should not matter to the caller of the methods, which is why it is not noted in the
// documentation.

use crate::symbol::Symbol;
use std::{cmp, mem};

#[cfg(test)]
mod tests;

/// Finds the [edit distance] between two strings.
///
/// Returns `None` if the distance exceeds the limit.
///
/// [edit distance]: https://en.wikipedia.org/wiki/Edit_distance
pub fn edit_distance(a: &str, b: &str, limit: usize) -> Option<usize> {
    let mut a = &a.chars().collect::<Vec<_>>()[..];
    let mut b = &b.chars().collect::<Vec<_>>()[..];

    // Ensure that `b` is the shorter string, minimizing memory use.
    if a.len() < b.len() {
        mem::swap(&mut a, &mut b);
    }

    let min_dist = a.len() - b.len();
    // If we know the limit will be exceeded, we can return early.
    if min_dist > limit {
        return None;
    }

    // Strip common prefix.
    while let Some(((b_char, b_rest), (a_char, a_rest))) = b.split_first().zip(a.split_first())
        && a_char == b_char
    {
        a = a_rest;
        b = b_rest;
    }
    // Strip common suffix.
    while let Some(((b_char, b_rest), (a_char, a_rest))) = b.split_last().zip(a.split_last())
        && a_char == b_char
    {
        a = a_rest;
        b = b_rest;
    }

    // If either string is empty, the distance is the length of the other.
    // We know that `b` is the shorter string, so we don't need to check `a`.
    if b.len() == 0 {
        return Some(min_dist);
    }

    let mut prev_prev = vec![usize::MAX; b.len() + 1];
    let mut prev = (0..=b.len()).collect::<Vec<_>>();
    let mut current = vec![0; b.len() + 1];

    // row by row
    for i in 1..=a.len() {
        current[0] = i;
        let a_idx = i - 1;

        // column by column
        for j in 1..=b.len() {
            let b_idx = j - 1;

            // There is no cost to substitute a character with itself.
            let substitution_cost = if a[a_idx] == b[b_idx] { 0 } else { 1 };

            current[j] = cmp::min(
                // deletion
                prev[j] + 1,
                cmp::min(
                    // insertion
                    current[j - 1] + 1,
                    // substitution
                    prev[j - 1] + substitution_cost,
                ),
            );

            if (i > 1) && (j > 1) && (a[a_idx] == b[b_idx - 1]) && (a[a_idx - 1] == b[b_idx]) {
                // transposition
                current[j] = cmp::min(current[j], prev_prev[j - 2] + 1);
            }
        }

        // Rotate the buffers, reusing the memory.
        [prev_prev, prev, current] = [prev, current, prev_prev];
    }

    // `prev` because we already rotated the buffers.
    let distance = prev[b.len()];
    (distance <= limit).then_some(distance)
}
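A few extra assertions, in the style of the tests module this commit also adds, that spell out the behaviour above (the early return on the length-difference check, prefix/suffix stripping, and single-edit transpositions). This is a sketch only; it assumes the surrounding module context, and the word pairs are illustrative:

#[test]
fn edit_distance_walkthrough() {
    // "kitten" -> "sitting": substitute k->s, e->i, insert g = 3 edits.
    assert_eq!(edit_distance("kitten", "sitting", usize::MAX), Some(3));
    // An adjacent transposition is a single edit in the restricted
    // Damerau-Levenshtein metric implemented above.
    assert_eq!(edit_distance("ab", "ba", usize::MAX), Some(1));
    // The common prefix "a" and suffix "d" are stripped first, leaving
    // "bc"/"cb", which is again one transposition.
    assert_eq!(edit_distance("abcd", "acbd", usize::MAX), Some(1));
    // The length difference alone (16) already exceeds the limit, so the
    // function returns early with `None`.
    assert_eq!(edit_distance("short", "very different string", 3), None);
}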

/// Provides a word similarity score between two words that accounts for substrings being more
/// meaningful than a typical edit distance. The lower the score, the closer the match. 0 is an
/// identical match.
///
/// Uses the edit distance between the two strings and removes the cost of the length difference.
/// If this is 0 then it is either a substring match or a full word match, in the substring match
/// case we detect this and return `1`. To prevent finding meaningless substrings, eg. "in" in
/// "shrink", we only perform this subtraction of length difference if one of the words is not
/// greater than twice the length of the other. For cases where the words are close in size but not
/// an exact substring then the cost of the length difference is discounted by half.
///
/// Returns `None` if the distance exceeds the limit.
pub fn edit_distance_with_substrings(a: &str, b: &str, limit: usize) -> Option<usize> {
    let n = a.chars().count();
    let m = b.chars().count();

    // Check one isn't less than half the length of the other. If this is true then there is a
    // big difference in length.
    let big_len_diff = (n * 2) < m || (m * 2) < n;
    let len_diff = if n < m { m - n } else { n - m };
    let distance = edit_distance(a, b, limit + len_diff)?;

    // This is the crux, subtracting length difference means exact substring matches will now be 0
    let score = distance - len_diff;

    // If the score is 0 but the words have different lengths then it's a substring match not a full
    // word match
    let score = if score == 0 && len_diff > 0 && !big_len_diff {
        1 // Exact substring match, but not a total word match so return non-zero
    } else if !big_len_diff {
        // Not a big difference in length, discount cost of length difference
        score + (len_diff + 1) / 2
    } else {
        // A big difference in length, add back the difference in length to the score
        score + len_diff
    };

    (score <= limit).then_some(score)
}
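As a worked example of the scoring rules above, take the ("empty", "is_empty") pair that the new tests also use; the arithmetic is spelled out in the comments of this sketch (same module context assumed):

#[test]
fn substring_score_walkthrough() {
    // "empty" (n = 5) vs. "is_empty" (m = 8):
    //   big_len_diff = (5 * 2) < 8 || (8 * 2) < 5  -> false
    //   len_diff     = 3
    //   edit_distance("empty", "is_empty", 1 + 3) = 3   (insert "is_")
    //   score        = 3 - 3 = 0, but len_diff > 0, so this is a substring
    //                  match rather than a full word match -> score becomes 1
    assert_eq!(edit_distance_with_substrings("empty", "is_empty", 1), Some(1));
}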

/// Finds the best match for given word in the given iterator where substrings are meaningful.
///
/// A version of [`find_best_match_for_name`] that uses [`edit_distance_with_substrings`] as the
/// score for word similarity. This takes an optional distance limit which defaults to one-third of
/// the given word.
///
/// We use case insensitive comparison to improve accuracy on an edge case with a lower(upper)case
/// letters mismatch.
pub fn find_best_match_for_name_with_substrings(
    candidates: &[Symbol],
    lookup: Symbol,
    dist: Option<usize>,
) -> Option<Symbol> {
    find_best_match_for_name_impl(true, candidates, lookup, dist)
}

/// Finds the best match for a given word in the given iterator.
///
/// As a loose rule to avoid the obviously incorrect suggestions, it takes
/// an optional limit for the maximum allowable edit distance, which defaults
/// to one-third of the given word.
///
/// We use case insensitive comparison to improve accuracy on an edge case with a lower(upper)case
/// letters mismatch.
pub fn find_best_match_for_name(
    candidates: &[Symbol],
    lookup: Symbol,
    dist: Option<usize>,
) -> Option<Symbol> {
    find_best_match_for_name_impl(false, candidates, lookup, dist)
}

#[cold]
fn find_best_match_for_name_impl(
    use_substring_score: bool,
    candidates: &[Symbol],
    lookup: Symbol,
    dist: Option<usize>,
) -> Option<Symbol> {
    let lookup = lookup.as_str();
    let lookup_uppercase = lookup.to_uppercase();

    // Priority of matches:
    // 1. Exact case insensitive match
    // 2. Edit distance match
    // 3. Sorted word match
    if let Some(c) = candidates.iter().find(|c| c.as_str().to_uppercase() == lookup_uppercase) {
        return Some(*c);
    }

    let mut dist = dist.unwrap_or_else(|| cmp::max(lookup.len(), 3) / 3);
    let mut best = None;
    for c in candidates {
        match if use_substring_score {
            edit_distance_with_substrings(lookup, c.as_str(), dist)
        } else {
            edit_distance(lookup, c.as_str(), dist)
        } {
            Some(0) => return Some(*c),
            Some(d) => {
                dist = d - 1;
                best = Some(*c);
            }
            None => {}
        }
    }
    if best.is_some() {
        return best;
    }

    find_match_by_sorted_words(candidates, lookup)
}
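A sketch of how the shrinking `dist` budget above decides between the two candidates used in `test_find_best_match_for_name` (it assumes the same test-module context as the existing tests):

#[test]
fn best_match_budget_walkthrough() {
    use crate::create_default_session_globals_then;
    create_default_session_globals_then(|| {
        // There is no exact case-insensitive match for "aaaa", so the
        // edit-distance pass runs with the default budget max(4, 3) / 3 = 1.
        //   "aaab":  distance 1 <= 1 -> becomes `best`, budget shrinks to 0
        //   "aaabc": its length difference alone is 1 > 0 -> rejected early
        let input = vec![Symbol::intern("aaab"), Symbol::intern("aaabc")];
        assert_eq!(
            find_best_match_for_name(&input, Symbol::intern("aaaa"), None),
            Some(Symbol::intern("aaab"))
        );
    })
}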

fn find_match_by_sorted_words(iter_names: &[Symbol], lookup: &str) -> Option<Symbol> {
    iter_names.iter().fold(None, |result, candidate| {
        if sort_by_words(candidate.as_str()) == sort_by_words(lookup) {
            Some(*candidate)
        } else {
            result
        }
    })
}

fn sort_by_words(name: &str) -> String {
    let mut split_words: Vec<&str> = name.split('_').collect();
    // We are sorting primitive &strs and can use unstable sort here.
    split_words.sort_unstable();
    split_words.join("_")
}
compiler/rustc_span/src/edit_distance/tests.rs (new file, 80 lines)
@ -0,0 +1,80 @@
use super::*;

#[test]
fn test_edit_distance() {
    // Test bytelength agnosticity
    for c in (0..char::MAX as u32).filter_map(char::from_u32).map(|i| i.to_string()) {
        assert_eq!(edit_distance(&c[..], &c[..], usize::MAX), Some(0));
    }

    let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
    let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
    let c = "Mary häd ä little lämb\n\nLittle lämb\n";
    assert_eq!(edit_distance(a, b, usize::MAX), Some(1));
    assert_eq!(edit_distance(b, a, usize::MAX), Some(1));
    assert_eq!(edit_distance(a, c, usize::MAX), Some(2));
    assert_eq!(edit_distance(c, a, usize::MAX), Some(2));
    assert_eq!(edit_distance(b, c, usize::MAX), Some(1));
    assert_eq!(edit_distance(c, b, usize::MAX), Some(1));
}

#[test]
fn test_edit_distance_limit() {
    assert_eq!(edit_distance("abc", "abcd", 1), Some(1));
    assert_eq!(edit_distance("abc", "abcd", 0), None);
    assert_eq!(edit_distance("abc", "xyz", 3), Some(3));
    assert_eq!(edit_distance("abc", "xyz", 2), None);
}

#[test]
fn test_method_name_similarity_score() {
    assert_eq!(edit_distance_with_substrings("empty", "is_empty", 1), Some(1));
    assert_eq!(edit_distance_with_substrings("shrunk", "rchunks", 2), None);
    assert_eq!(edit_distance_with_substrings("abc", "abcd", 1), Some(1));
    assert_eq!(edit_distance_with_substrings("a", "abcd", 1), None);
    assert_eq!(edit_distance_with_substrings("edf", "eq", 1), None);
    assert_eq!(edit_distance_with_substrings("abc", "xyz", 3), Some(3));
    assert_eq!(edit_distance_with_substrings("abcdef", "abcdef", 2), Some(0));
}

#[test]
fn test_find_best_match_for_name() {
    use crate::create_default_session_globals_then;
    create_default_session_globals_then(|| {
        let input = vec![Symbol::intern("aaab"), Symbol::intern("aaabc")];
        assert_eq!(
            find_best_match_for_name(&input, Symbol::intern("aaaa"), None),
            Some(Symbol::intern("aaab"))
        );

        assert_eq!(find_best_match_for_name(&input, Symbol::intern("1111111111"), None), None);

        let input = vec![Symbol::intern("AAAA")];
        assert_eq!(
            find_best_match_for_name(&input, Symbol::intern("aaaa"), None),
            Some(Symbol::intern("AAAA"))
        );

        let input = vec![Symbol::intern("AAAA")];
        assert_eq!(
            find_best_match_for_name(&input, Symbol::intern("aaaa"), Some(4)),
            Some(Symbol::intern("AAAA"))
        );

        let input = vec![Symbol::intern("a_longer_variable_name")];
        assert_eq!(
            find_best_match_for_name(&input, Symbol::intern("a_variable_longer_name"), None),
            Some(Symbol::intern("a_longer_variable_name"))
        );
    })
}

#[test]
fn test_precise_algorithm() {
    // Not Levenshtein distance.
    assert_ne!(edit_distance("ab", "ba", usize::MAX), Some(2));
    // Not unrestricted Damerau-Levenshtein distance.
    assert_ne!(edit_distance("abde", "bcaed", usize::MAX), Some(3));
    // The current implementation is a restricted Damerau-Levenshtein distance.
    assert_eq!(edit_distance("abde", "bcaed", usize::MAX), Some(4));
}
|
@ -1,177 +0,0 @@
//! Levenshtein distances.
//!
//! The [Levenshtein distance] is a metric for measuring the difference between two strings.
//!
//! [Levenshtein distance]: https://en.wikipedia.org/wiki/Levenshtein_distance

use crate::symbol::Symbol;
use std::cmp;

#[cfg(test)]
mod tests;

/// Finds the Levenshtein distance between two strings.
///
/// Returns None if the distance exceeds the limit.
pub fn lev_distance(a: &str, b: &str, limit: usize) -> Option<usize> {
    let n = a.chars().count();
    let m = b.chars().count();
    let min_dist = if n < m { m - n } else { n - m };

    if min_dist > limit {
        return None;
    }
    if n == 0 || m == 0 {
        return (min_dist <= limit).then_some(min_dist);
    }

    let mut dcol: Vec<_> = (0..=m).collect();

    for (i, sc) in a.chars().enumerate() {
        let mut current = i;
        dcol[0] = current + 1;

        for (j, tc) in b.chars().enumerate() {
            let next = dcol[j + 1];
            if sc == tc {
                dcol[j + 1] = current;
            } else {
                dcol[j + 1] = cmp::min(current, next);
                dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1;
            }
            current = next;
        }
    }

    (dcol[m] <= limit).then_some(dcol[m])
}

/// Provides a word similarity score between two words that accounts for substrings being more
/// meaningful than a typical Levenshtein distance. The lower the score, the closer the match.
/// 0 is an identical match.
///
/// Uses the Levenshtein distance between the two strings and removes the cost of the length
/// difference. If this is 0 then it is either a substring match or a full word match, in the
/// substring match case we detect this and return `1`. To prevent finding meaningless substrings,
/// eg. "in" in "shrink", we only perform this subtraction of length difference if one of the words
/// is not greater than twice the length of the other. For cases where the words are close in size
/// but not an exact substring then the cost of the length difference is discounted by half.
///
/// Returns `None` if the distance exceeds the limit.
pub fn lev_distance_with_substrings(a: &str, b: &str, limit: usize) -> Option<usize> {
    let n = a.chars().count();
    let m = b.chars().count();

    // Check one isn't less than half the length of the other. If this is true then there is a
    // big difference in length.
    let big_len_diff = (n * 2) < m || (m * 2) < n;
    let len_diff = if n < m { m - n } else { n - m };
    let lev = lev_distance(a, b, limit + len_diff)?;

    // This is the crux, subtracting length difference means exact substring matches will now be 0
    let score = lev - len_diff;

    // If the score is 0 but the words have different lengths then it's a substring match not a full
    // word match
    let score = if score == 0 && len_diff > 0 && !big_len_diff {
        1 // Exact substring match, but not a total word match so return non-zero
    } else if !big_len_diff {
        // Not a big difference in length, discount cost of length difference
        score + (len_diff + 1) / 2
    } else {
        // A big difference in length, add back the difference in length to the score
        score + len_diff
    };

    (score <= limit).then_some(score)
}

/// Finds the best match for given word in the given iterator where substrings are meaningful.
///
/// A version of [`find_best_match_for_name`] that uses [`lev_distance_with_substrings`] as the score
/// for word similarity. This takes an optional distance limit which defaults to one-third of the
/// given word.
///
/// Besides the modified Levenshtein, we use case insensitive comparison to improve accuracy
/// on an edge case with a lower(upper)case letters mismatch.
pub fn find_best_match_for_name_with_substrings(
    candidates: &[Symbol],
    lookup: Symbol,
    dist: Option<usize>,
) -> Option<Symbol> {
    find_best_match_for_name_impl(true, candidates, lookup, dist)
}

/// Finds the best match for a given word in the given iterator.
///
/// As a loose rule to avoid the obviously incorrect suggestions, it takes
/// an optional limit for the maximum allowable edit distance, which defaults
/// to one-third of the given word.
///
/// Besides Levenshtein, we use case insensitive comparison to improve accuracy
/// on an edge case with a lower(upper)case letters mismatch.
pub fn find_best_match_for_name(
    candidates: &[Symbol],
    lookup: Symbol,
    dist: Option<usize>,
) -> Option<Symbol> {
    find_best_match_for_name_impl(false, candidates, lookup, dist)
}

#[cold]
fn find_best_match_for_name_impl(
    use_substring_score: bool,
    candidates: &[Symbol],
    lookup: Symbol,
    dist: Option<usize>,
) -> Option<Symbol> {
    let lookup = lookup.as_str();
    let lookup_uppercase = lookup.to_uppercase();

    // Priority of matches:
    // 1. Exact case insensitive match
    // 2. Levenshtein distance match
    // 3. Sorted word match
    if let Some(c) = candidates.iter().find(|c| c.as_str().to_uppercase() == lookup_uppercase) {
        return Some(*c);
    }

    let mut dist = dist.unwrap_or_else(|| cmp::max(lookup.len(), 3) / 3);
    let mut best = None;
    for c in candidates {
        match if use_substring_score {
            lev_distance_with_substrings(lookup, c.as_str(), dist)
        } else {
            lev_distance(lookup, c.as_str(), dist)
        } {
            Some(0) => return Some(*c),
            Some(d) => {
                dist = d - 1;
                best = Some(*c);
            }
            None => {}
        }
    }
    if best.is_some() {
        return best;
    }

    find_match_by_sorted_words(candidates, lookup)
}

fn find_match_by_sorted_words(iter_names: &[Symbol], lookup: &str) -> Option<Symbol> {
    iter_names.iter().fold(None, |result, candidate| {
        if sort_by_words(candidate.as_str()) == sort_by_words(lookup) {
            Some(*candidate)
        } else {
            result
        }
    })
}

fn sort_by_words(name: &str) -> String {
    let mut split_words: Vec<&str> = name.split('_').collect();
    // We are sorting primitive &strs and can use unstable sort here.
    split_words.sort_unstable();
    split_words.join("_")
}
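The scoring in `lev_distance_with_substrings` is easiest to follow on a concrete pair. A standalone walk-through of `("empty", "is_empty")`, reduced to the branch actually taken (plain arithmetic, not the compiler's API); the tests below fix the result at 1:

fn main() {
    let (n, m) = ("empty".chars().count(), "is_empty".chars().count()); // 5 and 8
    let big_len_diff = n * 2 < m || m * 2 < n; // false: neither word is more than twice as long
    let len_diff = m - n;                      // 3
    let lev = 3;                               // three insertions turn "empty" into "is_empty"
    let score = lev - len_diff;                // 0: an exact substring match
    // A substring match of different-length words is bumped to 1 so it never ties a full match.
    let score = if score == 0 && len_diff > 0 && !big_len_diff { 1 } else { score + (len_diff + 1) / 2 };
    assert_eq!(score, 1); // matches `lev_distance_with_substrings("empty", "is_empty", 1)`
}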
@ -1,70 +0,0 @@
use super::*;

#[test]
fn test_lev_distance() {
    // Test bytelength agnosticity
    for c in (0..char::MAX as u32).filter_map(char::from_u32).map(|i| i.to_string()) {
        assert_eq!(lev_distance(&c[..], &c[..], usize::MAX), Some(0));
    }

    let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
    let b = "\nMary häd ä little lämb\n\nLittle lämb\n";
    let c = "Mary häd ä little lämb\n\nLittle lämb\n";
    assert_eq!(lev_distance(a, b, usize::MAX), Some(1));
    assert_eq!(lev_distance(b, a, usize::MAX), Some(1));
    assert_eq!(lev_distance(a, c, usize::MAX), Some(2));
    assert_eq!(lev_distance(c, a, usize::MAX), Some(2));
    assert_eq!(lev_distance(b, c, usize::MAX), Some(1));
    assert_eq!(lev_distance(c, b, usize::MAX), Some(1));
}

#[test]
fn test_lev_distance_limit() {
    assert_eq!(lev_distance("abc", "abcd", 1), Some(1));
    assert_eq!(lev_distance("abc", "abcd", 0), None);
    assert_eq!(lev_distance("abc", "xyz", 3), Some(3));
    assert_eq!(lev_distance("abc", "xyz", 2), None);
}

#[test]
fn test_method_name_similarity_score() {
    assert_eq!(lev_distance_with_substrings("empty", "is_empty", 1), Some(1));
    assert_eq!(lev_distance_with_substrings("shrunk", "rchunks", 2), None);
    assert_eq!(lev_distance_with_substrings("abc", "abcd", 1), Some(1));
    assert_eq!(lev_distance_with_substrings("a", "abcd", 1), None);
    assert_eq!(lev_distance_with_substrings("edf", "eq", 1), None);
    assert_eq!(lev_distance_with_substrings("abc", "xyz", 3), Some(3));
    assert_eq!(lev_distance_with_substrings("abcdef", "abcdef", 2), Some(0));
}

#[test]
fn test_find_best_match_for_name() {
    use crate::create_default_session_globals_then;
    create_default_session_globals_then(|| {
        let input = vec![Symbol::intern("aaab"), Symbol::intern("aaabc")];
        assert_eq!(
            find_best_match_for_name(&input, Symbol::intern("aaaa"), None),
            Some(Symbol::intern("aaab"))
        );

        assert_eq!(find_best_match_for_name(&input, Symbol::intern("1111111111"), None), None);

        let input = vec![Symbol::intern("AAAA")];
        assert_eq!(
            find_best_match_for_name(&input, Symbol::intern("aaaa"), None),
            Some(Symbol::intern("AAAA"))
        );

        let input = vec![Symbol::intern("AAAA")];
        assert_eq!(
            find_best_match_for_name(&input, Symbol::intern("aaaa"), Some(4)),
            Some(Symbol::intern("AAAA"))
        );

        let input = vec![Symbol::intern("a_longer_variable_name")];
        assert_eq!(
            find_best_match_for_name(&input, Symbol::intern("a_variable_longer_name"), None),
            Some(Symbol::intern("a_longer_variable_name"))
        );
    })
}
@ -19,6 +19,7 @@
#![feature(negative_impls)]
#![feature(min_specialization)]
#![feature(rustc_attrs)]
#![feature(let_chains)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]

@ -46,7 +47,7 @@ pub use hygiene::{ExpnData, ExpnHash, ExpnId, LocalExpnId, SyntaxContext};
use rustc_data_structures::stable_hasher::HashingControls;
pub mod def_id;
use def_id::{CrateNum, DefId, DefPathHash, LocalDefId, LOCAL_CRATE};
pub mod lev_distance;
pub mod edit_distance;
mod span_encoding;
pub use span_encoding::{Span, DUMMY_SP};
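With the module renamed from `lev_distance` to `edit_distance`, in-crate callers go through the new path. A hypothetical extra test in the same crate, mirroring the existing ones above (the candidate names are invented for the example):

#[test]
fn suggests_closest_name() {
    use crate::create_default_session_globals_then;
    use crate::edit_distance::find_best_match_for_name;
    use crate::symbol::Symbol;

    create_default_session_globals_then(|| {
        let candidates = vec![Symbol::intern("println"), Symbol::intern("eprintln")];
        // A one-character typo is steered to the closest candidate.
        assert_eq!(
            find_best_match_for_name(&candidates, Symbol::intern("printin"), None),
            Some(Symbol::intern("println"))
        );
    })
}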
@ -1,9 +1,29 @@
|
|||
macro_rules! int_impl {
|
||||
($SelfT:ty, $ActualT:ident, $UnsignedT:ty, $BITS:expr, $BITS_MINUS_ONE:expr, $Min:expr, $Max:expr,
|
||||
$rot:expr, $rot_op:expr, $rot_result:expr, $swap_op:expr, $swapped:expr,
|
||||
$reversed:expr, $le_bytes:expr, $be_bytes:expr,
|
||||
$to_xe_bytes_doc:expr, $from_xe_bytes_doc:expr,
|
||||
$bound_condition:expr) => {
|
||||
(
|
||||
Self = $SelfT:ty,
|
||||
ActualT = $ActualT:ident,
|
||||
UnsignedT = $UnsignedT:ty,
|
||||
|
||||
// These are all for use *only* in doc comments.
|
||||
// As such, they're all passed as literals -- passing them as a string
|
||||
// literal is fine if they need to be multiple code tokens.
|
||||
// In non-comments, use the associated constants rather than these.
|
||||
BITS = $BITS:literal,
|
||||
BITS_MINUS_ONE = $BITS_MINUS_ONE:literal,
|
||||
Min = $Min:literal,
|
||||
Max = $Max:literal,
|
||||
rot = $rot:literal,
|
||||
rot_op = $rot_op:literal,
|
||||
rot_result = $rot_result:literal,
|
||||
swap_op = $swap_op:literal,
|
||||
swapped = $swapped:literal,
|
||||
reversed = $reversed:literal,
|
||||
le_bytes = $le_bytes:literal,
|
||||
be_bytes = $be_bytes:literal,
|
||||
to_xe_bytes_doc = $to_xe_bytes_doc:expr,
|
||||
from_xe_bytes_doc = $from_xe_bytes_doc:expr,
|
||||
bound_condition = $bound_condition:literal,
|
||||
) => {
|
||||
/// The smallest value that can be represented by this integer type
|
||||
#[doc = concat!("(−2<sup>", $BITS_MINUS_ONE, "</sup>", $bound_condition, ").")]
|
||||
///
|
||||
|
@ -15,7 +35,7 @@ macro_rules! int_impl {
|
|||
#[doc = concat!("assert_eq!(", stringify!($SelfT), "::MIN, ", stringify!($Min), ");")]
|
||||
/// ```
|
||||
#[stable(feature = "assoc_int_consts", since = "1.43.0")]
|
||||
pub const MIN: Self = !0 ^ ((!0 as $UnsignedT) >> 1) as Self;
|
||||
pub const MIN: Self = !Self::MAX;
|
||||
|
||||
/// The largest value that can be represented by this integer type
|
||||
#[doc = concat!("(2<sup>", $BITS_MINUS_ONE, "</sup> − 1", $bound_condition, ").")]
|
||||
|
@ -28,7 +48,7 @@ macro_rules! int_impl {
|
|||
#[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX, ", stringify!($Max), ");")]
|
||||
/// ```
|
||||
#[stable(feature = "assoc_int_consts", since = "1.43.0")]
|
||||
pub const MAX: Self = !Self::MIN;
|
||||
pub const MAX: Self = (<$UnsignedT>::MAX >> 1) as Self;
|
||||
|
||||
/// The size of this integer type in bits.
|
||||
///
|
||||
|
@ -38,7 +58,7 @@ macro_rules! int_impl {
|
|||
#[doc = concat!("assert_eq!(", stringify!($SelfT), "::BITS, ", stringify!($BITS), ");")]
|
||||
/// ```
|
||||
#[stable(feature = "int_bits_const", since = "1.53.0")]
|
||||
pub const BITS: u32 = $BITS;
|
||||
pub const BITS: u32 = <$UnsignedT>::BITS;
|
||||
|
||||
/// Converts a string slice in a given base to an integer.
|
||||
///
|
||||
|
@ -1365,7 +1385,7 @@ macro_rules! int_impl {
|
|||
// SAFETY: the masking by the bitsize of the type ensures that we do not shift
|
||||
// out of bounds
|
||||
unsafe {
|
||||
self.unchecked_shl(rhs & ($BITS - 1))
|
||||
self.unchecked_shl(rhs & (Self::BITS - 1))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1395,7 +1415,7 @@ macro_rules! int_impl {
|
|||
// SAFETY: the masking by the bitsize of the type ensures that we do not shift
|
||||
// out of bounds
|
||||
unsafe {
|
||||
self.unchecked_shr(rhs & ($BITS - 1))
|
||||
self.unchecked_shr(rhs & (Self::BITS - 1))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1901,7 +1921,7 @@ macro_rules! int_impl {
|
|||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn overflowing_shl(self, rhs: u32) -> (Self, bool) {
|
||||
(self.wrapping_shl(rhs), (rhs > ($BITS - 1)))
|
||||
(self.wrapping_shl(rhs), rhs >= Self::BITS)
|
||||
}
|
||||
|
||||
/// Shifts self right by `rhs` bits.
|
||||
|
@ -1924,7 +1944,7 @@ macro_rules! int_impl {
|
|||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn overflowing_shr(self, rhs: u32) -> (Self, bool) {
|
||||
(self.wrapping_shr(rhs), (rhs > ($BITS - 1)))
|
||||
(self.wrapping_shr(rhs), rhs >= Self::BITS)
|
||||
}
|
||||
|
||||
/// Computes the absolute value of `self`.
|
||||
|
|
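The constants above now derive from one another instead of repeating per-type literals: `MAX` comes from the unsigned counterpart and `MIN` from `MAX`, and the shift helpers compare against `Self::BITS` rather than a `$BITS - 1` literal. A standalone check that the two formulations agree, using `i8` (illustration only):

fn main() {
    // New style: MAX is derived from the unsigned type, MIN from MAX.
    let max = (u8::MAX >> 1) as i8;
    let min = !max;
    assert_eq!((min, max), (i8::MIN, i8::MAX)); // (-128, 127)

    // BITS taken from the unsigned counterpart is the same value as before.
    assert_eq!(i8::BITS, u8::BITS);

    // `rhs >= Self::BITS` is the same predicate as the old `rhs > ($BITS - 1)`.
    for rhs in 0..=16u32 {
        assert_eq!(rhs >= i8::BITS, rhs > (i8::BITS - 1));
    }
}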
|
@ -226,72 +226,217 @@ macro_rules! widening_impl {
|
|||
}
|
||||
|
||||
impl i8 {
|
||||
int_impl! { i8, i8, u8, 8, 7, -128, 127, 2, "-0x7e", "0xa", "0x12", "0x12", "0x48",
|
||||
"[0x12]", "[0x12]", "", "", "" }
|
||||
int_impl! {
|
||||
Self = i8,
|
||||
ActualT = i8,
|
||||
UnsignedT = u8,
|
||||
BITS = 8,
|
||||
BITS_MINUS_ONE = 7,
|
||||
Min = -128,
|
||||
Max = 127,
|
||||
rot = 2,
|
||||
rot_op = "-0x7e",
|
||||
rot_result = "0xa",
|
||||
swap_op = "0x12",
|
||||
swapped = "0x12",
|
||||
reversed = "0x48",
|
||||
le_bytes = "[0x12]",
|
||||
be_bytes = "[0x12]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
}
|
||||
|
||||
impl i16 {
|
||||
int_impl! { i16, i16, u16, 16, 15, -32768, 32767, 4, "-0x5ffd", "0x3a", "0x1234", "0x3412",
|
||||
"0x2c48", "[0x34, 0x12]", "[0x12, 0x34]", "", "", "" }
|
||||
int_impl! {
|
||||
Self = i16,
|
||||
ActualT = i16,
|
||||
UnsignedT = u16,
|
||||
BITS = 16,
|
||||
BITS_MINUS_ONE = 15,
|
||||
Min = -32768,
|
||||
Max = 32767,
|
||||
rot = 4,
|
||||
rot_op = "-0x5ffd",
|
||||
rot_result = "0x3a",
|
||||
swap_op = "0x1234",
|
||||
swapped = "0x3412",
|
||||
reversed = "0x2c48",
|
||||
le_bytes = "[0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
}
|
||||
|
||||
impl i32 {
|
||||
int_impl! { i32, i32, u32, 32, 31, -2147483648, 2147483647, 8, "0x10000b3", "0xb301",
|
||||
"0x12345678", "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]",
|
||||
"[0x12, 0x34, 0x56, 0x78]", "", "", "" }
|
||||
int_impl! {
|
||||
Self = i32,
|
||||
ActualT = i32,
|
||||
UnsignedT = u32,
|
||||
BITS = 32,
|
||||
BITS_MINUS_ONE = 31,
|
||||
Min = -2147483648,
|
||||
Max = 2147483647,
|
||||
rot = 8,
|
||||
rot_op = "0x10000b3",
|
||||
rot_result = "0xb301",
|
||||
swap_op = "0x12345678",
|
||||
swapped = "0x78563412",
|
||||
reversed = "0x1e6a2c48",
|
||||
le_bytes = "[0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
}
|
||||
|
||||
impl i64 {
|
||||
int_impl! { i64, i64, u64, 64, 63, -9223372036854775808, 9223372036854775807, 12,
|
||||
"0xaa00000000006e1", "0x6e10aa", "0x1234567890123456", "0x5634129078563412",
|
||||
"0x6a2c48091e6a2c48", "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
"[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]", "", "", "" }
|
||||
int_impl! {
|
||||
Self = i64,
|
||||
ActualT = i64,
|
||||
UnsignedT = u64,
|
||||
BITS = 64,
|
||||
BITS_MINUS_ONE = 63,
|
||||
Min = -9223372036854775808,
|
||||
Max = 9223372036854775807,
|
||||
rot = 12,
|
||||
rot_op = "0xaa00000000006e1",
|
||||
rot_result = "0x6e10aa",
|
||||
swap_op = "0x1234567890123456",
|
||||
swapped = "0x5634129078563412",
|
||||
reversed = "0x6a2c48091e6a2c48",
|
||||
le_bytes = "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
}
|
||||
|
||||
impl i128 {
|
||||
int_impl! { i128, i128, u128, 128, 127, -170141183460469231731687303715884105728,
|
||||
170141183460469231731687303715884105727, 16,
|
||||
"0x13f40000000000000000000000004f76", "0x4f7613f4", "0x12345678901234567890123456789012",
|
||||
"0x12907856341290785634129078563412", "0x48091e6a2c48091e6a2c48091e6a2c48",
|
||||
"[0x12, 0x90, 0x78, 0x56, 0x34, 0x12, 0x90, 0x78, \
|
||||
0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
"[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, \
|
||||
0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12]", "", "", "" }
|
||||
int_impl! {
|
||||
Self = i128,
|
||||
ActualT = i128,
|
||||
UnsignedT = u128,
|
||||
BITS = 128,
|
||||
BITS_MINUS_ONE = 127,
|
||||
Min = -170141183460469231731687303715884105728,
|
||||
Max = 170141183460469231731687303715884105727,
|
||||
rot = 16,
|
||||
rot_op = "0x13f40000000000000000000000004f76",
|
||||
rot_result = "0x4f7613f4",
|
||||
swap_op = "0x12345678901234567890123456789012",
|
||||
swapped = "0x12907856341290785634129078563412",
|
||||
reversed = "0x48091e6a2c48091e6a2c48091e6a2c48",
|
||||
le_bytes = "[0x12, 0x90, 0x78, 0x56, 0x34, 0x12, 0x90, 0x78, \
|
||||
0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, \
|
||||
0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "16")]
|
||||
impl isize {
|
||||
int_impl! { isize, i16, usize, 16, 15, -32768, 32767, 4, "-0x5ffd", "0x3a", "0x1234",
|
||||
"0x3412", "0x2c48", "[0x34, 0x12]", "[0x12, 0x34]",
|
||||
usize_isize_to_xe_bytes_doc!(), usize_isize_from_xe_bytes_doc!(),
|
||||
" on 16-bit targets" }
|
||||
int_impl! {
|
||||
Self = isize,
|
||||
ActualT = i16,
|
||||
UnsignedT = usize,
|
||||
BITS = 16,
|
||||
BITS_MINUS_ONE = 15,
|
||||
Min = -32768,
|
||||
Max = 32767,
|
||||
rot = 4,
|
||||
rot_op = "-0x5ffd",
|
||||
rot_result = "0x3a",
|
||||
swap_op = "0x1234",
|
||||
swapped = "0x3412",
|
||||
reversed = "0x2c48",
|
||||
le_bytes = "[0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34]",
|
||||
to_xe_bytes_doc = usize_isize_to_xe_bytes_doc!(),
|
||||
from_xe_bytes_doc = usize_isize_from_xe_bytes_doc!(),
|
||||
bound_condition = " on 16-bit targets",
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "32")]
|
||||
impl isize {
|
||||
int_impl! { isize, i32, usize, 32, 31, -2147483648, 2147483647, 8, "0x10000b3", "0xb301",
|
||||
"0x12345678", "0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]",
|
||||
"[0x12, 0x34, 0x56, 0x78]",
|
||||
usize_isize_to_xe_bytes_doc!(), usize_isize_from_xe_bytes_doc!(),
|
||||
" on 32-bit targets" }
|
||||
int_impl! {
|
||||
Self = isize,
|
||||
ActualT = i32,
|
||||
UnsignedT = usize,
|
||||
BITS = 32,
|
||||
BITS_MINUS_ONE = 31,
|
||||
Min = -2147483648,
|
||||
Max = 2147483647,
|
||||
rot = 8,
|
||||
rot_op = "0x10000b3",
|
||||
rot_result = "0xb301",
|
||||
swap_op = "0x12345678",
|
||||
swapped = "0x78563412",
|
||||
reversed = "0x1e6a2c48",
|
||||
le_bytes = "[0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78]",
|
||||
to_xe_bytes_doc = usize_isize_to_xe_bytes_doc!(),
|
||||
from_xe_bytes_doc = usize_isize_from_xe_bytes_doc!(),
|
||||
bound_condition = " on 32-bit targets",
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
impl isize {
|
||||
int_impl! { isize, i64, usize, 64, 63, -9223372036854775808, 9223372036854775807,
|
||||
12, "0xaa00000000006e1", "0x6e10aa", "0x1234567890123456", "0x5634129078563412",
|
||||
"0x6a2c48091e6a2c48", "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
"[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]",
|
||||
usize_isize_to_xe_bytes_doc!(), usize_isize_from_xe_bytes_doc!(),
|
||||
" on 64-bit targets" }
|
||||
int_impl! {
|
||||
Self = isize,
|
||||
ActualT = i64,
|
||||
UnsignedT = usize,
|
||||
BITS = 64,
|
||||
BITS_MINUS_ONE = 63,
|
||||
Min = -9223372036854775808,
|
||||
Max = 9223372036854775807,
|
||||
rot = 12,
|
||||
rot_op = "0xaa00000000006e1",
|
||||
rot_result = "0x6e10aa",
|
||||
swap_op = "0x1234567890123456",
|
||||
swapped = "0x5634129078563412",
|
||||
reversed = "0x6a2c48091e6a2c48",
|
||||
le_bytes = "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]",
|
||||
to_xe_bytes_doc = usize_isize_to_xe_bytes_doc!(),
|
||||
from_xe_bytes_doc = usize_isize_from_xe_bytes_doc!(),
|
||||
bound_condition = " on 64-bit targets",
|
||||
}
|
||||
}
|
||||
|
||||
/// If 6th bit set ascii is upper case.
|
||||
const ASCII_CASE_MASK: u8 = 0b0010_0000;
|
||||
|
||||
impl u8 {
|
||||
uint_impl! { u8, u8, i8, NonZeroU8, 8, 255, 2, "0x82", "0xa", "0x12", "0x12", "0x48", "[0x12]",
|
||||
"[0x12]", "", "", "" }
|
||||
uint_impl! {
|
||||
Self = u8,
|
||||
ActualT = u8,
|
||||
SignedT = i8,
|
||||
NonZeroT = NonZeroU8,
|
||||
BITS = 8,
|
||||
MAX = 255,
|
||||
rot = 2,
|
||||
rot_op = "0x82",
|
||||
rot_result = "0xa",
|
||||
swap_op = "0x12",
|
||||
swapped = "0x12",
|
||||
reversed = "0x48",
|
||||
le_bytes = "[0x12]",
|
||||
be_bytes = "[0x12]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
widening_impl! { u8, u16, 8, unsigned }
|
||||
|
||||
/// Checks if the value is within the ASCII range.
|
||||
|
@ -875,8 +1020,25 @@ impl u8 {
|
|||
}
|
||||
|
||||
impl u16 {
|
||||
uint_impl! { u16, u16, i16, NonZeroU16, 16, 65535, 4, "0xa003", "0x3a", "0x1234", "0x3412", "0x2c48",
|
||||
"[0x34, 0x12]", "[0x12, 0x34]", "", "", "" }
|
||||
uint_impl! {
|
||||
Self = u16,
|
||||
ActualT = u16,
|
||||
SignedT = i16,
|
||||
NonZeroT = NonZeroU16,
|
||||
BITS = 16,
|
||||
MAX = 65535,
|
||||
rot = 4,
|
||||
rot_op = "0xa003",
|
||||
rot_result = "0x3a",
|
||||
swap_op = "0x1234",
|
||||
swapped = "0x3412",
|
||||
reversed = "0x2c48",
|
||||
le_bytes = "[0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
widening_impl! { u16, u32, 16, unsigned }
|
||||
|
||||
/// Checks if the value is a Unicode surrogate code point, which are disallowed values for [`char`].
|
||||
|
@ -906,56 +1068,144 @@ impl u16 {
|
|||
}
|
||||
|
||||
impl u32 {
|
||||
uint_impl! { u32, u32, i32, NonZeroU32, 32, 4294967295, 8, "0x10000b3", "0xb301", "0x12345678",
|
||||
"0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", "[0x12, 0x34, 0x56, 0x78]", "", "", "" }
|
||||
uint_impl! {
|
||||
Self = u32,
|
||||
ActualT = u32,
|
||||
SignedT = i32,
|
||||
NonZeroT = NonZeroU32,
|
||||
BITS = 32,
|
||||
MAX = 4294967295,
|
||||
rot = 8,
|
||||
rot_op = "0x10000b3",
|
||||
rot_result = "0xb301",
|
||||
swap_op = "0x12345678",
|
||||
swapped = "0x78563412",
|
||||
reversed = "0x1e6a2c48",
|
||||
le_bytes = "[0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
widening_impl! { u32, u64, 32, unsigned }
|
||||
}
|
||||
|
||||
impl u64 {
|
||||
uint_impl! { u64, u64, i64, NonZeroU64, 64, 18446744073709551615, 12, "0xaa00000000006e1", "0x6e10aa",
|
||||
"0x1234567890123456", "0x5634129078563412", "0x6a2c48091e6a2c48",
|
||||
"[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
"[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]",
|
||||
"", "", ""}
|
||||
uint_impl! {
|
||||
Self = u64,
|
||||
ActualT = u64,
|
||||
SignedT = i64,
|
||||
NonZeroT = NonZeroU64,
|
||||
BITS = 64,
|
||||
MAX = 18446744073709551615,
|
||||
rot = 12,
|
||||
rot_op = "0xaa00000000006e1",
|
||||
rot_result = "0x6e10aa",
|
||||
swap_op = "0x1234567890123456",
|
||||
swapped = "0x5634129078563412",
|
||||
reversed = "0x6a2c48091e6a2c48",
|
||||
le_bytes = "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
widening_impl! { u64, u128, 64, unsigned }
|
||||
}
|
||||
|
||||
impl u128 {
|
||||
uint_impl! { u128, u128, i128, NonZeroU128, 128, 340282366920938463463374607431768211455, 16,
|
||||
"0x13f40000000000000000000000004f76", "0x4f7613f4", "0x12345678901234567890123456789012",
|
||||
"0x12907856341290785634129078563412", "0x48091e6a2c48091e6a2c48091e6a2c48",
|
||||
"[0x12, 0x90, 0x78, 0x56, 0x34, 0x12, 0x90, 0x78, \
|
||||
0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
"[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, \
|
||||
0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12]",
|
||||
"", "", ""}
|
||||
uint_impl! {
|
||||
Self = u128,
|
||||
ActualT = u128,
|
||||
SignedT = i128,
|
||||
NonZeroT = NonZeroU128,
|
||||
BITS = 128,
|
||||
MAX = 340282366920938463463374607431768211455,
|
||||
rot = 16,
|
||||
rot_op = "0x13f40000000000000000000000004f76",
|
||||
rot_result = "0x4f7613f4",
|
||||
swap_op = "0x12345678901234567890123456789012",
|
||||
swapped = "0x12907856341290785634129078563412",
|
||||
reversed = "0x48091e6a2c48091e6a2c48091e6a2c48",
|
||||
le_bytes = "[0x12, 0x90, 0x78, 0x56, 0x34, 0x12, 0x90, 0x78, \
|
||||
0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, \
|
||||
0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12]",
|
||||
to_xe_bytes_doc = "",
|
||||
from_xe_bytes_doc = "",
|
||||
bound_condition = "",
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "16")]
|
||||
impl usize {
|
||||
uint_impl! { usize, u16, isize, NonZeroUsize, 16, 65535, 4, "0xa003", "0x3a", "0x1234", "0x3412", "0x2c48",
|
||||
"[0x34, 0x12]", "[0x12, 0x34]",
|
||||
usize_isize_to_xe_bytes_doc!(), usize_isize_from_xe_bytes_doc!(),
|
||||
" on 16-bit targets" }
|
||||
uint_impl! {
|
||||
Self = usize,
|
||||
ActualT = u16,
|
||||
SignedT = isize,
|
||||
NonZeroT = NonZeroUsize,
|
||||
BITS = 16,
|
||||
MAX = 65535,
|
||||
rot = 4,
|
||||
rot_op = "0xa003",
|
||||
rot_result = "0x3a",
|
||||
swap_op = "0x1234",
|
||||
swapped = "0x3412",
|
||||
reversed = "0x2c48",
|
||||
le_bytes = "[0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34]",
|
||||
to_xe_bytes_doc = usize_isize_to_xe_bytes_doc!(),
|
||||
from_xe_bytes_doc = usize_isize_from_xe_bytes_doc!(),
|
||||
bound_condition = " on 16-bit targets",
|
||||
}
|
||||
widening_impl! { usize, u32, 16, unsigned }
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "32")]
|
||||
impl usize {
|
||||
uint_impl! { usize, u32, isize, NonZeroUsize, 32, 4294967295, 8, "0x10000b3", "0xb301", "0x12345678",
|
||||
"0x78563412", "0x1e6a2c48", "[0x78, 0x56, 0x34, 0x12]", "[0x12, 0x34, 0x56, 0x78]",
|
||||
usize_isize_to_xe_bytes_doc!(), usize_isize_from_xe_bytes_doc!(),
|
||||
" on 32-bit targets" }
|
||||
uint_impl! {
|
||||
Self = usize,
|
||||
ActualT = u32,
|
||||
SignedT = isize,
|
||||
NonZeroT = NonZeroUsize,
|
||||
BITS = 32,
|
||||
MAX = 4294967295,
|
||||
rot = 8,
|
||||
rot_op = "0x10000b3",
|
||||
rot_result = "0xb301",
|
||||
swap_op = "0x12345678",
|
||||
swapped = "0x78563412",
|
||||
reversed = "0x1e6a2c48",
|
||||
le_bytes = "[0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78]",
|
||||
to_xe_bytes_doc = usize_isize_to_xe_bytes_doc!(),
|
||||
from_xe_bytes_doc = usize_isize_from_xe_bytes_doc!(),
|
||||
bound_condition = " on 32-bit targets",
|
||||
}
|
||||
widening_impl! { usize, u64, 32, unsigned }
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
impl usize {
|
||||
uint_impl! { usize, u64, isize, NonZeroUsize, 64, 18446744073709551615, 12, "0xaa00000000006e1", "0x6e10aa",
|
||||
"0x1234567890123456", "0x5634129078563412", "0x6a2c48091e6a2c48",
|
||||
"[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
"[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]",
|
||||
usize_isize_to_xe_bytes_doc!(), usize_isize_from_xe_bytes_doc!(),
|
||||
" on 64-bit targets" }
|
||||
uint_impl! {
|
||||
Self = usize,
|
||||
ActualT = u64,
|
||||
SignedT = isize,
|
||||
NonZeroT = NonZeroUsize,
|
||||
BITS = 64,
|
||||
MAX = 18446744073709551615,
|
||||
rot = 12,
|
||||
rot_op = "0xaa00000000006e1",
|
||||
rot_result = "0x6e10aa",
|
||||
swap_op = "0x1234567890123456",
|
||||
swapped = "0x5634129078563412",
|
||||
reversed = "0x6a2c48091e6a2c48",
|
||||
le_bytes = "[0x56, 0x34, 0x12, 0x90, 0x78, 0x56, 0x34, 0x12]",
|
||||
be_bytes = "[0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56]",
|
||||
to_xe_bytes_doc = usize_isize_to_xe_bytes_doc!(),
|
||||
from_xe_bytes_doc = usize_isize_from_xe_bytes_doc!(),
|
||||
bound_condition = " on 64-bit targets",
|
||||
}
|
||||
widening_impl! { usize, u128, 64, unsigned }
|
||||
}
|
||||
|
||||
|
|
|
@ -1,10 +1,28 @@
|
|||
macro_rules! uint_impl {
|
||||
($SelfT:ty, $ActualT:ident, $SignedT:ident, $NonZeroT:ident,
|
||||
$BITS:expr, $MaxV:expr,
|
||||
$rot:expr, $rot_op:expr, $rot_result:expr, $swap_op:expr, $swapped:expr,
|
||||
$reversed:expr, $le_bytes:expr, $be_bytes:expr,
|
||||
$to_xe_bytes_doc:expr, $from_xe_bytes_doc:expr,
|
||||
$bound_condition:expr) => {
|
||||
(
|
||||
Self = $SelfT:ty,
|
||||
ActualT = $ActualT:ident,
|
||||
SignedT = $SignedT:ident,
|
||||
NonZeroT = $NonZeroT:ident,
|
||||
|
||||
// These are all for use *only* in doc comments.
|
||||
// As such, they're all passed as literals -- passing them as a string
|
||||
// literal is fine if they need to be multiple code tokens.
|
||||
// In non-comments, use the associated constants rather than these.
|
||||
BITS = $BITS:literal,
|
||||
MAX = $MaxV:literal,
|
||||
rot = $rot:literal,
|
||||
rot_op = $rot_op:literal,
|
||||
rot_result = $rot_result:literal,
|
||||
swap_op = $swap_op:literal,
|
||||
swapped = $swapped:literal,
|
||||
reversed = $reversed:literal,
|
||||
le_bytes = $le_bytes:literal,
|
||||
be_bytes = $be_bytes:literal,
|
||||
to_xe_bytes_doc = $to_xe_bytes_doc:expr,
|
||||
from_xe_bytes_doc = $from_xe_bytes_doc:expr,
|
||||
bound_condition = $bound_condition:literal,
|
||||
) => {
|
||||
/// The smallest value that can be represented by this integer type.
|
||||
///
|
||||
/// # Examples
|
||||
|
@ -38,7 +56,7 @@ macro_rules! uint_impl {
|
|||
#[doc = concat!("assert_eq!(", stringify!($SelfT), "::BITS, ", stringify!($BITS), ");")]
|
||||
/// ```
|
||||
#[stable(feature = "int_bits_const", since = "1.53.0")]
|
||||
pub const BITS: u32 = $BITS;
|
||||
pub const BITS: u32 = Self::MAX.count_ones();
|
||||
|
||||
/// Converts a string slice in a given base to an integer.
|
||||
///
|
||||
|
@ -1390,7 +1408,7 @@ macro_rules! uint_impl {
|
|||
// SAFETY: the masking by the bitsize of the type ensures that we do not shift
|
||||
// out of bounds
|
||||
unsafe {
|
||||
self.unchecked_shl(rhs & ($BITS - 1))
|
||||
self.unchecked_shl(rhs & (Self::BITS - 1))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1423,7 +1441,7 @@ macro_rules! uint_impl {
|
|||
// SAFETY: the masking by the bitsize of the type ensures that we do not shift
|
||||
// out of bounds
|
||||
unsafe {
|
||||
self.unchecked_shr(rhs & ($BITS - 1))
|
||||
self.unchecked_shr(rhs & (Self::BITS - 1))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1847,7 +1865,7 @@ macro_rules! uint_impl {
|
|||
without modifying the original"]
|
||||
#[inline(always)]
|
||||
pub const fn overflowing_shl(self, rhs: u32) -> (Self, bool) {
|
||||
(self.wrapping_shl(rhs), (rhs > ($BITS - 1)))
|
||||
(self.wrapping_shl(rhs), rhs >= Self::BITS)
|
||||
}
|
||||
|
||||
/// Shifts self right by `rhs` bits.
|
||||
|
@ -1872,7 +1890,7 @@ macro_rules! uint_impl {
|
|||
without modifying the original"]
|
||||
#[inline(always)]
|
||||
pub const fn overflowing_shr(self, rhs: u32) -> (Self, bool) {
|
||||
(self.wrapping_shr(rhs), (rhs > ($BITS - 1)))
|
||||
(self.wrapping_shr(rhs), rhs >= Self::BITS)
|
||||
}
|
||||
|
||||
/// Raises self to the power of `exp`, using exponentiation by squaring.
|
||||
|
|
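For the unsigned macro the new `BITS` is `Self::MAX.count_ones()`, and the `overflowing_*` shifts report overflow exactly when `rhs >= Self::BITS`. A quick standalone check of both identities (illustration only):

fn main() {
    // Counting the ones in MAX recovers the bit width.
    assert_eq!(u8::MAX.count_ones(), 8);
    assert_eq!(u64::MAX.count_ones(), 64);

    // The overflow flag flips exactly at rhs == BITS, matching the old `rhs > BITS - 1`.
    assert_eq!(u8::MAX.overflowing_shl(7), (u8::MAX.wrapping_shl(7), false));
    assert_eq!(u8::MAX.overflowing_shl(8), (u8::MAX.wrapping_shl(8), true));
}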
|
@ -111,10 +111,18 @@ impl Step for Std {
|
|||
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
|
||||
if compiler_to_use != compiler {
|
||||
builder.ensure(Std::new(compiler_to_use, target));
|
||||
builder.info(&format!(
|
||||
"Uplifting stage1 library ({} -> {})",
|
||||
compiler_to_use.host, target
|
||||
));
|
||||
let msg = if compiler_to_use.host == target {
|
||||
format!(
|
||||
"Uplifting library (stage{} -> stage{})",
|
||||
compiler_to_use.stage, compiler.stage
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"Uplifting library (stage{}:{} -> stage{}:{})",
|
||||
compiler_to_use.stage, compiler_to_use.host, compiler.stage, target
|
||||
)
|
||||
};
|
||||
builder.info(&msg);
|
||||
|
||||
// Even if we're not building std this stage, the new sysroot must
|
||||
// still contain the third party objects needed by various targets.
|
||||
|
@ -134,13 +142,23 @@ impl Step for Std {
|
|||
cargo.arg("-p").arg(krate);
|
||||
}
|
||||
|
||||
builder.info(&format!(
|
||||
"Building{} stage{} library artifacts ({} -> {})",
|
||||
crate_description(&self.crates),
|
||||
compiler.stage,
|
||||
&compiler.host,
|
||||
target,
|
||||
));
|
||||
let msg = if compiler.host == target {
|
||||
format!(
|
||||
"Building{} stage{} library artifacts ({}) ",
|
||||
crate_description(&self.crates),
|
||||
compiler.stage,
|
||||
compiler.host
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"Building{} stage{} library artifacts ({} -> {})",
|
||||
crate_description(&self.crates),
|
||||
compiler.stage,
|
||||
compiler.host,
|
||||
target,
|
||||
)
|
||||
};
|
||||
builder.info(&msg);
|
||||
run_cargo(
|
||||
builder,
|
||||
cargo,
|
||||
|
@ -438,10 +456,6 @@ impl Step for StdLink {
|
|||
let compiler = self.compiler;
|
||||
let target_compiler = self.target_compiler;
|
||||
let target = self.target;
|
||||
builder.info(&format!(
|
||||
"Copying stage{} library from stage{} ({} -> {} / {})",
|
||||
target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target
|
||||
));
|
||||
let libdir = builder.sysroot_libdir(target_compiler, target);
|
||||
let hostdir = builder.sysroot_libdir(target_compiler, compiler.host);
|
||||
add_to_sysroot(builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target));
|
||||
|
@ -715,8 +729,22 @@ impl Step for Rustc {
|
|||
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
|
||||
if compiler_to_use != compiler {
|
||||
builder.ensure(Rustc::new(compiler_to_use, target));
|
||||
builder
|
||||
.info(&format!("Uplifting stage1 rustc ({} -> {})", builder.config.build, target));
|
||||
let msg = if compiler_to_use.host == target {
|
||||
format!(
|
||||
"Uplifting rustc (stage{} -> stage{})",
|
||||
compiler_to_use.stage,
|
||||
compiler.stage + 1
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"Uplifting rustc (stage{}:{} -> stage{}:{})",
|
||||
compiler_to_use.stage,
|
||||
compiler_to_use.host,
|
||||
compiler.stage + 1,
|
||||
target
|
||||
)
|
||||
};
|
||||
builder.info(&msg);
|
||||
builder.ensure(RustcLink::from_rustc(self, compiler_to_use));
|
||||
return;
|
||||
}
|
||||
|
@ -810,13 +838,24 @@ impl Step for Rustc {
|
|||
cargo.arg("-p").arg(krate);
|
||||
}
|
||||
|
||||
builder.info(&format!(
|
||||
"Building{} stage{} compiler artifacts ({} -> {})",
|
||||
crate_description(&self.crates),
|
||||
compiler.stage,
|
||||
&compiler.host,
|
||||
target,
|
||||
));
|
||||
let msg = if compiler.host == target {
|
||||
format!(
|
||||
"Building{} compiler artifacts (stage{} -> stage{})",
|
||||
crate_description(&self.crates),
|
||||
compiler.stage,
|
||||
compiler.stage + 1
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"Building{} compiler artifacts (stage{}:{} -> stage{}:{})",
|
||||
crate_description(&self.crates),
|
||||
compiler.stage,
|
||||
compiler.host,
|
||||
compiler.stage + 1,
|
||||
target,
|
||||
)
|
||||
};
|
||||
builder.info(&msg);
|
||||
run_cargo(
|
||||
builder,
|
||||
cargo,
|
||||
|
@ -1000,10 +1039,6 @@ impl Step for RustcLink {
|
|||
let compiler = self.compiler;
|
||||
let target_compiler = self.target_compiler;
|
||||
let target = self.target;
|
||||
builder.info(&format!(
|
||||
"Copying stage{} rustc from stage{} ({} -> {} / {})",
|
||||
target_compiler.stage, compiler.stage, &compiler.host, target_compiler.host, target
|
||||
));
|
||||
add_to_sysroot(
|
||||
builder,
|
||||
&builder.sysroot_libdir(target_compiler, target),
|
||||
|
@ -1077,10 +1112,15 @@ impl Step for CodegenBackend {
|
|||
|
||||
let tmp_stamp = out_dir.join(".tmp.stamp");
|
||||
|
||||
builder.info(&format!(
|
||||
"Building stage{} codegen backend {} ({} -> {})",
|
||||
compiler.stage, backend, &compiler.host, target
|
||||
));
|
||||
let msg = if compiler.host == target {
|
||||
format!("Building stage{} codegen backend {}", compiler.stage, backend)
|
||||
} else {
|
||||
format!(
|
||||
"Building stage{} codegen backend {} ({} -> {})",
|
||||
compiler.stage, backend, compiler.host, target
|
||||
)
|
||||
};
|
||||
builder.info(&msg);
|
||||
let files = run_cargo(builder, cargo, vec![], &tmp_stamp, vec![], false, false);
|
||||
if builder.config.dry_run() {
|
||||
return;
|
||||
|
@ -1386,7 +1426,12 @@ impl Step for Assemble {
|
|||
|
||||
let stage = target_compiler.stage;
|
||||
let host = target_compiler.host;
|
||||
builder.info(&format!("Assembling stage{} compiler ({})", stage, host));
|
||||
let msg = if build_compiler.host == host {
|
||||
format!("Assembling stage{} compiler", stage)
|
||||
} else {
|
||||
format!("Assembling stage{} compiler ({})", stage, host)
|
||||
};
|
||||
builder.info(&msg);
|
||||
|
||||
// Link in all dylibs to the libdir
|
||||
let stamp = librustc_stamp(builder, build_compiler, target_compiler.host);
|
||||
|
|
|
@ -33,6 +33,44 @@ struct ToolBuild {
|
|||
allow_features: &'static str,
|
||||
}
|
||||
|
||||
fn tooling_output(
|
||||
mode: Mode,
|
||||
tool: &str,
|
||||
build_stage: u32,
|
||||
host: &TargetSelection,
|
||||
target: &TargetSelection,
|
||||
) -> String {
|
||||
match mode {
|
||||
// depends on compiler stage, different to host compiler
|
||||
Mode::ToolRustc => {
|
||||
if host == target {
|
||||
format!("Building tool {} (stage{} -> stage{})", tool, build_stage, build_stage + 1)
|
||||
} else {
|
||||
format!(
|
||||
"Building tool {} (stage{}:{} -> stage{}:{})",
|
||||
tool,
|
||||
build_stage,
|
||||
host,
|
||||
build_stage + 1,
|
||||
target
|
||||
)
|
||||
}
|
||||
}
|
||||
// doesn't depend on compiler, same as host compiler
|
||||
Mode::ToolStd => {
|
||||
if host == target {
|
||||
format!("Building tool {} (stage{})", tool, build_stage)
|
||||
} else {
|
||||
format!(
|
||||
"Building tool {} (stage{}:{} -> stage{}:{})",
|
||||
tool, build_stage, host, build_stage, target
|
||||
)
|
||||
}
|
||||
}
|
||||
_ => format!("Building tool {} (stage{})", tool, build_stage),
|
||||
}
|
||||
}
|
||||
|
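`tooling_output` centralizes the build messages that the `builder.info` calls below print. A simplified standalone sketch of the two interesting modes, with tool names and host/target strings invented for the example (the real function takes `Mode` and `TargetSelection`):

fn message(tool_rustc: bool, tool: &str, stage: u32, host: &str, target: &str) -> String {
    if tool_rustc {
        // ToolRustc builds are offset by one stage relative to the compiler that builds them.
        if host == target {
            format!("Building tool {} (stage{} -> stage{})", tool, stage, stage + 1)
        } else {
            format!("Building tool {} (stage{}:{} -> stage{}:{})", tool, stage, host, stage + 1, target)
        }
    } else if host == target {
        // ToolStd builds stay at the same stage.
        format!("Building tool {} (stage{})", tool, stage)
    } else {
        format!("Building tool {} (stage{}:{} -> stage{}:{})", tool, stage, host, stage, target)
    }
}

fn main() {
    assert_eq!(message(true, "rustdoc", 1, "x86_64", "x86_64"), "Building tool rustdoc (stage1 -> stage2)");
    assert_eq!(message(false, "tidy", 0, "x86_64", "aarch64"), "Building tool tidy (stage0:x86_64 -> stage0:aarch64)");
}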
||||
impl Step for ToolBuild {
|
||||
type Output = Option<PathBuf>;
|
||||
|
||||
|
@ -74,8 +112,14 @@ impl Step for ToolBuild {
|
|||
if !self.allow_features.is_empty() {
|
||||
cargo.allow_features(self.allow_features);
|
||||
}
|
||||
|
||||
builder.info(&format!("Building stage{} tool {} ({})", compiler.stage, tool, target));
|
||||
let msg = tooling_output(
|
||||
self.mode,
|
||||
self.tool,
|
||||
self.compiler.stage,
|
||||
&self.compiler.host,
|
||||
&self.target,
|
||||
);
|
||||
builder.info(&msg);
|
||||
let mut duplicates = Vec::new();
|
||||
let is_expected = compile::stream_cargo(builder, cargo, vec![], &mut |msg| {
|
||||
// Only care about big things like the RLS/Cargo for now
|
||||
|
@ -562,10 +606,14 @@ impl Step for Rustdoc {
|
|||
features.as_slice(),
|
||||
);
|
||||
|
||||
builder.info(&format!(
|
||||
"Building rustdoc for stage{} ({})",
|
||||
target_compiler.stage, target_compiler.host
|
||||
));
|
||||
let msg = tooling_output(
|
||||
Mode::ToolRustc,
|
||||
"rustdoc",
|
||||
build_compiler.stage,
|
||||
&self.compiler.host,
|
||||
&target,
|
||||
);
|
||||
builder.info(&msg);
|
||||
builder.run(&mut cargo.into());
|
||||
|
||||
// Cargo adds a number of paths to the dylib search path on windows, which results in
|
||||
|
|
|
@ -194,7 +194,11 @@ pub(crate) fn register_lints(_sess: &Session, lint_store: &mut LintStore) {
|
|||
true,
|
||||
"rustdoc::all",
|
||||
Some("rustdoc"),
|
||||
RUSTDOC_LINTS.iter().map(|&lint| LintId::of(lint)).collect(),
|
||||
RUSTDOC_LINTS
|
||||
.iter()
|
||||
.filter(|lint| lint.feature_gate.is_none()) // only include stable lints
|
||||
.map(|&lint| LintId::of(lint))
|
||||
.collect(),
|
||||
);
|
||||
for lint in &*RUSTDOC_LINTS {
|
||||
let name = lint.name_lower();
|
||||
|
|
|
@ -62,7 +62,7 @@ impl<'a, 'tcx> Stripper<'a, 'tcx> {
|
|||
|
||||
/// In case `i` is a non-hidden impl block, then we special-case it by changing the value
|
||||
/// of `is_in_hidden_item` to `true` because the impl children inherit its visibility.
|
||||
fn recurse_in_impl(&mut self, i: Item) -> Item {
|
||||
fn recurse_in_impl_or_exported_macro(&mut self, i: Item) -> Item {
|
||||
let prev = mem::replace(&mut self.is_in_hidden_item, false);
|
||||
let ret = self.fold_item_recur(i);
|
||||
self.is_in_hidden_item = prev;
|
||||
|
@ -73,9 +73,17 @@ impl<'a, 'tcx> Stripper<'a, 'tcx> {
|
|||
impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
|
||||
fn fold_item(&mut self, i: Item) -> Option<Item> {
|
||||
let has_doc_hidden = i.attrs.lists(sym::doc).has_word(sym::hidden);
|
||||
let is_impl = matches!(*i.kind, clean::ImplItem(..));
|
||||
let is_impl_or_exported_macro = match *i.kind {
|
||||
clean::ImplItem(..) => true,
|
||||
// If the macro has the `#[macro_export]` attribute, it means it's accessible at the
|
||||
// crate level so it should be handled differently.
|
||||
clean::MacroItem(..) => {
|
||||
i.attrs.other_attrs.iter().any(|attr| attr.has_name(sym::macro_export))
|
||||
}
|
||||
_ => false,
|
||||
};
|
||||
let mut is_hidden = has_doc_hidden;
|
||||
if !is_impl {
|
||||
if !is_impl_or_exported_macro {
|
||||
is_hidden = self.is_in_hidden_item || has_doc_hidden;
|
||||
if !is_hidden && i.inline_stmt_id.is_none() {
|
||||
// We don't need to check if it's coming from a reexport since the reexport itself was
|
||||
|
@ -92,8 +100,8 @@ impl<'a, 'tcx> DocFolder for Stripper<'a, 'tcx> {
|
|||
if self.update_retained {
|
||||
self.retained.insert(i.item_id);
|
||||
}
|
||||
return Some(if is_impl {
|
||||
self.recurse_in_impl(i)
|
||||
return Some(if is_impl_or_exported_macro {
|
||||
self.recurse_in_impl_or_exported_macro(i)
|
||||
} else {
|
||||
self.set_is_in_hidden_item_and_fold(false, i)
|
||||
});
|
||||
|
|
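The reason exported macros get the same treatment as impl blocks: `#[macro_export]` re-exports the macro at the crate root regardless of where, or how hidden, it is defined, so its documentation must not be stripped along with its enclosing module. A small illustration (names invented):

// Even inside a `#[doc(hidden)]` module, an exported macro is reachable (and
// documented) at the crate root, so the stripper must recurse into it like an impl.
#[doc(hidden)]
mod private {
    #[macro_export]
    macro_rules! shout {
        ($e:expr) => {
            println!("{}!", $e)
        };
    }
}

fn main() {
    // Resolved through the crate root, not through the hidden module.
    crate::shout!("exported macros ignore module privacy");
}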
|
@ -1 +1 @@
|
|||
7e253a7fb2e2e050021fed32da6fa2ec7bcea0fb
|
||||
f715e430aac0de131e2ad21804013ea405722a66
|
||||
|
|
|
@ -478,6 +478,10 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
|
|||
} else if matches!(v.layout.fields, FieldsShape::Union(..)) {
|
||||
// A (non-frozen) union. We fall back to whatever the type says.
|
||||
(self.unsafe_cell_action)(v)
|
||||
} else if matches!(v.layout.ty.kind(), ty::Dynamic(_, _, ty::DynStar)) {
|
||||
// This needs to read the vtable pointer to proceed type-driven, but we don't
|
||||
// want to reentrantly read from memory here.
|
||||
(self.unsafe_cell_action)(v)
|
||||
} else {
|
||||
// We want to not actually read from memory for this visit. So, before
|
||||
// walking this value, we have to make sure it is not a
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
#![allow(
|
||||
clippy::collapsible_else_if,
|
||||
clippy::collapsible_if,
|
||||
clippy::if_same_then_else,
|
||||
clippy::comparison_chain,
|
||||
clippy::enum_variant_names,
|
||||
clippy::field_reassign_with_default,
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
error: Undefined Behavior: constructing invalid value: encountered a dangling reference (address $HEX is unallocated)
|
||||
error: Undefined Behavior: constructing invalid value: encountered a dangling reference ($HEX[noalloc] has no provenance)
|
||||
--> $DIR/branchless-select-i128-pointer.rs:LL:CC
|
||||
|
|
||||
LL | / transmute::<_, &str>(
|
||||
|
@ -6,7 +6,7 @@ LL | |
|
|||
LL | | !mask & transmute::<_, TwoPtrs>("false !")
|
||||
LL | | | mask & transmute::<_, TwoPtrs>("true !"),
|
||||
LL | | )
|
||||
| |_____________^ constructing invalid value: encountered a dangling reference (address $HEX is unallocated)
|
||||
| |_____________^ constructing invalid value: encountered a dangling reference ($HEX[noalloc] has no provenance)
|
||||
|
|
||||
= help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior
|
||||
= help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information
|
||||
|
|
|
@ -3,5 +3,5 @@
|
|||
use std::mem;
|
||||
|
||||
fn main() {
|
||||
let _x: &i32 = unsafe { mem::transmute(16usize) }; //~ ERROR: encountered a dangling reference (address 0x10 is unallocated)
|
||||
let _x: &i32 = unsafe { mem::transmute(16usize) }; //~ ERROR: encountered a dangling reference
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
error: Undefined Behavior: constructing invalid value: encountered a dangling reference (address 0x10 is unallocated)
|
||||
error: Undefined Behavior: constructing invalid value: encountered a dangling reference (0x10[noalloc] has no provenance)
|
||||
--> $DIR/dangling_ref1.rs:LL:CC
|
||||
|
|
||||
LL | let _x: &i32 = unsafe { mem::transmute(16usize) };
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling reference (address 0x10 is unallocated)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling reference (0x10[noalloc] has no provenance)
|
||||
|
|
||||
= help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior
|
||||
= help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information
|
||||
|
|
119
src/tools/miri/tests/pass/dyn-star.rs
Normal file
|
@ -0,0 +1,119 @@
|
|||
#![feature(dyn_star)]
|
||||
#![allow(incomplete_features)]
|
||||
#![feature(custom_inner_attributes)]
|
||||
// rustfmt destroys `dyn* Trait` syntax
|
||||
#![rustfmt::skip]
|
||||
|
||||
use std::fmt::{Debug, Display};
|
||||
|
||||
fn main() {
|
||||
make_dyn_star();
|
||||
method();
|
||||
box_();
|
||||
dispatch_on_pin_mut();
|
||||
dyn_star_to_dyn();
|
||||
dyn_to_dyn_star();
|
||||
}
|
||||
|
||||
fn dyn_star_to_dyn() {
|
||||
let x: dyn* Debug = &42;
|
||||
let x = Box::new(x) as Box<dyn Debug>;
|
||||
assert_eq!("42", format!("{x:?}"));
|
||||
}
|
||||
|
||||
fn dyn_to_dyn_star() {
|
||||
let x: Box<dyn Debug> = Box::new(42);
|
||||
let x = &x as dyn* Debug;
|
||||
assert_eq!("42", format!("{x:?}"));
|
||||
}
|
||||
|
||||
fn make_dyn_star() {
|
||||
fn make_dyn_star_coercion(i: usize) {
|
||||
let _dyn_i: dyn* Debug = i;
|
||||
}
|
||||
|
||||
fn make_dyn_star_explicit(i: usize) {
|
||||
let _dyn_i: dyn* Debug = i as dyn* Debug;
|
||||
}
|
||||
|
||||
make_dyn_star_coercion(42);
|
||||
make_dyn_star_explicit(42);
|
||||
}
|
||||
|
||||
fn method() {
|
||||
trait Foo {
|
||||
fn get(&self) -> usize;
|
||||
}
|
||||
|
||||
impl Foo for usize {
|
||||
fn get(&self) -> usize {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
fn invoke_dyn_star(i: dyn* Foo) -> usize {
|
||||
i.get()
|
||||
}
|
||||
|
||||
fn make_and_invoke_dyn_star(i: usize) -> usize {
|
||||
let dyn_i: dyn* Foo = i;
|
||||
invoke_dyn_star(dyn_i)
|
||||
}
|
||||
|
||||
assert_eq!(make_and_invoke_dyn_star(42), 42);
|
||||
}
|
||||
|
||||
fn box_() {
|
||||
fn make_dyn_star() -> dyn* Display {
|
||||
Box::new(42) as dyn* Display
|
||||
}
|
||||
|
||||
let x = make_dyn_star();
|
||||
assert_eq!(format!("{x}"), "42");
|
||||
}
|
||||
|
||||
fn dispatch_on_pin_mut() {
|
||||
use std::future::Future;
|
||||
|
||||
async fn foo(f: dyn* Future<Output = i32>) {
|
||||
println!("dispatch_on_pin_mut: value: {}", f.await);
|
||||
}
|
||||
|
||||
async fn async_main() {
|
||||
foo(Box::pin(async { 1 })).await
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------- //
|
||||
// Implementation Details Below...
|
||||
|
||||
use std::pin::Pin;
|
||||
use std::task::*;
|
||||
|
||||
pub fn noop_waker() -> Waker {
|
||||
let raw = RawWaker::new(std::ptr::null(), &NOOP_WAKER_VTABLE);
|
||||
|
||||
// SAFETY: the contracts for RawWaker and RawWakerVTable are upheld
|
||||
unsafe { Waker::from_raw(raw) }
|
||||
}
|
||||
|
||||
const NOOP_WAKER_VTABLE: RawWakerVTable = RawWakerVTable::new(noop_clone, noop, noop, noop);
|
||||
|
||||
unsafe fn noop_clone(_p: *const ()) -> RawWaker {
|
||||
RawWaker::new(std::ptr::null(), &NOOP_WAKER_VTABLE)
|
||||
}
|
||||
|
||||
unsafe fn noop(_p: *const ()) {}
|
||||
|
||||
let mut fut = async_main();
|
||||
|
||||
// Poll loop, just to test the future...
|
||||
let waker = noop_waker();
|
||||
let ctx = &mut Context::from_waker(&waker);
|
||||
|
||||
loop {
|
||||
match unsafe { Pin::new_unchecked(&mut fut).poll(ctx) } {
|
||||
Poll::Pending => {}
|
||||
Poll::Ready(()) => break,
|
||||
}
|
||||
}
|
||||
}
|
1
src/tools/miri/tests/pass/dyn-star.stdout
Normal file
|
@ -0,0 +1 @@
|
|||
dispatch_on_pin_mut: value: 1
|
|
@ -711,6 +711,7 @@ dependencies = [
|
|||
"limit",
|
||||
"memchr",
|
||||
"once_cell",
|
||||
"oorandom",
|
||||
"parser",
|
||||
"profile",
|
||||
"rayon",
|
||||
|
@ -932,9 +933,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "lsp-types"
|
||||
version = "0.93.2"
|
||||
version = "0.94.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51"
|
||||
checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"serde",
|
||||
|
@ -1173,6 +1174,7 @@ dependencies = [
|
|||
"limit",
|
||||
"rustc-ap-rustc_lexer",
|
||||
"sourcegen",
|
||||
"stdx",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
|
@ -74,3 +74,5 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" }
|
|||
tt = { path = "./crates/tt", version = "0.0.0" }
|
||||
vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
|
||||
vfs = { path = "./crates/vfs", version = "0.0.0" }
|
||||
# non-local crates
|
||||
smallvec = { version = "1.10.0", features = ["const_new", "union", "const_generics"] }
|
||||
|
|
|
@ -27,7 +27,7 @@ itertools = "0.10.5"
|
|||
la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
|
||||
once_cell = "1.17.0"
|
||||
rustc-hash = "1.1.0"
|
||||
smallvec = "1.10.0"
|
||||
smallvec.workspace = true
|
||||
tracing = "0.1.35"
|
||||
|
||||
rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false }
|
||||
|
|
|
@ -2,9 +2,10 @@
|
|||
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
|
||||
use base_db::CrateId;
|
||||
use cfg::CfgOptions;
|
||||
use either::Either;
|
||||
|
||||
use hir_expand::{
|
||||
name::{AsName, Name},
|
||||
HirFileId, InFile,
|
||||
|
@ -24,12 +25,12 @@ use crate::{
|
|||
src::HasChildSource,
|
||||
src::HasSource,
|
||||
trace::Trace,
|
||||
tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
|
||||
type_ref::TypeRef,
|
||||
visibility::RawVisibility,
|
||||
EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId,
|
||||
VariantId,
|
||||
};
|
||||
use cfg::CfgOptions;
|
||||
|
||||
/// Note that we use `StructData` for unions as well!
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
|
|
|
@ -19,7 +19,7 @@ use la_arena::{Arena, ArenaMap};
|
|||
use limit::Limit;
|
||||
use profile::Count;
|
||||
use rustc_hash::FxHashMap;
|
||||
use syntax::{ast, AstPtr, SyntaxNodePtr};
|
||||
use syntax::{ast, AstPtr, SyntaxNode, SyntaxNodePtr};
|
||||
|
||||
use crate::{
|
||||
attr::Attrs,
|
||||
|
@ -51,7 +51,8 @@ pub struct Expander {
|
|||
def_map: Arc<DefMap>,
|
||||
current_file_id: HirFileId,
|
||||
module: LocalModuleId,
|
||||
recursion_limit: usize,
|
||||
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
|
||||
recursion_depth: usize,
|
||||
}
|
||||
|
||||
impl CfgExpander {
|
||||
|
@ -84,7 +85,7 @@ impl Expander {
|
|||
def_map,
|
||||
current_file_id,
|
||||
module: module.local_id,
|
||||
recursion_limit: 0,
|
||||
recursion_depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -93,31 +94,37 @@ impl Expander {
|
|||
db: &dyn DefDatabase,
|
||||
macro_call: ast::MacroCall,
|
||||
) -> Result<ExpandResult<Option<(Mark, T)>>, UnresolvedMacro> {
|
||||
if self.recursion_limit(db).check(self.recursion_limit + 1).is_err() {
|
||||
cov_mark::hit!(your_stack_belongs_to_me);
|
||||
return Ok(ExpandResult::only_err(ExpandError::Other(
|
||||
"reached recursion limit during macro expansion".into(),
|
||||
)));
|
||||
let mut unresolved_macro_err = None;
|
||||
|
||||
let result = self.within_limit(db, |this| {
|
||||
let macro_call = InFile::new(this.current_file_id, ¯o_call);
|
||||
|
||||
let resolver =
|
||||
|path| this.resolve_path_as_macro(db, &path).map(|it| macro_id_to_def_id(db, it));
|
||||
|
||||
let mut err = None;
|
||||
let call_id = match macro_call.as_call_id_with_errors(
|
||||
db,
|
||||
this.def_map.krate(),
|
||||
resolver,
|
||||
&mut |e| {
|
||||
err.get_or_insert(e);
|
||||
},
|
||||
) {
|
||||
Ok(call_id) => call_id,
|
||||
Err(resolve_err) => {
|
||||
unresolved_macro_err = Some(resolve_err);
|
||||
return ExpandResult { value: None, err: None };
|
||||
}
|
||||
};
|
||||
ExpandResult { value: call_id.ok(), err }
|
||||
});
|
||||
|
||||
if let Some(err) = unresolved_macro_err {
|
||||
Err(err)
|
||||
} else {
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
let macro_call = InFile::new(self.current_file_id, ¯o_call);
|
||||
|
||||
let resolver =
|
||||
|path| self.resolve_path_as_macro(db, &path).map(|it| macro_id_to_def_id(db, it));
|
||||
|
||||
let mut err = None;
|
||||
let call_id =
|
||||
macro_call.as_call_id_with_errors(db, self.def_map.krate(), resolver, &mut |e| {
|
||||
err.get_or_insert(e);
|
||||
})?;
|
||||
let call_id = match call_id {
|
||||
Ok(it) => it,
|
||||
Err(_) => {
|
||||
return Ok(ExpandResult { value: None, err });
|
||||
}
|
||||
};
|
||||
|
||||
Ok(self.enter_expand_inner(db, call_id, err))
|
||||
}
|
||||
|
||||
pub fn enter_expand_id<T: ast::AstNode>(
|
||||
|
@ -125,15 +132,14 @@ impl Expander {
|
|||
db: &dyn DefDatabase,
|
||||
call_id: MacroCallId,
|
||||
) -> ExpandResult<Option<(Mark, T)>> {
|
||||
self.enter_expand_inner(db, call_id, None)
|
||||
self.within_limit(db, |_this| ExpandResult::ok(Some(call_id)))
|
||||
}
|
||||
|
||||
fn enter_expand_inner<T: ast::AstNode>(
|
||||
&mut self,
|
||||
fn enter_expand_inner(
|
||||
db: &dyn DefDatabase,
|
||||
call_id: MacroCallId,
|
||||
mut err: Option<ExpandError>,
|
||||
) -> ExpandResult<Option<(Mark, T)>> {
|
||||
) -> ExpandResult<Option<(HirFileId, SyntaxNode)>> {
|
||||
if err.is_none() {
|
||||
err = db.macro_expand_error(call_id);
|
||||
}
|
||||
|
@ -154,29 +160,21 @@ impl Expander {
|
|||
}
|
||||
};
|
||||
|
||||
let node = match T::cast(raw_node) {
|
||||
Some(it) => it,
|
||||
None => {
|
||||
// This can happen without being an error, so only forward previous errors.
|
||||
return ExpandResult { value: None, err };
|
||||
}
|
||||
};
|
||||
|
||||
tracing::debug!("macro expansion {:#?}", node.syntax());
|
||||
|
||||
self.recursion_limit += 1;
|
||||
let mark =
|
||||
Mark { file_id: self.current_file_id, bomb: DropBomb::new("expansion mark dropped") };
|
||||
self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
|
||||
self.current_file_id = file_id;
|
||||
|
||||
ExpandResult { value: Some((mark, node)), err }
|
||||
ExpandResult { value: Some((file_id, raw_node)), err }
|
||||
}
|
||||
|
||||
pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
|
||||
self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id);
|
||||
self.current_file_id = mark.file_id;
|
||||
self.recursion_limit -= 1;
|
||||
if self.recursion_depth == usize::MAX {
|
||||
// Recursion limit has been reached somewhere in the macro expansion tree. Reset the
|
||||
// depth only when we get out of the tree.
|
||||
if !self.current_file_id.is_macro() {
|
||||
self.recursion_depth = 0;
|
||||
}
|
||||
} else {
|
||||
self.recursion_depth -= 1;
|
||||
}
|
||||
mark.bomb.defuse();
|
||||
}
|
||||
|
||||
@@ -215,6 +213,50 @@ impl Expander {
        #[cfg(test)]
        return Limit::new(std::cmp::min(32, limit));
    }

    fn within_limit<F, T: ast::AstNode>(
        &mut self,
        db: &dyn DefDatabase,
        op: F,
    ) -> ExpandResult<Option<(Mark, T)>>
    where
        F: FnOnce(&mut Self) -> ExpandResult<Option<MacroCallId>>,
    {
        if self.recursion_depth == usize::MAX {
            // Recursion limit has been reached somewhere in the macro expansion tree. We should
            // stop expanding other macro calls in this tree, or else this may result in
            // exponential number of macro expansions, leading to a hang.
            //
            // The overflow error should have been reported when it occurred (see the next branch),
            // so don't return overflow error here to avoid diagnostics duplication.
            cov_mark::hit!(overflow_but_not_me);
            return ExpandResult::only_err(ExpandError::RecursionOverflowPosioned);
        } else if self.recursion_limit(db).check(self.recursion_depth + 1).is_err() {
            self.recursion_depth = usize::MAX;
            cov_mark::hit!(your_stack_belongs_to_me);
            return ExpandResult::only_err(ExpandError::Other(
                "reached recursion limit during macro expansion".into(),
            ));
        }

        let ExpandResult { value, err } = op(self);
        let Some(call_id) = value else {
            return ExpandResult { value: None, err };
        };

        Self::enter_expand_inner(db, call_id, err).map(|value| {
            value.and_then(|(new_file_id, node)| {
                let node = T::cast(node)?;

                self.recursion_depth += 1;
                self.cfg_expander.hygiene = Hygiene::new(db.upcast(), new_file_id);
                let old_file_id = std::mem::replace(&mut self.current_file_id, new_file_id);
                let mark =
                    Mark { file_id: old_file_id, bomb: DropBomb::new("expansion mark dropped") };
                Some((mark, node))
            })
        })
    }
}
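The `within_limit` hunk above centralizes the recursion accounting: the depth is bumped only after a successful expansion, and a failed limit check poisons the counter with `usize::MAX` so sibling calls in the same tree bail out without re-reporting the error. A minimal standalone sketch of that poisoning idea, using toy types rather than the real `Expander`/`ExpandResult` API:

```rust
/// Standalone sketch (not rust-analyzer's real types): a recursion guard that,
/// once the limit is exceeded, "poisons" the depth counter so every further
/// expansion attempt in the same tree bails out quickly instead of fanning out.
struct ExpansionGuard {
    depth: usize,
    limit: usize,
}

#[derive(Debug, PartialEq)]
enum GuardError {
    /// The call that actually crossed the limit.
    Overflow,
    /// A call inside an already-poisoned tree; callers can skip reporting it.
    Poisoned,
}

impl ExpansionGuard {
    fn new(limit: usize) -> Self {
        Self { depth: 0, limit }
    }

    /// Returns Ok(()) if one more nested expansion is allowed.
    fn enter(&mut self) -> Result<(), GuardError> {
        if self.depth == usize::MAX {
            return Err(GuardError::Poisoned);
        }
        if self.depth + 1 > self.limit {
            self.depth = usize::MAX; // poison the whole tree
            return Err(GuardError::Overflow);
        }
        self.depth += 1;
        Ok(())
    }

    /// Called when leaving an expansion; the poison is reset only at the root.
    fn exit(&mut self, at_root: bool) {
        if self.depth == usize::MAX {
            if at_root {
                self.depth = 0;
            }
        } else {
            self.depth -= 1;
        }
    }
}

fn main() {
    let mut guard = ExpansionGuard::new(2);
    assert!(guard.enter().is_ok());
    assert!(guard.enter().is_ok());
    assert_eq!(guard.enter(), Err(GuardError::Overflow));
    // Sibling expansions in the same poisoned tree are rejected cheaply.
    assert_eq!(guard.enter(), Err(GuardError::Poisoned));
    guard.exit(true);
    assert!(guard.enter().is_ok());
}
```

The sentinel trades a separate "overflowed" flag for a value the existing depth checks already reject, which is why the real `exit` only resets it once the walk leaves the macro tree.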
|
||||
#[derive(Debug)]
|
||||
|
|
|
@ -624,6 +624,10 @@ impl ExprCollector<'_> {
|
|||
krate: *krate,
|
||||
});
|
||||
}
|
||||
Some(ExpandError::RecursionOverflowPosioned) => {
|
||||
// Recursion limit has been reached in the macro expansion tree, but not in
|
||||
// this very macro call. Don't add diagnostics to avoid duplication.
|
||||
}
|
||||
Some(err) => {
|
||||
self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
|
||||
node: InFile::new(outer_file, syntax_ptr),
|
||||
|
@ -636,6 +640,8 @@ impl ExprCollector<'_> {
|
|||
|
||||
match res.value {
|
||||
Some((mark, expansion)) => {
|
||||
// Keep collecting even with expansion errors so we can provide completions and
|
||||
// other services in incomplete macro expressions.
|
||||
self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id);
|
||||
let prev_ast_id_map = mem::replace(
|
||||
&mut self.ast_id_map,
|
||||
|
|
|
@ -61,6 +61,19 @@ fn main() { n_nuple!(1,2,3); }
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn your_stack_belongs_to_me2() {
|
||||
cov_mark::check!(overflow_but_not_me);
|
||||
lower(
|
||||
r#"
|
||||
macro_rules! foo {
|
||||
() => {{ foo!(); foo!(); }}
|
||||
}
|
||||
fn main() { foo!(); }
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn recursion_limit() {
|
||||
cov_mark::check!(your_stack_belongs_to_me);
|
||||
|
@@ -659,15 +659,16 @@ fn desugar_future_path(orig: TypeRef) -> Path {
    let path = path![core::future::Future];
    let mut generic_args: Vec<_> =
        std::iter::repeat(None).take(path.segments().len() - 1).collect();
    let mut last = GenericArgs::empty();
    let binding = AssociatedTypeBinding {
        name: name![Output],
        args: None,
        type_ref: Some(orig),
        bounds: Vec::new(),
        bounds: Box::default(),
    };
    last.bindings.push(binding);
    generic_args.push(Some(Interned::new(last)));
    generic_args.push(Some(Interned::new(GenericArgs {
        bindings: Box::new([binding]),
        ..GenericArgs::empty()
    })));

    Path::from_known_path(path, generic_args)
}
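For reference, a toy model (hypothetical `KnownPath`/`Args` types, not the hir-def ones) of what `desugar_future_path` builds after this change: one generic-args slot per path segment, with only the final `Future` segment carrying the boxed `Output` binding.

```rust
/// Toy model of assembling a known path such as `core::future::Future<Output = T>`:
/// one generic-args slot per segment, only the last one populated.
#[derive(Debug, Clone, PartialEq)]
struct Args {
    bindings: Box<[(String, String)]>, // (assoc type name, bound type)
}

#[derive(Debug)]
struct KnownPath {
    segments: Vec<String>,
    generic_args: Vec<Option<Args>>, // same length as `segments`
}

fn desugar_future(output_ty: &str) -> KnownPath {
    let segments: Vec<String> =
        ["core", "future", "Future"].iter().map(|s| s.to_string()).collect();
    // `None` for every segment except the final `Future`, which gets `Output = ...`.
    let mut generic_args: Vec<Option<Args>> =
        std::iter::repeat(None).take(segments.len() - 1).collect();
    generic_args.push(Some(Args {
        bindings: Box::new([("Output".to_string(), output_ty.to_string())]),
    }));
    assert_eq!(segments.len(), generic_args.len());
    KnownPath { segments, generic_args }
}

fn main() {
    let p = desugar_future("i32");
    assert_eq!(p.segments.len(), 3);
    assert_eq!(p.generic_args[0], None);
    assert_eq!(p.generic_args[2].as_ref().unwrap().bindings[0].0, "Output");
    println!("{p:?}");
}
```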
|
|
@ -1476,7 +1476,7 @@ macro_rules! m {
|
|||
/* parse error: expected identifier */
|
||||
/* parse error: expected SEMICOLON */
|
||||
/* parse error: expected SEMICOLON */
|
||||
/* parse error: expected expression */
|
||||
/* parse error: expected expression, item or let statement */
|
||||
fn f() {
|
||||
K::(C("0"));
|
||||
}
|
||||
|
|
|
@ -830,8 +830,7 @@ macro_rules! rgb_color {
|
|||
/* parse error: expected COMMA */
|
||||
/* parse error: expected R_ANGLE */
|
||||
/* parse error: expected SEMICOLON */
|
||||
/* parse error: expected SEMICOLON */
|
||||
/* parse error: expected expression */
|
||||
/* parse error: expected expression, item or let statement */
|
||||
pub fn new() {
|
||||
let _ = 0as u32<<(8+8);
|
||||
}
|
||||
|
@ -848,21 +847,21 @@ pub fn new() {
|
|||
// BLOCK_EXPR@10..31
|
||||
// STMT_LIST@10..31
|
||||
// L_CURLY@10..11 "{"
|
||||
// LET_STMT@11..27
|
||||
// LET_STMT@11..28
|
||||
// LET_KW@11..14 "let"
|
||||
// WILDCARD_PAT@14..15
|
||||
// UNDERSCORE@14..15 "_"
|
||||
// EQ@15..16 "="
|
||||
// CAST_EXPR@16..27
|
||||
// CAST_EXPR@16..28
|
||||
// LITERAL@16..17
|
||||
// INT_NUMBER@16..17 "0"
|
||||
// AS_KW@17..19 "as"
|
||||
// PATH_TYPE@19..27
|
||||
// PATH@19..27
|
||||
// PATH_SEGMENT@19..27
|
||||
// PATH_TYPE@19..28
|
||||
// PATH@19..28
|
||||
// PATH_SEGMENT@19..28
|
||||
// NAME_REF@19..22
|
||||
// IDENT@19..22 "u32"
|
||||
// GENERIC_ARG_LIST@22..27
|
||||
// GENERIC_ARG_LIST@22..28
|
||||
// L_ANGLE@22..23 "<"
|
||||
// TYPE_ARG@23..27
|
||||
// DYN_TRAIT_TYPE@23..27
|
||||
|
@ -877,9 +876,9 @@ pub fn new() {
|
|||
// ERROR@25..26
|
||||
// INT_NUMBER@25..26 "8"
|
||||
// PLUS@26..27 "+"
|
||||
// EXPR_STMT@27..28
|
||||
// LITERAL@27..28
|
||||
// INT_NUMBER@27..28 "8"
|
||||
// CONST_ARG@27..28
|
||||
// LITERAL@27..28
|
||||
// INT_NUMBER@27..28 "8"
|
||||
// ERROR@28..29
|
||||
// R_PAREN@28..29 ")"
|
||||
// SEMICOLON@29..30 ";"
|
||||
|
@@ -38,18 +38,18 @@ impl Display for ImportAlias {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Path {
    /// Type based path like `<T>::foo`.
    /// Note that paths like `<Type as Trait>::foo` are desugard to `Trait::<Self=Type>::foo`.
    /// Note that paths like `<Type as Trait>::foo` are desugared to `Trait::<Self=Type>::foo`.
    type_anchor: Option<Interned<TypeRef>>,
    mod_path: Interned<ModPath>,
    /// Invariant: the same len as `self.mod_path.segments`
    generic_args: Box<[Option<Interned<GenericArgs>>]>,
    /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
    generic_args: Option<Box<[Option<Interned<GenericArgs>>]>>,
}

/// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
/// also includes bindings of associated types, like in `Iterator<Item = Foo>`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericArgs {
    pub args: Vec<GenericArg>,
    pub args: Box<[GenericArg]>,
    /// This specifies whether the args contain a Self type as the first
    /// element. This is the case for path segments like `<T as Trait>`, where
    /// `T` is actually a type parameter for the path `Trait` specifying the
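The representation change above is a size optimization: most paths have no generic arguments at all, so the per-segment slots now live behind an outer `Option`. A small self-contained sketch (toy `Path`/`Args` types, not the real interned ones) of the same idea and the accessor pattern it requires:

```rust
/// Toy sketch of the representation change: instead of always allocating one
/// `Option<Args>` slot per path segment, store `None` when *no* segment has
/// generic arguments, and only pay for the slots in the rare annotated case.
#[derive(Debug, Clone, PartialEq)]
struct Args(String);

struct Path {
    segments: Vec<String>,
    /// `None` means "no segment has generic args" (the common case);
    /// `Some(slots)` keeps the old one-slot-per-segment layout.
    generic_args: Option<Box<[Option<Args>]>>,
}

impl Path {
    fn args_for(&self, idx: usize) -> Option<&Args> {
        self.generic_args.as_deref()?.get(idx)?.as_ref()
    }
}

fn main() {
    // `std::vec::Vec` as a plain path: no per-segment allocation at all.
    let plain = Path {
        segments: vec!["std".into(), "vec".into(), "Vec".into()],
        generic_args: None,
    };
    assert_eq!(plain.segments.len(), 3);
    assert!(plain.args_for(2).is_none());

    // `std::vec::Vec<u8>`: only now do we allocate the slots.
    let with_args = Path {
        segments: vec!["std".into(), "vec".into(), "Vec".into()],
        generic_args: Some(Box::new([None, None, Some(Args("u8".into()))])),
    };
    assert_eq!(with_args.args_for(2), Some(&Args("u8".into())));
}
```

The cost is that every consumer must treat the missing slice as "all `None`", which is what the `segments()`, `get`, `skip`, `take`, and `iter` changes further down do.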
|
@ -57,7 +57,7 @@ pub struct GenericArgs {
|
|||
/// is left out.
|
||||
pub has_self_type: bool,
|
||||
/// Associated type bindings like in `Iterator<Item = T>`.
|
||||
pub bindings: Vec<AssociatedTypeBinding>,
|
||||
pub bindings: Box<[AssociatedTypeBinding]>,
|
||||
/// Whether these generic args were desugared from `Trait(Arg) -> Output`
|
||||
/// parenthesis notation typically used for the `Fn` traits.
|
||||
pub desugared_from_fn: bool,
|
||||
|
@ -77,7 +77,7 @@ pub struct AssociatedTypeBinding {
|
|||
/// Bounds for the associated type, like in `Iterator<Item:
|
||||
/// SomeOtherTrait>`. (This is the unstable `associated_type_bounds`
|
||||
/// feature.)
|
||||
pub bounds: Vec<Interned<TypeBound>>,
|
||||
pub bounds: Box<[Interned<TypeBound>]>,
|
||||
}
|
||||
|
||||
/// A single generic argument.
|
||||
|
@ -102,7 +102,7 @@ impl Path {
|
|||
) -> Path {
|
||||
let generic_args = generic_args.into();
|
||||
assert_eq!(path.len(), generic_args.len());
|
||||
Path { type_anchor: None, mod_path: Interned::new(path), generic_args }
|
||||
Path { type_anchor: None, mod_path: Interned::new(path), generic_args: Some(generic_args) }
|
||||
}
|
||||
|
||||
pub fn kind(&self) -> &PathKind {
|
||||
|
@@ -114,7 +114,14 @@ impl Path {
    }

    pub fn segments(&self) -> PathSegments<'_> {
        PathSegments { segments: self.mod_path.segments(), generic_args: &self.generic_args }
        let s = PathSegments {
            segments: self.mod_path.segments(),
            generic_args: self.generic_args.as_deref(),
        };
        if let Some(generic_args) = s.generic_args {
            assert_eq!(s.segments.len(), generic_args.len());
        }
        s
    }

    pub fn mod_path(&self) -> &ModPath {
@@ -131,13 +138,15 @@ impl Path {
                self.mod_path.kind,
                self.mod_path.segments()[..self.mod_path.segments().len() - 1].iter().cloned(),
            )),
            generic_args: self.generic_args[..self.generic_args.len() - 1].to_vec().into(),
            generic_args: self.generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()),
        };
        Some(res)
    }

    pub fn is_self_type(&self) -> bool {
        self.type_anchor.is_none() && *self.generic_args == [None] && self.mod_path.is_Self()
        self.type_anchor.is_none()
            && self.generic_args.as_deref().is_none()
            && self.mod_path.is_Self()
    }
}

@@ -149,11 +158,11 @@ pub struct PathSegment<'a> {

pub struct PathSegments<'a> {
    segments: &'a [Name],
    generic_args: &'a [Option<Interned<GenericArgs>>],
    generic_args: Option<&'a [Option<Interned<GenericArgs>>]>,
}

impl<'a> PathSegments<'a> {
    pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: &[] };
    pub const EMPTY: PathSegments<'static> = PathSegments { segments: &[], generic_args: None };
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
@@ -167,26 +176,29 @@ impl<'a> PathSegments<'a> {
        self.get(self.len().checked_sub(1)?)
    }
    pub fn get(&self, idx: usize) -> Option<PathSegment<'a>> {
        assert_eq!(self.segments.len(), self.generic_args.len());
        let res = PathSegment {
            name: self.segments.get(idx)?,
            args_and_bindings: self.generic_args.get(idx).unwrap().as_ref().map(|it| &**it),
            args_and_bindings: self.generic_args.and_then(|it| it.get(idx)?.as_deref()),
        };
        Some(res)
    }
    pub fn skip(&self, len: usize) -> PathSegments<'a> {
        assert_eq!(self.segments.len(), self.generic_args.len());
        PathSegments { segments: &self.segments[len..], generic_args: &self.generic_args[len..] }
        PathSegments {
            segments: &self.segments.get(len..).unwrap_or(&[]),
            generic_args: self.generic_args.and_then(|it| it.get(len..)),
        }
    }
    pub fn take(&self, len: usize) -> PathSegments<'a> {
        assert_eq!(self.segments.len(), self.generic_args.len());
        PathSegments { segments: &self.segments[..len], generic_args: &self.generic_args[..len] }
        PathSegments {
            segments: &self.segments.get(..len).unwrap_or(&self.segments),
            generic_args: self.generic_args.map(|it| it.get(..len).unwrap_or(it)),
        }
    }
    pub fn iter(&self) -> impl Iterator<Item = PathSegment<'a>> {
        self.segments.iter().zip(self.generic_args.iter()).map(|(name, args)| PathSegment {
            name,
            args_and_bindings: args.as_ref().map(|it| &**it),
        })
        self.segments
            .iter()
            .zip(self.generic_args.into_iter().flatten().chain(iter::repeat(&None)))
            .map(|(name, args)| PathSegment { name, args_and_bindings: args.as_deref() })
    }
}

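The new `iter` avoids materializing a slice when `generic_args` is `None` by flattening the option and padding with `&None`. A minimal sketch of that zip-with-padding trick on plain slices (hypothetical `pair` helper, concrete element types for brevity):

```rust
use std::iter;

/// Zip a mandatory slice with an *optional* parallel slice by flattening the
/// `Option<&[_]>` and padding with `&None`, so the zip never cuts the main
/// sequence short.
fn pair<'a>(
    items: &'a [&'a str],
    extras: Option<&'a [Option<u32>]>,
) -> impl Iterator<Item = (&'a str, Option<u32>)> + 'a {
    items
        .iter()
        .zip(extras.into_iter().flatten().chain(iter::repeat(&None)))
        .map(|(item, extra)| (*item, *extra))
}

fn main() {
    let names = ["a", "b", "c"];

    // No extras stored at all: every item pairs with `None`.
    let paired: Vec<_> = pair(&names, None::<&[Option<u32>]>).collect();
    assert!(paired.iter().all(|(_, e)| e.is_none()));

    // Extras present for some items.
    let extras = [None, Some(1u32), Some(2)];
    let paired: Vec<_> = pair(&names, Some(&extras[..])).collect();
    assert_eq!(paired[1], ("b", Some(1)));
}
```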
@@ -200,9 +212,9 @@ impl GenericArgs {

    pub(crate) fn empty() -> GenericArgs {
        GenericArgs {
            args: Vec::new(),
            args: Box::default(),
            has_self_type: false,
            bindings: Vec::new(),
            bindings: Box::default(),
            desugared_from_fn: false,
        }
    }
@ -213,7 +225,7 @@ impl From<Name> for Path {
|
|||
Path {
|
||||
type_anchor: None,
|
||||
mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))),
|
||||
generic_args: Box::new([None]),
|
||||
generic_args: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
//! Transforms syntax into `Path` objects, ideally with accounting for hygiene
|
||||
|
||||
use std::iter;
|
||||
|
||||
use crate::type_ref::ConstScalarOrPath;
|
||||
|
||||
use either::Either;
|
||||
|
@ -45,8 +47,11 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
|
|||
)
|
||||
})
|
||||
.map(Interned::new);
|
||||
if let Some(_) = args {
|
||||
generic_args.resize(segments.len(), None);
|
||||
generic_args.push(args);
|
||||
}
|
||||
segments.push(name);
|
||||
generic_args.push(args)
|
||||
}
|
||||
Either::Right(crate_id) => {
|
||||
kind = PathKind::DollarCrate(crate_id);
|
||||
|
@ -56,7 +61,6 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
|
|||
}
|
||||
ast::PathSegmentKind::SelfTypeKw => {
|
||||
segments.push(name![Self]);
|
||||
generic_args.push(None)
|
||||
}
|
||||
ast::PathSegmentKind::Type { type_ref, trait_ref } => {
|
||||
assert!(path.qualifier().is_none()); // this can only occur at the first segment
|
||||
|
@@ -77,18 +81,33 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
                kind = mod_path.kind;

                segments.extend(mod_path.segments().iter().cloned().rev());
                generic_args.extend(Vec::from(path_generic_args).into_iter().rev());
                if let Some(path_generic_args) = path_generic_args {
                    generic_args.resize(segments.len() - num_segments, None);
                    generic_args.extend(Vec::from(path_generic_args).into_iter().rev());
                } else {
                    generic_args.resize(segments.len(), None);
                }

                let self_type = GenericArg::Type(self_type);

                // Insert the type reference (T in the above example) as Self parameter for the trait
                let last_segment =
                    generic_args.iter_mut().rev().nth(num_segments.saturating_sub(1))?;
                let mut args_inner = match last_segment {
                    Some(it) => it.as_ref().clone(),
                    None => GenericArgs::empty(),
                };
                args_inner.has_self_type = true;
                args_inner.args.insert(0, GenericArg::Type(self_type));
                *last_segment = Some(Interned::new(args_inner));
                let last_segment = generic_args.get_mut(segments.len() - num_segments)?;
                *last_segment = Some(Interned::new(match last_segment.take() {
                    Some(it) => GenericArgs {
                        args: iter::once(self_type)
                            .chain(it.args.iter().cloned())
                            .collect(),
                        has_self_type: true,
                        bindings: it.bindings.clone(),
                        desugared_from_fn: it.desugared_from_fn,
                    },
                    None => GenericArgs {
                        args: Box::new([self_type]),
                        has_self_type: true,
                        ..GenericArgs::empty()
                    },
                }));
            }
        }
    }
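The rewritten branch above prepends the qualified `Self` type to whatever arguments the trait segment already had, now that `args` is a boxed slice with no `insert`. A standalone sketch of the same rebuild step with toy types:

```rust
use std::iter;

/// Sketch (toy types) of the "prepend Self" step: qualified paths such as
/// `<T as Trait>::Assoc` store `T` as an implicit first generic argument of the
/// `Trait` segment, so an existing boxed arg list is rebuilt with `T` in front.
#[derive(Debug, Clone, PartialEq)]
enum Arg {
    Type(&'static str),
}

#[derive(Debug, Clone, PartialEq)]
struct Args {
    args: Box<[Arg]>,
    has_self_type: bool,
}

fn prepend_self(existing: Option<Args>, self_ty: Arg) -> Args {
    match existing {
        // `Trait<U>` becomes `Trait<T, U>` with the Self flag set.
        Some(it) => Args {
            args: iter::once(self_ty).chain(it.args.iter().cloned()).collect(),
            has_self_type: true,
        },
        // A bare `Trait` segment just gains the single Self argument.
        None => Args { args: Box::new([self_ty]), has_self_type: true },
    }
}

fn main() {
    let with_existing = prepend_self(
        Some(Args { args: Box::new([Arg::Type("U")]), has_self_type: false }),
        Arg::Type("T"),
    );
    assert_eq!(with_existing.args.len(), 2);
    assert_eq!(with_existing.args[0], Arg::Type("T"));

    let bare = prepend_self(None, Arg::Type("T"));
    assert!(bare.has_self_type);
}
```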
@ -115,7 +134,10 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
|
|||
};
|
||||
}
|
||||
segments.reverse();
|
||||
generic_args.reverse();
|
||||
if !generic_args.is_empty() {
|
||||
generic_args.resize(segments.len(), None);
|
||||
generic_args.reverse();
|
||||
}
|
||||
|
||||
if segments.is_empty() && kind == PathKind::Plain && type_anchor.is_none() {
|
||||
// plain empty paths don't exist, this means we got a single `self` segment as our path
|
||||
|
@ -135,7 +157,11 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
|
|||
}
|
||||
|
||||
let mod_path = Interned::new(ModPath::from_segments(kind, segments));
|
||||
return Some(Path { type_anchor, mod_path, generic_args: generic_args.into() });
|
||||
return Some(Path {
|
||||
type_anchor,
|
||||
mod_path,
|
||||
generic_args: if generic_args.is_empty() { None } else { Some(generic_args.into()) },
|
||||
});
|
||||
|
||||
fn qualifier(path: &ast::Path) -> Option<ast::Path> {
|
||||
if let Some(q) = path.qualifier() {
|
||||
|
@ -174,7 +200,7 @@ pub(super) fn lower_generic_args(
|
|||
.map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it)))
|
||||
.collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
Box::default()
|
||||
};
|
||||
bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds });
|
||||
}
|
||||
|
@ -195,7 +221,12 @@ pub(super) fn lower_generic_args(
|
|||
if args.is_empty() && bindings.is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: false })
|
||||
Some(GenericArgs {
|
||||
args: args.into_boxed_slice(),
|
||||
has_self_type: false,
|
||||
bindings: bindings.into_boxed_slice(),
|
||||
desugared_from_fn: false,
|
||||
})
|
||||
}
|
||||
|
||||
/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
|
||||
|
@@ -205,33 +236,30 @@ fn lower_generic_args_from_fn_path(
    params: Option<ast::ParamList>,
    ret_type: Option<ast::RetType>,
) -> Option<GenericArgs> {
    let mut args = Vec::new();
    let mut bindings = Vec::new();
    let params = params?;
    let mut param_types = Vec::new();
    for param in params.params() {
        let type_ref = TypeRef::from_ast_opt(ctx, param.ty());
        param_types.push(type_ref);
    }
    let arg = GenericArg::Type(TypeRef::Tuple(param_types));
    args.push(arg);
    if let Some(ret_type) = ret_type {
    let args = Box::new([GenericArg::Type(TypeRef::Tuple(param_types))]);
    let bindings = if let Some(ret_type) = ret_type {
        let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
        bindings.push(AssociatedTypeBinding {
        Box::new([AssociatedTypeBinding {
            name: name![Output],
            args: None,
            type_ref: Some(type_ref),
            bounds: Vec::new(),
        });
            bounds: Box::default(),
        }])
    } else {
        // -> ()
        let type_ref = TypeRef::Tuple(Vec::new());
        bindings.push(AssociatedTypeBinding {
        Box::new([AssociatedTypeBinding {
            name: name![Output],
            args: None,
            type_ref: Some(type_ref),
            bounds: Vec::new(),
        });
    }
            bounds: Box::default(),
        }])
    };
    Some(GenericArgs { args, has_self_type: false, bindings, desugared_from_fn: true })
}
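As a reminder of what this lowering produces, here is a toy model (string-based stand-ins, not the real `TypeRef`/`GenericArgs`) of the `Fn(X, Y) -> Z` sugar: a single tuple argument plus an `Output` binding, defaulting to `()` when the return type is omitted.

```rust
/// Toy model of parenthesized `Fn`-path sugar: `Fn(X, Y) -> Z` is stored as one
/// tuple argument `(X, Y)` plus an `Output = Z` associated-type binding.
#[derive(Debug, PartialEq)]
struct FnSugar {
    args: Box<[String]>,               // one entry: the parameter tuple
    bindings: Box<[(String, String)]>, // ("Output", return type)
    desugared_from_fn: bool,
}

fn lower_fn_sugar(params: &[&str], ret: Option<&str>) -> FnSugar {
    let tuple = format!("({})", params.join(", "));
    let output = ret.unwrap_or("()").to_string();
    FnSugar {
        args: Box::new([tuple]),
        bindings: Box::new([("Output".to_string(), output)]),
        desugared_from_fn: true,
    }
}

fn main() {
    let sugar = lower_fn_sugar(&["u32", "String"], Some("bool"));
    assert!(sugar.desugared_from_fn);
    assert_eq!(sugar.args[0], "(u32, String)");
    assert_eq!(sugar.bindings[0], ("Output".to_string(), "bool".to_string()));

    // `Fn(u8)` with no return type defaults to `Output = ()`.
    assert_eq!(lower_fn_sugar(&["u8"], None).bindings[0].1, "()");
}
```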
@ -71,7 +71,7 @@ pub(crate) fn print_generic_args(generics: &GenericArgs, buf: &mut dyn Write) ->
|
|||
first = false;
|
||||
print_generic_arg(arg, buf)?;
|
||||
}
|
||||
for binding in &generics.bindings {
|
||||
for binding in generics.bindings.iter() {
|
||||
if !first {
|
||||
write!(buf, ", ")?;
|
||||
}
|
||||
|
|
|
@ -292,7 +292,7 @@ impl TypeRef {
|
|||
}
|
||||
for segment in path.segments().iter() {
|
||||
if let Some(args_and_bindings) = segment.args_and_bindings {
|
||||
for arg in &args_and_bindings.args {
|
||||
for arg in args_and_bindings.args.iter() {
|
||||
match arg {
|
||||
crate::path::GenericArg::Type(type_ref) => {
|
||||
go(type_ref, f);
|
||||
|
@ -301,11 +301,11 @@ impl TypeRef {
|
|||
| crate::path::GenericArg::Lifetime(_) => {}
|
||||
}
|
||||
}
|
||||
for binding in &args_and_bindings.bindings {
|
||||
for binding in args_and_bindings.bindings.iter() {
|
||||
if let Some(type_ref) = &binding.type_ref {
|
||||
go(type_ref, f);
|
||||
}
|
||||
for bound in &binding.bounds {
|
||||
for bound in binding.bounds.iter() {
|
||||
match bound.as_ref() {
|
||||
TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => {
|
||||
go_path(path, f)
|
||||
|
|
|
@ -21,7 +21,7 @@ itertools = "0.10.5"
|
|||
hashbrown = { version = "0.12.1", features = [
|
||||
"inline-more",
|
||||
], default-features = false }
|
||||
smallvec = { version = "1.10.0", features = ["const_new"] }
|
||||
smallvec.workspace = true
|
||||
|
||||
# local deps
|
||||
stdx.workspace = true
|
||||
|
|
|
@@ -55,6 +55,7 @@ pub type ExpandResult<T> = ValueResult<T, ExpandError>;
pub enum ExpandError {
    UnresolvedProcMacro(CrateId),
    Mbe(mbe::ExpandError),
    RecursionOverflowPosioned,
    Other(Box<str>),
}

@@ -69,6 +70,9 @@ impl fmt::Display for ExpandError {
        match self {
            ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"),
            ExpandError::Mbe(it) => it.fmt(f),
            ExpandError::RecursionOverflowPosioned => {
                f.write_str("overflow expanding the original macro")
            }
            ExpandError::Other(it) => f.write_str(it),
        }
    }

@ -16,7 +16,7 @@ cov-mark = "2.0.0-pre.1"
|
|||
itertools = "0.10.5"
|
||||
arrayvec = "0.7.2"
|
||||
bitflags = "1.3.2"
|
||||
smallvec = "1.10.0"
|
||||
smallvec.workspace = true
|
||||
ena = "0.14.0"
|
||||
tracing = "0.1.35"
|
||||
rustc-hash = "1.1.0"
|
||||
|
|
|
@ -1419,7 +1419,7 @@ impl HirDisplay for Path {
|
|||
|
||||
write!(f, "<")?;
|
||||
let mut first = true;
|
||||
for arg in &generic_args.args {
|
||||
for arg in generic_args.args.iter() {
|
||||
if first {
|
||||
first = false;
|
||||
if generic_args.has_self_type {
|
||||
|
@ -1431,7 +1431,7 @@ impl HirDisplay for Path {
|
|||
}
|
||||
arg.hir_fmt(f)?;
|
||||
}
|
||||
for binding in &generic_args.bindings {
|
||||
for binding in generic_args.bindings.iter() {
|
||||
if first {
|
||||
first = false;
|
||||
} else {
|
||||
|
@ -1445,7 +1445,7 @@ impl HirDisplay for Path {
|
|||
}
|
||||
None => {
|
||||
write!(f, ": ")?;
|
||||
f.write_joined(&binding.bounds, " + ")?;
|
||||
f.write_joined(binding.bounds.iter(), " + ")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1025,7 +1025,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
last_segment
|
||||
.into_iter()
|
||||
.filter_map(|segment| segment.args_and_bindings)
|
||||
.flat_map(|args_and_bindings| &args_and_bindings.bindings)
|
||||
.flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
|
||||
.flat_map(move |binding| {
|
||||
let found = associated_type_by_name_including_super_traits(
|
||||
self.db,
|
||||
|
@ -1068,7 +1068,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
|
||||
preds.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
|
||||
}
|
||||
for bound in &binding.bounds {
|
||||
for bound in binding.bounds.iter() {
|
||||
preds.extend(self.lower_type_bound(
|
||||
bound,
|
||||
TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
|
||||
|
|
|
@ -16,7 +16,7 @@ rustc-hash = "1.1.0"
|
|||
either = "1.7.0"
|
||||
arrayvec = "0.7.2"
|
||||
itertools = "0.10.5"
|
||||
smallvec = "1.10.0"
|
||||
smallvec.workspace = true
|
||||
once_cell = "1.17.0"
|
||||
|
||||
# local deps
|
||||
|
|
|
@ -16,7 +16,7 @@ cov-mark = "2.0.0-pre.1"
|
|||
|
||||
itertools = "0.10.5"
|
||||
either = "1.7.0"
|
||||
smallvec = "1.10.0"
|
||||
smallvec.workspace = true
|
||||
|
||||
# local deps
|
||||
stdx.workspace = true
|
||||
|
|
|
@@ -180,7 +180,9 @@ pub(crate) fn generate_getter_impl(

        // Insert `$0` only for last getter we generate
        if i == record_fields_count - 1 {
            getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
            if ctx.config.snippet_cap.is_some() {
                getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
            }
        }

        // For first element we do not merge with '\n', as
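The guard added above only inserts the `$0` snippet marker when the client advertises snippet support. A minimal sketch of that gating, with a hypothetical `render_getter` helper rather than the real assist plumbing:

```rust
/// Sketch of gating snippet placeholders on client capability: editors that
/// support snippets get a `$0` tab stop, plain-text clients get the source
/// unchanged (which is what the `no_snippet_cap` tests below exercise).
struct SnippetCap; // marker: "the client can handle snippet syntax"

fn render_getter(body: &str, cap: Option<SnippetCap>) -> String {
    match cap {
        Some(_) => body.replacen("fn ", "fn $0", 1),
        None => body.to_string(),
    }
}

fn main() {
    let body = "fn data(&self) -> &Data { &self.data }";
    assert!(render_getter(body, Some(SnippetCap)).contains("$0"));
    assert!(!render_getter(body, None).contains("$0"));
}
```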
@ -330,7 +332,7 @@ fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<R
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::tests::{check_assist, check_assist_not_applicable};
|
||||
use crate::tests::{check_assist, check_assist_no_snippet_cap, check_assist_not_applicable};
|
||||
|
||||
use super::*;
|
||||
|
||||
|
@ -377,6 +379,49 @@ impl Context {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_generate_getter_from_field_no_snippet_cap() {
|
||||
check_assist_no_snippet_cap(
|
||||
generate_getter,
|
||||
r#"
|
||||
struct Context {
|
||||
dat$0a: Data,
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct Context {
|
||||
data: Data,
|
||||
}
|
||||
|
||||
impl Context {
|
||||
fn data(&self) -> &Data {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
||||
check_assist_no_snippet_cap(
|
||||
generate_getter_mut,
|
||||
r#"
|
||||
struct Context {
|
||||
dat$0a: Data,
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct Context {
|
||||
data: Data,
|
||||
}
|
||||
|
||||
impl Context {
|
||||
fn data_mut(&mut self) -> &mut Data {
|
||||
&mut self.data
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_generate_getter_already_implemented() {
|
||||
check_assist_not_applicable(
|
||||
|
@ -433,6 +478,29 @@ impl Context {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_generate_getter_from_field_with_visibility_marker_no_snippet_cap() {
|
||||
check_assist_no_snippet_cap(
|
||||
generate_getter,
|
||||
r#"
|
||||
pub(crate) struct Context {
|
||||
dat$0a: Data,
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
pub(crate) struct Context {
|
||||
data: Data,
|
||||
}
|
||||
|
||||
impl Context {
|
||||
pub(crate) fn data(&self) -> &Data {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiple_generate_getter() {
|
||||
check_assist(
|
||||
|
@ -468,6 +536,41 @@ impl Context {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiple_generate_getter_no_snippet_cap() {
|
||||
check_assist_no_snippet_cap(
|
||||
generate_getter,
|
||||
r#"
|
||||
struct Context {
|
||||
data: Data,
|
||||
cou$0nt: usize,
|
||||
}
|
||||
|
||||
impl Context {
|
||||
fn data(&self) -> &Data {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct Context {
|
||||
data: Data,
|
||||
count: usize,
|
||||
}
|
||||
|
||||
impl Context {
|
||||
fn data(&self) -> &Data {
|
||||
&self.data
|
||||
}
|
||||
|
||||
fn count(&self) -> &usize {
|
||||
&self.count
|
||||
}
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_not_a_special_case() {
|
||||
cov_mark::check_count!(convert_reference_type, 0);
|
||||
|
|
|
@ -33,6 +33,20 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
|
|||
assist_emit_must_use: false,
|
||||
};
|
||||
|
||||
pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
|
||||
snippet_cap: None,
|
||||
allowed: None,
|
||||
insert_use: InsertUseConfig {
|
||||
granularity: ImportGranularity::Crate,
|
||||
prefix_kind: hir::PrefixKind::Plain,
|
||||
enforce_granularity: true,
|
||||
group: true,
|
||||
skip_glob_imports: true,
|
||||
},
|
||||
prefer_no_std: false,
|
||||
assist_emit_must_use: false,
|
||||
};
|
||||
|
||||
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
|
||||
RootDatabase::with_single_file(text)
|
||||
}
|
||||
|
@ -43,6 +57,22 @@ pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_
|
|||
check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after), None);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub(crate) fn check_assist_no_snippet_cap(
|
||||
assist: Handler,
|
||||
ra_fixture_before: &str,
|
||||
ra_fixture_after: &str,
|
||||
) {
|
||||
let ra_fixture_after = trim_indent(ra_fixture_after);
|
||||
check_with_config(
|
||||
TEST_CONFIG_NO_SNIPPET_CAP,
|
||||
assist,
|
||||
ra_fixture_before,
|
||||
ExpectedResult::After(&ra_fixture_after),
|
||||
None,
|
||||
);
|
||||
}
|
||||
|
||||
// There is no way to choose what assist within a group you want to test against,
|
||||
// so this is here to allow you choose.
|
||||
pub(crate) fn check_assist_by_label(
|
||||
|
@ -119,6 +149,17 @@ enum ExpectedResult<'a> {
|
|||
|
||||
#[track_caller]
|
||||
fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_label: Option<&str>) {
|
||||
check_with_config(TEST_CONFIG, handler, before, expected, assist_label);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn check_with_config(
|
||||
config: AssistConfig,
|
||||
handler: Handler,
|
||||
before: &str,
|
||||
expected: ExpectedResult<'_>,
|
||||
assist_label: Option<&str>,
|
||||
) {
|
||||
let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
|
||||
db.set_enable_proc_attr_macros(true);
|
||||
let text_without_caret = db.file_text(file_with_caret_id).to_string();
|
||||
|
@ -126,7 +167,6 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_la
|
|||
let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
|
||||
|
||||
let sema = Semantics::new(&db);
|
||||
let config = TEST_CONFIG;
|
||||
let ctx = AssistContext::new(sema, &config, frange);
|
||||
let resolve = match expected {
|
||||
ExpectedResult::Unresolved => AssistResolveStrategy::None,
|
||||
|
|
|
@ -16,7 +16,7 @@ cov-mark = "2.0.0-pre.1"
|
|||
itertools = "0.10.5"
|
||||
|
||||
once_cell = "1.17.0"
|
||||
smallvec = "1.10.0"
|
||||
smallvec.workspace = true
|
||||
|
||||
|
||||
# local deps
|
||||
|
|
|
@ -747,4 +747,16 @@ fn main() {
|
|||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_postfix_completions_in_if_block_that_has_an_else() {
|
||||
check(
|
||||
r#"
|
||||
fn test() {
|
||||
if true {}.$0 else {}
|
||||
}
|
||||
"#,
|
||||
expect![[r#""#]],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -571,28 +571,25 @@ impl<'a> CompletionContext<'a> {
|
|||
|
||||
// try to skip completions on path with invalid colons
|
||||
// this approach works in normal path and inside token tree
|
||||
match original_token.kind() {
|
||||
T![:] => {
|
||||
// return if no prev token before colon
|
||||
let prev_token = original_token.prev_token()?;
|
||||
if original_token.kind() == T![:] {
|
||||
// return if no prev token before colon
|
||||
let prev_token = original_token.prev_token()?;
|
||||
|
||||
// only has a single colon
|
||||
if prev_token.kind() != T![:] {
|
||||
return None;
|
||||
}
|
||||
|
||||
// has 3 colon or 2 coloncolon in a row
|
||||
// special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205
|
||||
// and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751
|
||||
if prev_token
|
||||
.prev_token()
|
||||
.map(|t| t.kind() == T![:] || t.kind() == T![::])
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
// only has a single colon
|
||||
if prev_token.kind() != T![:] {
|
||||
return None;
|
||||
}
|
||||
|
||||
// has 3 colon or 2 coloncolon in a row
|
||||
// special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205
|
||||
// and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751
|
||||
if prev_token
|
||||
.prev_token()
|
||||
.map(|t| t.kind() == T![:] || t.kind() == T![::])
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let AnalysisResult {
|
||||
|
|
|
@ -29,6 +29,7 @@ pub(super) struct AnalysisResult {
|
|||
pub(super) analysis: CompletionAnalysis,
|
||||
pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
|
||||
pub(super) qualifier_ctx: QualifierCtx,
|
||||
/// the original token of the expanded file
|
||||
pub(super) token: SyntaxToken,
|
||||
pub(super) offset: TextSize,
|
||||
}
|
||||
|
@ -213,15 +214,6 @@ fn analyze(
|
|||
let _p = profile::span("CompletionContext::analyze");
|
||||
let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
|
||||
expansion_result;
|
||||
let syntax_element = NodeOrToken::Token(fake_ident_token);
|
||||
if is_in_token_of_for_loop(syntax_element.clone()) {
|
||||
// for pat $0
|
||||
// there is nothing to complete here except `in` keyword
|
||||
// don't bother populating the context
|
||||
// FIXME: the completion calculations should end up good enough
|
||||
// such that this special case becomes unnecessary
|
||||
return None;
|
||||
}
|
||||
|
||||
// Overwrite the path kind for derives
|
||||
if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
|
||||
|
@ -249,37 +241,35 @@ fn analyze(
|
|||
return None;
|
||||
}
|
||||
|
||||
let name_like = match find_node_at_offset(&speculative_file, offset) {
|
||||
Some(it) => it,
|
||||
None => {
|
||||
let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
|
||||
CompletionAnalysis::String {
|
||||
original,
|
||||
expanded: ast::String::cast(self_token.clone()),
|
||||
let Some(name_like) = find_node_at_offset(&speculative_file, offset) else {
|
||||
let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
|
||||
CompletionAnalysis::String {
|
||||
original,
|
||||
expanded: ast::String::cast(self_token.clone()),
|
||||
}
|
||||
} else {
|
||||
// Fix up trailing whitespace problem
|
||||
// #[attr(foo = $0
|
||||
let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
|
||||
let p = token.parent()?;
|
||||
if p.kind() == SyntaxKind::TOKEN_TREE
|
||||
&& p.ancestors().any(|it| it.kind() == SyntaxKind::META)
|
||||
{
|
||||
let colon_prefix = previous_non_trivia_token(self_token.clone())
|
||||
.map_or(false, |it| T![:] == it.kind());
|
||||
CompletionAnalysis::UnexpandedAttrTT {
|
||||
fake_attribute_under_caret: fake_ident_token
|
||||
.parent_ancestors()
|
||||
.find_map(ast::Attr::cast),
|
||||
colon_prefix,
|
||||
}
|
||||
} else {
|
||||
// Fix up trailing whitespace problem
|
||||
// #[attr(foo = $0
|
||||
let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
|
||||
let p = token.parent()?;
|
||||
if p.kind() == SyntaxKind::TOKEN_TREE
|
||||
&& p.ancestors().any(|it| it.kind() == SyntaxKind::META)
|
||||
{
|
||||
let colon_prefix = previous_non_trivia_token(self_token.clone())
|
||||
.map_or(false, |it| T![:] == it.kind());
|
||||
CompletionAnalysis::UnexpandedAttrTT {
|
||||
fake_attribute_under_caret: syntax_element
|
||||
.ancestors()
|
||||
.find_map(ast::Attr::cast),
|
||||
colon_prefix,
|
||||
}
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
return Some((analysis, (None, None), QualifierCtx::default()));
|
||||
}
|
||||
return None;
|
||||
}
|
||||
};
|
||||
return Some((analysis, (None, None), QualifierCtx::default()));
|
||||
};
|
||||
|
||||
let expected = expected_type_and_name(sema, self_token, &name_like);
|
||||
let mut qual_ctx = QualifierCtx::default();
|
||||
let analysis = match name_like {
|
||||
|
@ -290,6 +280,22 @@ fn analyze(
|
|||
let parent = name_ref.syntax().parent()?;
|
||||
let (nameref_ctx, qualifier_ctx) =
|
||||
classify_name_ref(sema, &original_file, name_ref, parent)?;
|
||||
|
||||
if let NameRefContext {
|
||||
kind:
|
||||
NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..),
|
||||
..
|
||||
} = &nameref_ctx
|
||||
{
|
||||
if is_in_token_of_for_loop(path) {
|
||||
// for pat $0
|
||||
// there is nothing to complete here except `in` keyword
|
||||
// don't bother populating the context
|
||||
// Ideally this special casing wouldn't be needed, but the parser recovers
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
qual_ctx = qualifier_ctx;
|
||||
CompletionAnalysis::NameRef(nameref_ctx)
|
||||
}
|
||||
|
@ -323,16 +329,14 @@ fn expected_type_and_name(
|
|||
ast::FieldExpr(e) => e
|
||||
.syntax()
|
||||
.ancestors()
|
||||
.map_while(ast::FieldExpr::cast)
|
||||
.last()
|
||||
.map(|it| it.syntax().clone()),
|
||||
.take_while(|it| ast::FieldExpr::can_cast(it.kind()))
|
||||
.last(),
|
||||
ast::PathSegment(e) => e
|
||||
.syntax()
|
||||
.ancestors()
|
||||
.skip(1)
|
||||
.take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
|
||||
.find_map(ast::PathExpr::cast)
|
||||
.map(|it| it.syntax().clone()),
|
||||
.find(|it| ast::PathExpr::can_cast(it.kind())),
|
||||
_ => None
|
||||
}
|
||||
};
|
||||
|
@ -605,6 +609,18 @@ fn classify_name_ref(
|
|||
},
|
||||
_ => false,
|
||||
};
|
||||
|
||||
let reciever_is_part_of_indivisible_expression = match &receiver {
|
||||
Some(ast::Expr::IfExpr(_)) => {
|
||||
let next_token_kind = next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind());
|
||||
next_token_kind == Some(SyntaxKind::ELSE_KW)
|
||||
},
|
||||
_ => false
|
||||
};
|
||||
if reciever_is_part_of_indivisible_expression {
|
||||
return None;
|
||||
}
|
||||
|
||||
let kind = NameRefKind::DotAccess(DotAccess {
|
||||
receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
|
||||
kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
|
||||
|
@ -656,8 +672,15 @@ fn classify_name_ref(
|
|||
};
|
||||
let after_if_expr = |node: SyntaxNode| {
|
||||
let prev_expr = (|| {
|
||||
let node = match node.parent().and_then(ast::ExprStmt::cast) {
|
||||
Some(stmt) => stmt.syntax().clone(),
|
||||
None => node,
|
||||
};
|
||||
let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
|
||||
ast::ExprStmt::cast(prev_sibling)?.expr()
|
||||
|
||||
ast::ExprStmt::cast(prev_sibling.clone())
|
||||
.and_then(|it| it.expr())
|
||||
.or_else(|| ast::Expr::cast(prev_sibling))
|
||||
})();
|
||||
matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
|
||||
};
|
||||
|
@ -1251,40 +1274,29 @@ fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
|
|||
Some((use_tree.path()?, true))
|
||||
}
|
||||
|
||||
pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
|
||||
fn is_in_token_of_for_loop(path: &ast::Path) -> bool {
|
||||
// oh my ...
|
||||
(|| {
|
||||
let syntax_token = element.into_token()?;
|
||||
let range = syntax_token.text_range();
|
||||
let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;
|
||||
|
||||
// check if the current token is the `in` token of a for loop
|
||||
if let Some(token) = for_expr.in_token() {
|
||||
return Some(syntax_token == token);
|
||||
let expr = path.syntax().parent().and_then(ast::PathExpr::cast)?;
|
||||
let for_expr = expr.syntax().parent().and_then(ast::ForExpr::cast)?;
|
||||
if for_expr.in_token().is_some() {
|
||||
return Some(false);
|
||||
}
|
||||
let pat = for_expr.pat()?;
|
||||
if range.end() < pat.syntax().text_range().end() {
|
||||
// if we are inside or before the pattern we can't be at the `in` token position
|
||||
return None;
|
||||
}
|
||||
let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
|
||||
Some(match next_sibl {
|
||||
// the loop body is some node, if our token is at the start we are at the `in` position,
|
||||
// otherwise we could be in a recovered expression, we don't wanna ruin completions there
|
||||
syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
|
||||
// the loop body consists of a single token, if we are this we are certainly at the `in` token position
|
||||
syntax::NodeOrToken::Token(t) => t == syntax_token,
|
||||
syntax::NodeOrToken::Node(n) => {
|
||||
n.text_range().start() == path.syntax().text_range().start()
|
||||
}
|
||||
syntax::NodeOrToken::Token(t) => {
|
||||
t.text_range().start() == path.syntax().text_range().start()
|
||||
}
|
||||
})
|
||||
})()
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_for_is_prev2() {
|
||||
crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
|
||||
}
|
||||
|
||||
pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
|
||||
fn is_in_loop_body(node: &SyntaxNode) -> bool {
|
||||
node.ancestors()
|
||||
.take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
|
||||
.find_map(|it| {
|
||||
|
@@ -1317,6 +1329,22 @@ fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken>
    None
}

fn next_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
    let mut token = match e.into() {
        SyntaxElement::Node(n) => n.last_token()?,
        SyntaxElement::Token(t) => t,
    }
    .next_token();
    while let Some(inner) = token {
        if !inner.kind().is_trivia() {
            return Some(inner);
        } else {
            token = inner.next_token();
        }
    }
    None
}

fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
    let mut e = ele.next_sibling_or_token();
    while let Some(inner) = e {
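The new `next_non_trivia_token` mirrors the existing backward helper, walking forward until it leaves whitespace and comments; the `if true {}.$0 else {}` test relies on it to spot the upcoming `else`. A standalone sketch of the same lookahead over a toy token list (not the rowan API):

```rust
/// Forward trivia skip over a toy token list instead of real `SyntaxToken`s:
/// starting from an index, walk forward and return the first token that is not
/// whitespace or a comment.
struct Token {
    text: &'static str,
    is_trivia: bool, // whitespace or comment
}

fn next_non_trivia(tokens: &[Token], from: usize) -> Option<&Token> {
    tokens[from + 1..].iter().find(|t| !t.is_trivia)
}

fn main() {
    let tokens = [
        Token { text: "}", is_trivia: false },
        Token { text: " ", is_trivia: true },
        Token { text: "// comment", is_trivia: true },
        Token { text: "else", is_trivia: false },
    ];
    // From the closing brace, the next meaningful token is `else`; the completion
    // code uses this kind of lookahead to avoid offering postfix completions on
    // the `if` block of an `if ... else` expression.
    assert_eq!(next_non_trivia(&tokens, 0).unwrap().text, "else");
}
```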
Some files were not shown because too many files have changed in this diff.