Rollup merge of #63535 - petrochenkov:expndata, r=matthewjasper
Continue refactoring resolve and hygiene

The general goal is addressing FIXMEs from the previous PRs: merging similar data structures (plus the prerequisites for such merging) and accounting for the fact that all `ExpnId`s now have associated data in `HygieneData`, so there are fewer `Option`s to deal with. Also, some renaming; this should be the last renaming session in this area, I think.

r? @matthewjasper
commit 6e9e6ea39b
73 changed files with 636 additions and 728 deletions
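Most of the hunks below are mechanical applications of a few renames: `ExpnInfo` becomes `ExpnData`, `outer_expn_info()` becomes `outer_expn_data()` and no longer returns an `Option`, `Ident::with_empty_ctxt` becomes `Ident::with_dummy_span`, `SyntaxContext::empty()` becomes `SyntaxContext::root()`, and `Span::new(lo, hi, NO_EXPANSION)` becomes `Span::with_root_ctxt(lo, hi)`. A minimal sketch of the new call shape, using the rustc-internal APIs exactly as they appear in these hunks (the helper name `macro_call_site` is made up for illustration, and this only builds inside the rustc tree of this era):

use syntax_pos::{ExpnKind, Span};

// Hypothetical helper: where was the expansion that produced `span` invoked?
fn macro_call_site(span: Span) -> Option<Span> {
    // Old code matched on `span.ctxt().outer_expn_info()`, which returned
    // `Option<ExpnInfo>`. Every `ExpnId` now has data in `HygieneData`, so
    // `outer_expn_data()` returns an `ExpnData` directly.
    if !span.from_expansion() {
        return None; // replaces the old `ctxt() == SyntaxContext::empty()` checks
    }
    let expn_data = span.ctxt().outer_expn_data();
    match expn_data.kind {
        ExpnKind::Root => None,
        _ => Some(expn_data.call_site),
    }
}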
@@ -67,7 +67,7 @@ use syntax::errors;
 use syntax::ext::base::SpecialDerives;
 use syntax::ext::hygiene::ExpnId;
 use syntax::print::pprust;
-use syntax::source_map::{respan, ExpnInfo, ExpnKind, DesugaringKind, Spanned};
+use syntax::source_map::{respan, ExpnData, ExpnKind, DesugaringKind, Spanned};
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
 use syntax::parse::token::{self, Token};

@@ -704,10 +704,9 @@ impl<'a> LoweringContext<'a> {
 span: Span,
 allow_internal_unstable: Option<Lrc<[Symbol]>>,
 ) -> Span {
-span.fresh_expansion(ExpnId::root(), ExpnInfo {
-def_site: span,
+span.fresh_expansion(ExpnData {
 allow_internal_unstable,
-..ExpnInfo::default(ExpnKind::Desugaring(reason), span, self.sess.edition())
+..ExpnData::default(ExpnKind::Desugaring(reason), span, self.sess.edition())
 })
 }

@@ -1224,7 +1223,7 @@ impl<'a> LoweringContext<'a> {
 P(hir::Path {
 res,
 segments: hir_vec![hir::PathSegment::from_ident(
-Ident::with_empty_ctxt(kw::SelfUpper)
+Ident::with_dummy_span(kw::SelfUpper)
 )],
 span: t.span,
 }),

@@ -1558,7 +1557,7 @@ impl<'a> LoweringContext<'a> {

 let (name, kind) = match name {
 hir::LifetimeName::Underscore => (
-hir::ParamName::Plain(Ident::with_empty_ctxt(kw::UnderscoreLifetime)),
+hir::ParamName::Plain(Ident::with_dummy_span(kw::UnderscoreLifetime)),
 hir::LifetimeParamKind::Elided,
 ),
 hir::LifetimeName::Param(param_name) => (

@@ -2002,7 +2001,7 @@ impl<'a> LoweringContext<'a> {
 bindings: hir_vec![
 hir::TypeBinding {
 hir_id: this.next_id(),
-ident: Ident::with_empty_ctxt(FN_OUTPUT_NAME),
+ident: Ident::with_dummy_span(FN_OUTPUT_NAME),
 kind: hir::TypeBindingKind::Equality {
 ty: output
 .as_ref()

@@ -2394,7 +2393,7 @@ impl<'a> LoweringContext<'a> {
 let future_params = P(hir::GenericArgs {
 args: hir_vec![],
 bindings: hir_vec![hir::TypeBinding {
-ident: Ident::with_empty_ctxt(FN_OUTPUT_NAME),
+ident: Ident::with_dummy_span(FN_OUTPUT_NAME),
 kind: hir::TypeBindingKind::Equality {
 ty: output_ty,
 },
@@ -552,7 +552,7 @@ impl LoweringContext<'_> {

 // let mut pinned = <expr>;
 let expr = P(self.lower_expr(expr));
-let pinned_ident = Ident::with_empty_ctxt(sym::pinned);
+let pinned_ident = Ident::with_dummy_span(sym::pinned);
 let (pinned_pat, pinned_pat_hid) = self.pat_ident_binding_mode(
 span,
 pinned_ident,

@@ -593,7 +593,7 @@ impl LoweringContext<'_> {
 let loop_node_id = self.sess.next_node_id();
 let loop_hir_id = self.lower_node_id(loop_node_id);
 let ready_arm = {
-let x_ident = Ident::with_empty_ctxt(sym::result);
+let x_ident = Ident::with_dummy_span(sym::result);
 let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
 let x_expr = P(self.expr_ident(span, x_ident, x_pat_hid));
 let ready_pat = self.pat_std_enum(

@@ -1070,9 +1070,9 @@ impl LoweringContext<'_> {
 );
 head.span = desugared_span;

-let iter = Ident::with_empty_ctxt(sym::iter);
+let iter = Ident::with_dummy_span(sym::iter);

-let next_ident = Ident::with_empty_ctxt(sym::__next);
+let next_ident = Ident::with_dummy_span(sym::__next);
 let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
 desugared_span,
 next_ident,

@@ -1081,7 +1081,7 @@ impl LoweringContext<'_> {

 // `::std::option::Option::Some(val) => __next = val`
 let pat_arm = {
-let val_ident = Ident::with_empty_ctxt(sym::val);
+let val_ident = Ident::with_dummy_span(sym::val);
 let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
 let val_expr = P(self.expr_ident(pat.span, val_ident, val_pat_hid));
 let next_expr = P(self.expr_ident(pat.span, next_ident, next_pat_hid));

@@ -1247,7 +1247,7 @@ impl LoweringContext<'_> {

 // `Ok(val) => #[allow(unreachable_code)] val,`
 let ok_arm = {
-let val_ident = Ident::with_empty_ctxt(sym::val);
+let val_ident = Ident::with_dummy_span(sym::val);
 let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
 let val_expr = P(self.expr_ident_with_attrs(
 span,

@@ -1263,7 +1263,7 @@ impl LoweringContext<'_> {
 // `Err(err) => #[allow(unreachable_code)]
 // return Try::from_error(From::from(err)),`
 let err_arm = {
-let err_ident = Ident::with_empty_ctxt(sym::err);
+let err_ident = Ident::with_dummy_span(sym::err);
 let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident);
 let from_expr = {
 let from_path = &[sym::convert, sym::From, sym::from];
@@ -202,7 +202,7 @@ impl ParamName {
 match *self {
 ParamName::Plain(ident) => ident,
 ParamName::Fresh(_) |
-ParamName::Error => Ident::with_empty_ctxt(kw::UnderscoreLifetime),
+ParamName::Error => Ident::with_dummy_span(kw::UnderscoreLifetime),
 }
 }

@@ -237,8 +237,8 @@ impl LifetimeName {
 pub fn ident(&self) -> Ident {
 match *self {
 LifetimeName::Implicit | LifetimeName::Error => Ident::invalid(),
-LifetimeName::Underscore => Ident::with_empty_ctxt(kw::UnderscoreLifetime),
-LifetimeName::Static => Ident::with_empty_ctxt(kw::StaticLifetime),
+LifetimeName::Underscore => Ident::with_dummy_span(kw::UnderscoreLifetime),
+LifetimeName::Static => Ident::with_dummy_span(kw::StaticLifetime),
 LifetimeName::Param(param_name) => param_name.ident(),
 }
 }
@@ -1457,7 +1457,7 @@ impl<'a> State<'a> {
 }

 pub fn print_name(&mut self, name: ast::Name) {
-self.print_ident(ast::Ident::with_empty_ctxt(name))
+self.print_ident(ast::Ident::with_dummy_span(name))
 }

 pub fn print_for_decl(&mut self, loc: &hir::Local, coll: &hir::Expr) {
@@ -350,7 +350,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for Span {
 let line_col_len = col | line | len;
 std_hash::Hash::hash(&line_col_len, hasher);

-if span.ctxt == SyntaxContext::empty() {
+if span.ctxt == SyntaxContext::root() {
 TAG_NO_EXPANSION.hash_stable(hcx, hasher);
 } else {
 TAG_EXPANSION.hash_stable(hcx, hasher);

@@ -370,7 +370,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for Span {
 }

 let mut hasher = StableHasher::new();
-expn_id.expn_info().hash_stable(hcx, &mut hasher);
+expn_id.expn_data().hash_stable(hcx, &mut hasher);
 let sub_hash: Fingerprint = hasher.finish();
 let sub_hash = sub_hash.to_smaller_hash();
 cache.borrow_mut().insert(expn_id, sub_hash);

@@ -397,9 +397,10 @@ impl_stable_hash_for!(enum ::syntax_pos::hygiene::Transparency {
 Opaque,
 });

-impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnInfo {
-call_site,
+impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnData {
 kind,
+parent -> _,
+call_site,
 def_site,
 default_transparency,
 allow_internal_unstable,
@@ -9,7 +9,6 @@ use errors::Applicability;
 use rustc_data_structures::fx::FxHashMap;
 use syntax::ast::{Ident, Item, ItemKind};
 use syntax::symbol::{sym, Symbol};
-use syntax_pos::ExpnInfo;

 declare_tool_lint! {
 pub rustc::DEFAULT_HASH_TYPES,

@@ -108,7 +107,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TyTyKind {
 .help("try using `Ty` instead")
 .emit();
 } else {
-if ty.span.ctxt().outer_expn_info().is_some() {
+if ty.span.from_expansion() {
 return;
 }
 if let Some(t) = is_ty_or_ty_ctxt(cx, ty) {

@@ -228,30 +227,20 @@ impl EarlyLintPass for LintPassImpl {
 if let ItemKind::Impl(_, _, _, _, Some(lint_pass), _, _) = &item.node {
 if let Some(last) = lint_pass.path.segments.last() {
 if last.ident.name == sym::LintPass {
-match &lint_pass.path.span.ctxt().outer_expn_info() {
-Some(info) if is_lint_pass_expansion(info) => {}
-_ => {
-cx.struct_span_lint(
-LINT_PASS_IMPL_WITHOUT_MACRO,
-lint_pass.path.span,
-"implementing `LintPass` by hand",
-)
-.help("try using `declare_lint_pass!` or `impl_lint_pass!` instead")
-.emit();
-}
+let expn_data = lint_pass.path.span.ctxt().outer_expn_data();
+let call_site = expn_data.call_site;
+if expn_data.kind.descr() != sym::impl_lint_pass &&
+call_site.ctxt().outer_expn_data().kind.descr() != sym::declare_lint_pass {
+cx.struct_span_lint(
+LINT_PASS_IMPL_WITHOUT_MACRO,
+lint_pass.path.span,
+"implementing `LintPass` by hand",
+)
+.help("try using `declare_lint_pass!` or `impl_lint_pass!` instead")
+.emit();
+}
 }
 }
 }
 }
 }

-fn is_lint_pass_expansion(expn_info: &ExpnInfo) -> bool {
-if expn_info.kind.descr() == sym::impl_lint_pass {
-true
-} else if let Some(info) = expn_info.call_site.ctxt().outer_expn_info() {
-info.kind.descr() == sym::declare_lint_pass
-} else {
-false
-}
-}
@@ -885,21 +885,16 @@ pub fn provide(providers: &mut Providers<'_>) {
 /// This is used to test whether a lint should not even begin to figure out whether it should
 /// be reported on the current node.
 pub fn in_external_macro(sess: &Session, span: Span) -> bool {
-let info = match span.ctxt().outer_expn_info() {
-Some(info) => info,
-// no ExpnInfo means this span doesn't come from a macro
-None => return false,
-};
-
-match info.kind {
+let expn_data = span.ctxt().outer_expn_data();
+match expn_data.kind {
 ExpnKind::Root | ExpnKind::Desugaring(DesugaringKind::ForLoop) => false,
 ExpnKind::Desugaring(_) => true, // well, it's "external"
 ExpnKind::Macro(MacroKind::Bang, _) => {
-if info.def_site.is_dummy() {
+if expn_data.def_site.is_dummy() {
 // dummy span for the def_site means it's an external macro
 return true;
 }
-match sess.source_map().span_to_snippet(info.def_site) {
+match sess.source_map().span_to_snippet(expn_data.def_site) {
 Ok(code) => !code.starts_with("macro_rules"),
 // no snippet = external macro or compiler-builtin expansion
 Err(_) => true,

@@ -911,10 +906,8 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool {

 /// Returns whether `span` originates in a derive macro's expansion
 pub fn in_derive_expansion(span: Span) -> bool {
-if let Some(info) = span.ctxt().outer_expn_info() {
-if let ExpnKind::Macro(MacroKind::Derive, _) = info.kind {
-return true;
-}
+if let ExpnKind::Macro(MacroKind::Derive, _) = span.ctxt().outer_expn_data().kind {
+return true;
+}
 false
 }
@@ -36,7 +36,7 @@ use errors::{Applicability, DiagnosticBuilder};
 use std::fmt;
 use syntax::ast;
 use syntax::symbol::sym;
-use syntax_pos::{DUMMY_SP, Span, ExpnInfo, ExpnKind};
+use syntax_pos::{DUMMY_SP, Span, ExpnKind};

 impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 pub fn report_fulfillment_errors(&self,

@@ -61,9 +61,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
 // We want to ignore desugarings here: spans are equivalent even
 // if one is the result of a desugaring and the other is not.
 let mut span = error.obligation.cause.span;
-if let Some(ExpnInfo { kind: ExpnKind::Desugaring(_), def_site, .. })
-= span.ctxt().outer_expn_info() {
-span = def_site;
+let expn_data = span.ctxt().outer_expn_data();
+if let ExpnKind::Desugaring(_) = expn_data.kind {
+span = expn_data.call_site;
 }

 error_map.entry(span).or_default().push(
@@ -1417,7 +1417,7 @@ fn confirm_callable_candidate<'cx, 'tcx>(
 projection_ty: ty::ProjectionTy::from_ref_and_name(
 tcx,
 trait_ref,
-Ident::with_empty_ctxt(FN_OUTPUT_NAME),
+Ident::with_dummy_span(FN_OUTPUT_NAME),
 ),
 ty: ret_type
 }
@ -23,16 +23,16 @@ use std::mem;
|
|||
use syntax::ast::NodeId;
|
||||
use syntax::source_map::{SourceMap, StableSourceFileId};
|
||||
use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile};
|
||||
use syntax_pos::hygiene::{ExpnId, SyntaxContext, ExpnInfo};
|
||||
use syntax_pos::hygiene::{ExpnId, SyntaxContext, ExpnData};
|
||||
|
||||
const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;
|
||||
|
||||
const TAG_CLEAR_CROSS_CRATE_CLEAR: u8 = 0;
|
||||
const TAG_CLEAR_CROSS_CRATE_SET: u8 = 1;
|
||||
|
||||
const TAG_NO_EXPANSION_INFO: u8 = 0;
|
||||
const TAG_EXPANSION_INFO_SHORTHAND: u8 = 1;
|
||||
const TAG_EXPANSION_INFO_INLINE: u8 = 2;
|
||||
const TAG_NO_EXPN_DATA: u8 = 0;
|
||||
const TAG_EXPN_DATA_SHORTHAND: u8 = 1;
|
||||
const TAG_EXPN_DATA_INLINE: u8 = 2;
|
||||
|
||||
const TAG_VALID_SPAN: u8 = 0;
|
||||
const TAG_INVALID_SPAN: u8 = 1;
|
||||
|
@ -58,7 +58,7 @@ pub struct OnDiskCache<'sess> {
|
|||
|
||||
// These two fields caches that are populated lazily during decoding.
|
||||
file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
|
||||
synthetic_expansion_infos: Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
|
||||
synthetic_syntax_contexts: Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
|
||||
|
||||
// A map from dep-node to the position of the cached query result in
|
||||
// `serialized_data`.
|
||||
|
@ -135,7 +135,7 @@ impl<'sess> OnDiskCache<'sess> {
|
|||
current_diagnostics: Default::default(),
|
||||
query_result_index: footer.query_result_index.into_iter().collect(),
|
||||
prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
|
||||
synthetic_expansion_infos: Default::default(),
|
||||
synthetic_syntax_contexts: Default::default(),
|
||||
alloc_decoding_state: AllocDecodingState::new(footer.interpret_alloc_index),
|
||||
}
|
||||
}
|
||||
|
@ -151,7 +151,7 @@ impl<'sess> OnDiskCache<'sess> {
|
|||
current_diagnostics: Default::default(),
|
||||
query_result_index: Default::default(),
|
||||
prev_diagnostics_index: Default::default(),
|
||||
synthetic_expansion_infos: Default::default(),
|
||||
synthetic_syntax_contexts: Default::default(),
|
||||
alloc_decoding_state: AllocDecodingState::new(Vec::new()),
|
||||
}
|
||||
}
|
||||
|
@ -185,7 +185,7 @@ impl<'sess> OnDiskCache<'sess> {
|
|||
encoder,
|
||||
type_shorthands: Default::default(),
|
||||
predicate_shorthands: Default::default(),
|
||||
expn_info_shorthands: Default::default(),
|
||||
expn_data_shorthands: Default::default(),
|
||||
interpret_allocs: Default::default(),
|
||||
interpret_allocs_inverse: Vec::new(),
|
||||
source_map: CachingSourceMapView::new(tcx.sess.source_map()),
|
||||
|
@ -383,7 +383,7 @@ impl<'sess> OnDiskCache<'sess> {
|
|||
cnum_map: self.cnum_map.get(),
|
||||
file_index_to_file: &self.file_index_to_file,
|
||||
file_index_to_stable_id: &self.file_index_to_stable_id,
|
||||
synthetic_expansion_infos: &self.synthetic_expansion_infos,
|
||||
synthetic_syntax_contexts: &self.synthetic_syntax_contexts,
|
||||
alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
|
||||
};
|
||||
|
||||
|
@ -440,7 +440,7 @@ struct CacheDecoder<'a, 'tcx> {
|
|||
opaque: opaque::Decoder<'a>,
|
||||
source_map: &'a SourceMap,
|
||||
cnum_map: &'a IndexVec<CrateNum, Option<CrateNum>>,
|
||||
synthetic_expansion_infos: &'a Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
|
||||
synthetic_syntax_contexts: &'a Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
|
||||
file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
|
||||
file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, StableSourceFileId>,
|
||||
alloc_decoding_session: AllocDecodingSession<'a>,
|
||||
|
@ -586,37 +586,37 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
|
|||
let lo = file_lo.lines[line_lo - 1] + col_lo;
|
||||
let hi = lo + len;
|
||||
|
||||
let expn_info_tag = u8::decode(self)?;
|
||||
let expn_data_tag = u8::decode(self)?;
|
||||
|
||||
// FIXME(mw): This method does not restore `InternalExpnData::parent` or
|
||||
// FIXME(mw): This method does not restore `ExpnData::parent` or
|
||||
// `SyntaxContextData::prev_ctxt` or `SyntaxContextData::opaque`. These things
|
||||
// don't seem to be used after HIR lowering, so everything should be fine
|
||||
// as long as incremental compilation does not kick in before that.
|
||||
let location = || Span::new(lo, hi, SyntaxContext::empty());
|
||||
let recover_from_expn_info = |this: &Self, expn_info, pos| {
|
||||
let span = location().fresh_expansion(ExpnId::root(), expn_info);
|
||||
this.synthetic_expansion_infos.borrow_mut().insert(pos, span.ctxt());
|
||||
let location = || Span::with_root_ctxt(lo, hi);
|
||||
let recover_from_expn_data = |this: &Self, expn_data, pos| {
|
||||
let span = location().fresh_expansion(expn_data);
|
||||
this.synthetic_syntax_contexts.borrow_mut().insert(pos, span.ctxt());
|
||||
span
|
||||
};
|
||||
Ok(match expn_info_tag {
|
||||
TAG_NO_EXPANSION_INFO => {
|
||||
Ok(match expn_data_tag {
|
||||
TAG_NO_EXPN_DATA => {
|
||||
location()
|
||||
}
|
||||
TAG_EXPANSION_INFO_INLINE => {
|
||||
let expn_info = Decodable::decode(self)?;
|
||||
recover_from_expn_info(
|
||||
self, expn_info, AbsoluteBytePos::new(self.opaque.position())
|
||||
TAG_EXPN_DATA_INLINE => {
|
||||
let expn_data = Decodable::decode(self)?;
|
||||
recover_from_expn_data(
|
||||
self, expn_data, AbsoluteBytePos::new(self.opaque.position())
|
||||
)
|
||||
}
|
||||
TAG_EXPANSION_INFO_SHORTHAND => {
|
||||
TAG_EXPN_DATA_SHORTHAND => {
|
||||
let pos = AbsoluteBytePos::decode(self)?;
|
||||
let cached_ctxt = self.synthetic_expansion_infos.borrow().get(&pos).cloned();
|
||||
let cached_ctxt = self.synthetic_syntax_contexts.borrow().get(&pos).cloned();
|
||||
if let Some(ctxt) = cached_ctxt {
|
||||
Span::new(lo, hi, ctxt)
|
||||
} else {
|
||||
let expn_info =
|
||||
self.with_position(pos.to_usize(), |this| ExpnInfo::decode(this))?;
|
||||
recover_from_expn_info(self, expn_info, pos)
|
||||
let expn_data =
|
||||
self.with_position(pos.to_usize(), |this| ExpnData::decode(this))?;
|
||||
recover_from_expn_data(self, expn_data, pos)
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
|
@ -725,7 +725,7 @@ struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
|
|||
encoder: &'a mut E,
|
||||
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
|
||||
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
|
||||
expn_info_shorthands: FxHashMap<ExpnId, AbsoluteBytePos>,
|
||||
expn_data_shorthands: FxHashMap<ExpnId, AbsoluteBytePos>,
|
||||
interpret_allocs: FxHashMap<interpret::AllocId, usize>,
|
||||
interpret_allocs_inverse: Vec<interpret::AllocId>,
|
||||
source_map: CachingSourceMapView<'tcx>,
|
||||
|
@ -816,22 +816,18 @@ where
|
|||
col_lo.encode(self)?;
|
||||
len.encode(self)?;
|
||||
|
||||
if span_data.ctxt == SyntaxContext::empty() {
|
||||
TAG_NO_EXPANSION_INFO.encode(self)
|
||||
if span_data.ctxt == SyntaxContext::root() {
|
||||
TAG_NO_EXPN_DATA.encode(self)
|
||||
} else {
|
||||
let (expn_id, expn_info) = span_data.ctxt.outer_expn_with_info();
|
||||
if let Some(expn_info) = expn_info {
|
||||
if let Some(pos) = self.expn_info_shorthands.get(&expn_id).cloned() {
|
||||
TAG_EXPANSION_INFO_SHORTHAND.encode(self)?;
|
||||
pos.encode(self)
|
||||
} else {
|
||||
TAG_EXPANSION_INFO_INLINE.encode(self)?;
|
||||
let pos = AbsoluteBytePos::new(self.position());
|
||||
self.expn_info_shorthands.insert(expn_id, pos);
|
||||
expn_info.encode(self)
|
||||
}
|
||||
let (expn_id, expn_data) = span_data.ctxt.outer_expn_with_data();
|
||||
if let Some(pos) = self.expn_data_shorthands.get(&expn_id).cloned() {
|
||||
TAG_EXPN_DATA_SHORTHAND.encode(self)?;
|
||||
pos.encode(self)
|
||||
} else {
|
||||
TAG_NO_EXPANSION_INFO.encode(self)
|
||||
TAG_EXPN_DATA_INLINE.encode(self)?;
|
||||
let pos = AbsoluteBytePos::new(self.position());
|
||||
self.expn_data_shorthands.insert(expn_id, pos);
|
||||
expn_data.encode(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1775,10 +1775,7 @@ impl SharedEmitterMain {
 }
 }
 Ok(SharedEmitterMessage::InlineAsmError(cookie, msg)) => {
-match ExpnId::from_u32(cookie).expn_info() {
-Some(ei) => sess.span_err(ei.call_site, &msg),
-None => sess.err(&msg),
-}
+sess.span_err(ExpnId::from_u32(cookie).expn_data().call_site, &msg)
 }
 Ok(SharedEmitterMessage::AbortIfErrors) => {
 sess.abort_if_errors();
@@ -8,7 +8,7 @@ use crate::base;
 use crate::debuginfo::{self, VariableAccess, VariableKind, FunctionDebugContext};
 use crate::traits::*;

-use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span};
+use syntax_pos::{DUMMY_SP, BytePos, Span};
 use syntax::symbol::kw;

 use std::iter;

@@ -120,7 +120,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 // In order to have a good line stepping behavior in debugger, we overwrite debug
 // locations of macro expansions with that of the outermost expansion site
 // (unless the crate is being compiled with `-Z debug-macros`).
-if source_info.span.ctxt() == NO_EXPANSION ||
+if !source_info.span.from_expansion() ||
 self.cx.sess().opts.debugging_opts.debug_macros {
 let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo());
 (scope, source_info.span)
@@ -678,7 +678,7 @@ impl RustcDefaultCalls {

 let mut cfgs = sess.parse_sess.config.iter().filter_map(|&(name, ref value)| {
 let gated_cfg = GatedCfg::gate(&ast::MetaItem {
-path: ast::Path::from_ident(ast::Ident::with_empty_ctxt(name)),
+path: ast::Path::from_ident(ast::Ident::with_dummy_span(name)),
 node: ast::MetaItemKind::Word,
 span: DUMMY_SP,
 });
@@ -43,8 +43,7 @@ use syntax_pos::{BytePos,
 SourceFile,
 FileName,
 MultiSpan,
-Span,
-NO_EXPANSION};
+Span};

 /// Indicates the confidence in the correctness of a suggestion.
 ///

@@ -189,7 +188,7 @@ impl CodeSuggestion {
 // Find the bounding span.
 let lo = substitution.parts.iter().map(|part| part.span.lo()).min().unwrap();
 let hi = substitution.parts.iter().map(|part| part.span.hi()).min().unwrap();
-let bounding_span = Span::new(lo, hi, NO_EXPANSION);
+let bounding_span = Span::with_root_ctxt(lo, hi);
 let lines = cm.span_to_lines(bounding_span).unwrap();
 assert!(!lines.lines.is_empty());

@ -42,7 +42,7 @@ use syntax::source_map::Spanned;
|
|||
use syntax::edition::Edition;
|
||||
use syntax::feature_gate::{self, AttributeGate, AttributeType};
|
||||
use syntax::feature_gate::{Stability, deprecated_attributes};
|
||||
use syntax_pos::{BytePos, Span, SyntaxContext};
|
||||
use syntax_pos::{BytePos, Span};
|
||||
use syntax::symbol::{Symbol, kw, sym};
|
||||
use syntax::errors::{Applicability, DiagnosticBuilder};
|
||||
use syntax::print::pprust::expr_to_string;
|
||||
|
@ -78,7 +78,7 @@ impl EarlyLintPass for WhileTrue {
|
|||
if let ast::ExprKind::While(cond, ..) = &e.node {
|
||||
if let ast::ExprKind::Lit(ref lit) = pierce_parens(cond).node {
|
||||
if let ast::LitKind::Bool(true) = lit.node {
|
||||
if lit.span.ctxt() == SyntaxContext::empty() {
|
||||
if !lit.span.from_expansion() {
|
||||
let msg = "denote infinite loops with `loop { ... }`";
|
||||
let condition_span = cx.sess.source_map().def_span(e.span);
|
||||
cx.struct_span_lint(WHILE_TRUE, condition_span, msg)
|
||||
|
@ -167,7 +167,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns {
|
|||
if fieldpat.is_shorthand {
|
||||
continue;
|
||||
}
|
||||
if fieldpat.span.ctxt().outer_expn_info().is_some() {
|
||||
if fieldpat.span.from_expansion() {
|
||||
// Don't lint if this is a macro expansion: macro authors
|
||||
// shouldn't have to worry about this kind of style issue
|
||||
// (Issue #49588)
|
||||
|
@ -1012,7 +1012,7 @@ impl UnreachablePub {
|
|||
let mut applicability = Applicability::MachineApplicable;
|
||||
match vis.node {
|
||||
hir::VisibilityKind::Public if !cx.access_levels.is_reachable(id) => {
|
||||
if span.ctxt().outer_expn_info().is_some() {
|
||||
if span.from_expansion() {
|
||||
applicability = Applicability::MaybeIncorrect;
|
||||
}
|
||||
let def_span = cx.tcx.sess.source_map().def_span(span);
|
||||
|
|
|
@ -517,9 +517,8 @@ impl EarlyLintPass for UnusedParens {
|
|||
// trigger in situations that macro authors shouldn't have to care about, e.g.,
|
||||
// when a parenthesized token tree matched in one macro expansion is matched as
|
||||
// an expression in another and used as a fn/method argument (Issue #47775)
|
||||
if e.span.ctxt().outer_expn_info()
|
||||
.map_or(false, |info| info.call_site.ctxt().outer_expn_info().is_some()) {
|
||||
return;
|
||||
if e.span.ctxt().outer_expn_data().call_site.from_expansion() {
|
||||
return;
|
||||
}
|
||||
let msg = format!("{} argument", call_kind);
|
||||
for arg in args_to_check {
|
||||
|
|
|
@ -35,7 +35,7 @@ use syntax::ext::proc_macro::BangProcMacro;
|
|||
use syntax::parse::source_file_to_stream;
|
||||
use syntax::parse::parser::emit_unclosed_delims;
|
||||
use syntax::symbol::{Symbol, sym};
|
||||
use syntax_pos::{Span, NO_EXPANSION, FileName};
|
||||
use syntax_pos::{Span, FileName};
|
||||
use rustc_data_structures::bit_set::BitSet;
|
||||
|
||||
macro_rules! provide {
|
||||
|
@ -443,7 +443,7 @@ impl cstore::CStore {
|
|||
let source_name = FileName::Macros(macro_full_name);
|
||||
|
||||
let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body);
|
||||
let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION);
|
||||
let local_span = Span::with_root_ctxt(source_file.start_pos, source_file.end_pos);
|
||||
let (body, mut errors) = source_file_to_stream(&sess.parse_sess, source_file, None);
|
||||
emit_unclosed_delims(&mut errors, &sess.diagnostic());
|
||||
|
||||
|
|
|
@ -32,7 +32,7 @@ use syntax::source_map;
|
|||
use syntax::symbol::{Symbol, sym};
|
||||
use syntax::ext::base::{MacroKind, SyntaxExtension};
|
||||
use syntax::ext::hygiene::ExpnId;
|
||||
use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, NO_EXPANSION};
|
||||
use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP};
|
||||
use log::debug;
|
||||
|
||||
pub struct DecodeContext<'a, 'tcx> {
|
||||
|
@ -344,7 +344,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
|
|||
let hi = (hi + source_file.translated_source_file.start_pos)
|
||||
- source_file.original_start_pos;
|
||||
|
||||
Ok(Span::new(lo, hi, NO_EXPANSION))
|
||||
Ok(Span::with_root_ctxt(lo, hi))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -569,7 +569,7 @@ impl<'a, 'tcx> CrateMetadata {
|
|||
|
||||
ty::VariantDef::new(
|
||||
tcx,
|
||||
Ident::with_empty_ctxt(self.item_name(index)),
|
||||
Ident::with_dummy_span(self.item_name(index)),
|
||||
variant_did,
|
||||
ctor_did,
|
||||
data.discr,
|
||||
|
@ -577,7 +577,7 @@ impl<'a, 'tcx> CrateMetadata {
|
|||
let f = self.entry(index);
|
||||
ty::FieldDef {
|
||||
did: self.local_def_id(index),
|
||||
ident: Ident::with_empty_ctxt(self.item_name(index)),
|
||||
ident: Ident::with_dummy_span(self.item_name(index)),
|
||||
vis: f.visibility.decode(self)
|
||||
}
|
||||
}).collect(),
|
||||
|
@ -741,7 +741,7 @@ impl<'a, 'tcx> CrateMetadata {
|
|||
DefKind::Macro(ext.macro_kind()),
|
||||
self.local_def_id(DefIndex::from_proc_macro_index(id)),
|
||||
);
|
||||
let ident = Ident::with_empty_ctxt(name);
|
||||
let ident = Ident::with_dummy_span(name);
|
||||
callback(def::Export {
|
||||
ident: ident,
|
||||
res: res,
|
||||
|
@ -783,7 +783,7 @@ impl<'a, 'tcx> CrateMetadata {
|
|||
if let Some(kind) = self.def_kind(child_index) {
|
||||
callback(def::Export {
|
||||
res: Res::Def(kind, self.local_def_id(child_index)),
|
||||
ident: Ident::with_empty_ctxt(self.item_name(child_index)),
|
||||
ident: Ident::with_dummy_span(self.item_name(child_index)),
|
||||
vis: self.get_visibility(child_index),
|
||||
span: self.entry(child_index).span.decode((self, sess)),
|
||||
});
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
//! Reduced graph building.
|
||||
//! After we obtain a fresh AST fragment from a macro, code in this module helps to integrate
|
||||
//! that fragment into the module structures that are already partially built.
|
||||
//!
|
||||
//! Here we build the "reduced graph": the graph of the module tree without
|
||||
//! any imports resolved.
|
||||
//! Items from the fragment are placed into modules,
|
||||
//! unexpanded macros in the fragment are visited and registered.
|
||||
//! Imports are also considered items and placed into modules here, but not resolved yet.
|
||||
|
||||
use crate::macros::{InvocationData, LegacyBinding, LegacyScope};
|
||||
use crate::macros::{LegacyBinding, LegacyScope};
|
||||
use crate::resolve_imports::ImportDirective;
|
||||
use crate::resolve_imports::ImportDirectiveSubclass::{self, GlobImport, SingleImport};
|
||||
use crate::{Module, ModuleData, ModuleKind, NameBinding, NameBindingKind, Segment, ToNameBinding};
|
||||
|
@ -30,6 +32,7 @@ use syntax::attr;
|
|||
use syntax::ast::{self, Block, ForeignItem, ForeignItemKind, Item, ItemKind, NodeId};
|
||||
use syntax::ast::{MetaItemKind, StmtKind, TraitItem, TraitItemKind, Variant};
|
||||
use syntax::ext::base::{MacroKind, SyntaxExtension};
|
||||
use syntax::ext::expand::AstFragment;
|
||||
use syntax::ext::hygiene::ExpnId;
|
||||
use syntax::feature_gate::is_builtin_attr;
|
||||
use syntax::parse::token::{self, Token};
|
||||
|
@ -67,7 +70,7 @@ impl<'a> ToNameBinding<'a> for (Res, ty::Visibility, Span, ExpnId) {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) struct IsMacroExport;
|
||||
struct IsMacroExport;
|
||||
|
||||
impl<'a> ToNameBinding<'a> for (Res, ty::Visibility, Span, ExpnId, IsMacroExport) {
|
||||
fn to_name_binding(self, arenas: &'a ResolverArenas<'a>) -> &'a NameBinding<'a> {
|
||||
|
@ -84,7 +87,7 @@ impl<'a> ToNameBinding<'a> for (Res, ty::Visibility, Span, ExpnId, IsMacroExport
|
|||
impl<'a> Resolver<'a> {
|
||||
/// Defines `name` in namespace `ns` of module `parent` to be `def` if it is not yet defined;
|
||||
/// otherwise, reports an error.
|
||||
pub fn define<T>(&mut self, parent: Module<'a>, ident: Ident, ns: Namespace, def: T)
|
||||
crate fn define<T>(&mut self, parent: Module<'a>, ident: Ident, ns: Namespace, def: T)
|
||||
where T: ToNameBinding<'a>,
|
||||
{
|
||||
let binding = def.to_name_binding(self.arenas);
|
||||
|
@ -93,7 +96,7 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn get_module(&mut self, def_id: DefId) -> Module<'a> {
|
||||
crate fn get_module(&mut self, def_id: DefId) -> Module<'a> {
|
||||
if def_id.krate == LOCAL_CRATE {
|
||||
return self.module_map[&def_id]
|
||||
}
|
||||
|
@ -119,7 +122,7 @@ impl<'a> Resolver<'a> {
|
|||
module
|
||||
}
|
||||
|
||||
pub fn macro_def_scope(&mut self, expn_id: ExpnId) -> Module<'a> {
|
||||
crate fn macro_def_scope(&mut self, expn_id: ExpnId) -> Module<'a> {
|
||||
let def_id = match self.macro_defs.get(&expn_id) {
|
||||
Some(def_id) => *def_id,
|
||||
None => return self.graph_root,
|
||||
|
@ -141,7 +144,7 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
crate fn get_macro_by_def_id(&mut self, def_id: DefId) -> Option<Lrc<SyntaxExtension>> {
|
||||
fn get_macro_by_def_id(&mut self, def_id: DefId) -> Option<Lrc<SyntaxExtension>> {
|
||||
if let Some(ext) = self.macro_map.get(&def_id) {
|
||||
return Some(ext.clone());
|
||||
}
|
||||
|
@ -158,21 +161,29 @@ impl<'a> Resolver<'a> {
|
|||
|
||||
/// Ensures that the reduced graph rooted at the given external module
|
||||
/// is built, building it if it is not.
|
||||
pub fn populate_module_if_necessary(&mut self, module: Module<'a>) {
|
||||
crate fn populate_module_if_necessary(&mut self, module: Module<'a>) {
|
||||
if module.populated.get() { return }
|
||||
let def_id = module.def_id().unwrap();
|
||||
for child in self.cstore.item_children_untracked(def_id, self.session) {
|
||||
let child = child.map_id(|_| panic!("unexpected id"));
|
||||
BuildReducedGraphVisitor { parent_scope: self.dummy_parent_scope(), r: self }
|
||||
.build_reduced_graph_for_external_crate_res(module, child);
|
||||
BuildReducedGraphVisitor { parent_scope: ParentScope::module(module), r: self }
|
||||
.build_reduced_graph_for_external_crate_res(child);
|
||||
}
|
||||
module.populated.set(true)
|
||||
}
|
||||
|
||||
crate fn build_reduced_graph(
|
||||
&mut self, fragment: &AstFragment, parent_scope: ParentScope<'a>
|
||||
) -> LegacyScope<'a> {
|
||||
let mut visitor = BuildReducedGraphVisitor { r: self, parent_scope };
|
||||
fragment.visit_with(&mut visitor);
|
||||
visitor.parent_scope.legacy
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BuildReducedGraphVisitor<'a, 'b> {
|
||||
pub r: &'b mut Resolver<'a>,
|
||||
pub parent_scope: ParentScope<'a>,
|
||||
struct BuildReducedGraphVisitor<'a, 'b> {
|
||||
r: &'b mut Resolver<'a>,
|
||||
parent_scope: ParentScope<'a>,
|
||||
}
|
||||
|
||||
impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
|
||||
|
@ -300,10 +311,9 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
|
|||
root_id: NodeId,
|
||||
vis: ty::Visibility,
|
||||
) {
|
||||
let parent_scope = &self.parent_scope;
|
||||
let current_module = parent_scope.module;
|
||||
let current_module = self.parent_scope.module;
|
||||
let directive = self.r.arenas.alloc_import_directive(ImportDirective {
|
||||
parent_scope: parent_scope.clone(),
|
||||
parent_scope: self.parent_scope,
|
||||
module_path,
|
||||
imported_module: Cell::new(None),
|
||||
subclass,
|
||||
|
@ -601,7 +611,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
|
|||
let directive = self.r.arenas.alloc_import_directive(ImportDirective {
|
||||
root_id: item.id,
|
||||
id: item.id,
|
||||
parent_scope: self.parent_scope.clone(),
|
||||
parent_scope: self.parent_scope,
|
||||
imported_module: Cell::new(Some(ModuleOrUniformRoot::Module(module))),
|
||||
subclass: ImportDirectiveSubclass::ExternCrate {
|
||||
source: orig_name,
|
||||
|
@ -706,7 +716,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
|
|||
self.r.define(parent, ident, TypeNS, (module, vis, sp, expansion));
|
||||
|
||||
for variant in &(*enum_definition).variants {
|
||||
self.build_reduced_graph_for_variant(variant, module, vis, expansion);
|
||||
self.build_reduced_graph_for_variant(variant, module, vis);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -797,8 +807,8 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
|
|||
fn build_reduced_graph_for_variant(&mut self,
|
||||
variant: &Variant,
|
||||
parent: Module<'a>,
|
||||
vis: ty::Visibility,
|
||||
expn_id: ExpnId) {
|
||||
vis: ty::Visibility) {
|
||||
let expn_id = self.parent_scope.expansion;
|
||||
let ident = variant.ident;
|
||||
|
||||
// Define a name in the type namespace.
|
||||
|
@ -861,11 +871,8 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
|
|||
}
|
||||
|
||||
/// Builds the reduced graph for a single item in an external crate.
|
||||
fn build_reduced_graph_for_external_crate_res(
|
||||
&mut self,
|
||||
parent: Module<'a>,
|
||||
child: Export<ast::NodeId>,
|
||||
) {
|
||||
fn build_reduced_graph_for_external_crate_res(&mut self, child: Export<ast::NodeId>) {
|
||||
let parent = self.parent_scope.module;
|
||||
let Export { ident, res, vis, span } = child;
|
||||
// FIXME: We shouldn't create the gensym here, it should come from metadata,
|
||||
// but metadata cannot encode gensyms currently, so we create it here.
|
||||
|
@ -997,7 +1004,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
|
|||
|this: &Self, span| this.r.arenas.alloc_import_directive(ImportDirective {
|
||||
root_id: item.id,
|
||||
id: item.id,
|
||||
parent_scope: this.parent_scope.clone(),
|
||||
parent_scope: this.parent_scope,
|
||||
imported_module: Cell::new(Some(ModuleOrUniformRoot::Module(module))),
|
||||
subclass: ImportDirectiveSubclass::MacroUse,
|
||||
use_span_with_attributes: item.span_with_attributes(),
|
||||
|
@ -1066,20 +1073,15 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
|
|||
false
|
||||
}
|
||||
|
||||
fn visit_invoc(&mut self, id: ast::NodeId) -> &'a InvocationData<'a> {
|
||||
fn visit_invoc(&mut self, id: ast::NodeId) -> LegacyScope<'a> {
|
||||
let invoc_id = id.placeholder_to_expn_id();
|
||||
|
||||
self.parent_scope.module.unresolved_invocations.borrow_mut().insert(invoc_id);
|
||||
|
||||
let invocation_data = self.r.arenas.alloc_invocation_data(InvocationData {
|
||||
module: self.parent_scope.module,
|
||||
parent_legacy_scope: self.parent_scope.legacy,
|
||||
output_legacy_scope: Cell::new(None),
|
||||
});
|
||||
let old_invocation_data = self.r.invocations.insert(invoc_id, invocation_data);
|
||||
assert!(old_invocation_data.is_none(), "invocation data is reset for an invocation");
|
||||
let old_parent_scope = self.r.invocation_parent_scopes.insert(invoc_id, self.parent_scope);
|
||||
assert!(old_parent_scope.is_none(), "invocation data is reset for an invocation");
|
||||
|
||||
invocation_data
|
||||
LegacyScope::Invocation(invoc_id)
|
||||
}
|
||||
|
||||
fn proc_macro_stub(item: &ast::Item) -> Option<(MacroKind, Ident, Span)> {
|
||||
|
@ -1180,7 +1182,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
|
|||
return
|
||||
}
|
||||
ItemKind::Mac(..) => {
|
||||
self.parent_scope.legacy = LegacyScope::Invocation(self.visit_invoc(item.id));
|
||||
self.parent_scope.legacy = self.visit_invoc(item.id);
|
||||
return
|
||||
}
|
||||
ItemKind::Mod(..) => self.contains_macro_use(&item.attrs),
|
||||
|
@ -1199,7 +1201,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
|
|||
|
||||
fn visit_stmt(&mut self, stmt: &'b ast::Stmt) {
|
||||
if let ast::StmtKind::Mac(..) = stmt.node {
|
||||
self.parent_scope.legacy = LegacyScope::Invocation(self.visit_invoc(stmt.id));
|
||||
self.parent_scope.legacy = self.visit_invoc(stmt.id);
|
||||
} else {
|
||||
visit::walk_stmt(self, stmt);
|
||||
}
|
||||
|
@ -1267,9 +1269,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
|
|||
|
||||
fn visit_attribute(&mut self, attr: &'b ast::Attribute) {
|
||||
if !attr.is_sugared_doc && is_builtin_attr(attr) {
|
||||
self.parent_scope.module.builtin_attrs.borrow_mut().push((
|
||||
attr.path.segments[0].ident, self.parent_scope.clone()
|
||||
));
|
||||
self.r.builtin_attrs.push((attr.path.segments[0].ident, self.parent_scope));
|
||||
}
|
||||
visit::walk_attribute(self, attr);
|
||||
}
|
||||
|
|
|
@ -376,9 +376,9 @@ impl<'a> Resolver<'a> {
|
|||
Scope::DeriveHelpers => {
|
||||
let res = Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper);
|
||||
if filter_fn(res) {
|
||||
for derive in &parent_scope.derives {
|
||||
for derive in parent_scope.derives {
|
||||
let parent_scope =
|
||||
&ParentScope { derives: Vec::new(), ..*parent_scope };
|
||||
&ParentScope { derives: &[], ..*parent_scope };
|
||||
if let Ok((Some(ext), _)) = this.resolve_macro_path(
|
||||
derive, Some(MacroKind::Derive), parent_scope, false, false
|
||||
) {
|
||||
|
@ -455,7 +455,7 @@ impl<'a> Resolver<'a> {
|
|||
let mut tmp_suggestions = Vec::new();
|
||||
add_module_candidates(prelude, &mut tmp_suggestions, filter_fn);
|
||||
suggestions.extend(tmp_suggestions.into_iter().filter(|s| {
|
||||
use_prelude || this.is_builtin_macro(s.res.opt_def_id())
|
||||
use_prelude || this.is_builtin_macro(s.res)
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
@ -595,7 +595,7 @@ impl<'a> Resolver<'a> {
|
|||
where FilterFn: Fn(Res) -> bool
|
||||
{
|
||||
let mut suggestions = self.lookup_import_candidates_from_module(
|
||||
lookup_ident, namespace, self.graph_root, Ident::with_empty_ctxt(kw::Crate), &filter_fn
|
||||
lookup_ident, namespace, self.graph_root, Ident::with_dummy_span(kw::Crate), &filter_fn
|
||||
);
|
||||
|
||||
if lookup_ident.span.rust_2018() {
|
||||
|
|
|
@ -1,3 +1,10 @@
|
|||
//! "Late resolution" is the pass that resolves most of names in a crate beside imports and macros.
|
||||
//! It runs when the crate is fully expanded and its module structure is fully built.
|
||||
//! So it just walks through the crate and resolves all the expressions, types, etc.
|
||||
//!
|
||||
//! If you wonder why there's no `early.rs`, that's because it's split into three files -
|
||||
//! `build_reduced_graph.rs`, `macros.rs` and `resolve_imports.rs`.
|
||||
|
||||
use GenericParameters::*;
|
||||
use RibKind::*;
|
||||
|
||||
|
@ -352,7 +359,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> {
|
|||
self.smart_resolve_path(ty.id, qself.as_ref(), path, PathSource::Type);
|
||||
}
|
||||
TyKind::ImplicitSelf => {
|
||||
let self_ty = Ident::with_empty_ctxt(kw::SelfUpper);
|
||||
let self_ty = Ident::with_dummy_span(kw::SelfUpper);
|
||||
let res = self.resolve_ident_in_lexical_scope(self_ty, TypeNS, Some(ty.id), ty.span)
|
||||
.map_or(Res::Err, |d| d.res());
|
||||
self.r.record_partial_res(ty.id, PartialRes::new(res));
|
||||
|
@ -442,7 +449,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> {
|
|||
GenericParamKind::Type { ref default, .. } => {
|
||||
found_default |= default.is_some();
|
||||
if found_default {
|
||||
Some((Ident::with_empty_ctxt(param.ident.name), Res::Err))
|
||||
Some((Ident::with_dummy_span(param.ident.name), Res::Err))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -459,7 +466,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> {
|
|||
false
|
||||
}
|
||||
})
|
||||
.map(|param| (Ident::with_empty_ctxt(param.ident.name), Res::Err)));
|
||||
.map(|param| (Ident::with_dummy_span(param.ident.name), Res::Err)));
|
||||
|
||||
for param in &generics.params {
|
||||
match param.kind {
|
||||
|
@ -476,7 +483,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> {
|
|||
}
|
||||
|
||||
// Allow all following defaults to refer to this type parameter.
|
||||
default_ban_rib.bindings.remove(&Ident::with_empty_ctxt(param.ident.name));
|
||||
default_ban_rib.bindings.remove(&Ident::with_dummy_span(param.ident.name));
|
||||
}
|
||||
GenericParamKind::Const { ref ty } => {
|
||||
self.ribs[TypeNS].push(const_ty_param_ban_rib);
|
||||
|
@ -501,8 +508,8 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
|
|||
fn new(resolver: &'b mut Resolver<'a>) -> LateResolutionVisitor<'a, 'b> {
|
||||
// During late resolution we only track the module component of the parent scope,
|
||||
// although it may be useful to track other components as well for diagnostics.
|
||||
let parent_scope = resolver.dummy_parent_scope();
|
||||
let graph_root = resolver.graph_root;
|
||||
let parent_scope = ParentScope::module(graph_root);
|
||||
LateResolutionVisitor {
|
||||
r: resolver,
|
||||
parent_scope,
|
||||
|
@ -574,7 +581,6 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
|
|||
self.ribs[ValueNS].push(Rib::new(ModuleRibKind(module)));
|
||||
self.ribs[TypeNS].push(Rib::new(ModuleRibKind(module)));
|
||||
|
||||
self.r.finalize_current_module_macro_resolutions(module);
|
||||
let ret = f(self);
|
||||
|
||||
self.parent_scope.module = orig_module;
|
||||
|
@ -965,7 +971,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
|
|||
let mut self_type_rib = Rib::new(NormalRibKind);
|
||||
|
||||
// Plain insert (no renaming, since types are not currently hygienic)
|
||||
self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res);
|
||||
self_type_rib.bindings.insert(Ident::with_dummy_span(kw::SelfUpper), self_res);
|
||||
self.ribs[TypeNS].push(self_type_rib);
|
||||
f(self);
|
||||
self.ribs[TypeNS].pop();
|
||||
|
@ -976,7 +982,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
|
|||
{
|
||||
let self_res = Res::SelfCtor(impl_id);
|
||||
let mut self_type_rib = Rib::new(NormalRibKind);
|
||||
self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res);
|
||||
self_type_rib.bindings.insert(Ident::with_dummy_span(kw::SelfUpper), self_res);
|
||||
self.ribs[ValueNS].push(self_type_rib);
|
||||
f(self);
|
||||
self.ribs[ValueNS].pop();
|
||||
|
@ -1227,7 +1233,6 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
|
|||
self.ribs[ValueNS].push(Rib::new(ModuleRibKind(anonymous_module)));
|
||||
self.ribs[TypeNS].push(Rib::new(ModuleRibKind(anonymous_module)));
|
||||
self.parent_scope.module = anonymous_module;
|
||||
self.r.finalize_current_module_macro_resolutions(anonymous_module);
|
||||
} else {
|
||||
self.ribs[ValueNS].push(Rib::new(NormalRibKind));
|
||||
}
|
||||
|
@ -1476,7 +1481,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
|
|||
self.r.trait_map.insert(id, traits);
|
||||
}
|
||||
|
||||
let mut std_path = vec![Segment::from_ident(Ident::with_empty_ctxt(sym::std))];
|
||||
let mut std_path = vec![Segment::from_ident(Ident::with_dummy_span(sym::std))];
|
||||
std_path.extend(path);
|
||||
if self.r.primitive_type_table.primitive_types.contains_key(&path[0].ident.name) {
|
||||
let cl = CrateLint::No;
|
||||
|
@ -1507,7 +1512,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
|
|||
|
||||
fn self_type_is_available(&mut self, span: Span) -> bool {
|
||||
let binding = self.resolve_ident_in_lexical_scope(
|
||||
Ident::with_empty_ctxt(kw::SelfUpper),
|
||||
Ident::with_dummy_span(kw::SelfUpper),
|
||||
TypeNS,
|
||||
None,
|
||||
span,
|
||||
|
@ -1984,7 +1989,6 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
|
|||
|
||||
impl<'a> Resolver<'a> {
|
||||
pub(crate) fn late_resolve_crate(&mut self, krate: &Crate) {
|
||||
self.finalize_current_module_macro_resolutions(self.graph_root);
|
||||
let mut late_resolution_visitor = LateResolutionVisitor::new(self);
|
||||
visit::walk_crate(&mut late_resolution_visitor, krate);
|
||||
for (id, span) in late_resolution_visitor.unused_labels.iter() {
|
||||
|
|
|
@ -1,3 +1,12 @@
|
|||
//! This crate is responsible for the part of name resolution that doesn't require type checker.
|
||||
//!
|
||||
//! Module structure of the crate is built here.
|
||||
//! Paths in macros, imports, expressions, types, patterns are resolved here.
|
||||
//! Label names are resolved here as well.
|
||||
//!
|
||||
//! Type-relative name resolution (methods, fields, associated items) happens in `librustc_typeck`.
|
||||
//! Lifetime names are resolved in `librustc/middle/resolve_lifetime.rs`.
|
||||
|
||||
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
|
||||
|
||||
#![feature(crate_visibility_modifier)]
|
||||
|
@ -54,7 +63,7 @@ use diagnostics::{Suggestion, ImportSuggestion};
|
|||
use diagnostics::{find_span_of_binding_until_next_binding, extend_span_to_previous_binding};
|
||||
use late::{PathSource, Rib, RibKind::*};
|
||||
use resolve_imports::{ImportDirective, ImportDirectiveSubclass, NameResolution, ImportResolver};
|
||||
use macros::{InvocationData, LegacyBinding, LegacyScope};
|
||||
use macros::{LegacyBinding, LegacyScope};
|
||||
|
||||
type Res = def::Res<NodeId>;
|
||||
|
||||
|
@ -122,12 +131,25 @@ enum ScopeSet {
|
|||
/// Serves as a starting point for the scope visitor.
|
||||
/// This struct is currently used only for early resolution (imports and macros),
|
||||
/// but not for late resolution yet.
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct ParentScope<'a> {
|
||||
module: Module<'a>,
|
||||
expansion: ExpnId,
|
||||
legacy: LegacyScope<'a>,
|
||||
derives: Vec<ast::Path>,
|
||||
derives: &'a [ast::Path],
|
||||
}
|
||||
|
||||
impl<'a> ParentScope<'a> {
|
||||
/// Creates a parent scope with the passed argument used as the module scope component,
|
||||
/// and other scope components set to default empty values.
|
||||
pub fn module(module: Module<'a>) -> ParentScope<'a> {
|
||||
ParentScope {
|
||||
module,
|
||||
expansion: ExpnId::root(),
|
||||
legacy: LegacyScope::Empty,
|
||||
derives: &[],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Eq)]
|
||||
|
@ -274,7 +296,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
|
|||
ItemKind::Use(..) => {
|
||||
// don't suggest placing a use before the prelude
|
||||
// import or other generated ones
|
||||
if item.span.ctxt().outer_expn_info().is_none() {
|
||||
if !item.span.from_expansion() {
|
||||
self.span = Some(item.span.shrink_to_lo());
|
||||
self.found_use = true;
|
||||
return;
|
||||
|
@ -284,7 +306,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
|
|||
ItemKind::ExternCrate(_) => {}
|
||||
// but place them before the first other item
|
||||
_ => if self.span.map_or(true, |span| item.span < span ) {
|
||||
if item.span.ctxt().outer_expn_info().is_none() {
|
||||
if !item.span.from_expansion() {
|
||||
// don't insert between attributes and an item
|
||||
if item.attrs.is_empty() {
|
||||
self.span = Some(item.span.shrink_to_lo());
|
||||
|
@ -418,11 +440,6 @@ pub struct ModuleData<'a> {
|
|||
normal_ancestor_id: DefId,
|
||||
|
||||
resolutions: RefCell<FxHashMap<(Ident, Namespace), &'a RefCell<NameResolution<'a>>>>,
|
||||
single_segment_macro_resolutions: RefCell<Vec<(Ident, MacroKind, ParentScope<'a>,
|
||||
Option<&'a NameBinding<'a>>)>>,
|
||||
multi_segment_macro_resolutions: RefCell<Vec<(Vec<Segment>, Span, MacroKind, ParentScope<'a>,
|
||||
Option<Res>)>>,
|
||||
builtin_attrs: RefCell<Vec<(Ident, ParentScope<'a>)>>,
|
||||
|
||||
// Macro invocations that can expand into items in this module.
|
||||
unresolved_invocations: RefCell<FxHashSet<ExpnId>>,
|
||||
|
@ -459,9 +476,6 @@ impl<'a> ModuleData<'a> {
|
|||
kind,
|
||||
normal_ancestor_id,
|
||||
resolutions: Default::default(),
|
||||
single_segment_macro_resolutions: RefCell::new(Vec::new()),
|
||||
multi_segment_macro_resolutions: RefCell::new(Vec::new()),
|
||||
builtin_attrs: RefCell::new(Vec::new()),
|
||||
unresolved_invocations: Default::default(),
|
||||
no_implicit_prelude: false,
|
||||
glob_importers: RefCell::new(Vec::new()),
|
||||
|
@ -807,7 +821,7 @@ pub struct Resolver<'a> {
|
|||
|
||||
pub definitions: Definitions,
|
||||
|
||||
graph_root: Module<'a>,
|
||||
pub graph_root: Module<'a>,
|
||||
|
||||
prelude: Option<Module<'a>>,
|
||||
pub extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'a>>,
|
||||
|
@ -896,15 +910,24 @@ pub struct Resolver<'a> {
|
|||
local_macro_def_scopes: FxHashMap<NodeId, Module<'a>>,
|
||||
unused_macros: NodeMap<Span>,
|
||||
proc_macro_stubs: NodeSet,
|
||||
/// Traces collected during macro resolution and validated when it's complete.
|
||||
single_segment_macro_resolutions: Vec<(Ident, MacroKind, ParentScope<'a>,
|
||||
Option<&'a NameBinding<'a>>)>,
|
||||
multi_segment_macro_resolutions: Vec<(Vec<Segment>, Span, MacroKind, ParentScope<'a>,
|
||||
Option<Res>)>,
|
||||
builtin_attrs: Vec<(Ident, ParentScope<'a>)>,
|
||||
/// Some built-in derives mark items they are applied to so they are treated specially later.
|
||||
/// Derive macros cannot modify the item themselves and have to store the markers in the global
|
||||
/// context, so they attach the markers to derive container IDs using this resolver table.
|
||||
/// FIXME: Find a way for `PartialEq` and `Eq` to emulate `#[structural_match]`
|
||||
/// by marking the produced impls rather than the original items.
|
||||
special_derives: FxHashMap<ExpnId, SpecialDerives>,
|
||||
|
||||
/// Maps the `ExpnId` of an expansion to its containing module or block.
|
||||
invocations: FxHashMap<ExpnId, &'a InvocationData<'a>>,
|
||||
/// Parent scopes in which the macros were invoked.
|
||||
/// FIXME: `derives` are missing in these parent scopes and need to be taken from elsewhere.
|
||||
invocation_parent_scopes: FxHashMap<ExpnId, ParentScope<'a>>,
|
||||
/// Legacy scopes *produced* by expanding the macro invocations,
|
||||
/// include all the `macro_rules` items and other invocations generated by them.
|
||||
output_legacy_scopes: FxHashMap<ExpnId, LegacyScope<'a>>,
|
||||
|
||||
/// Avoid duplicated errors for "name already defined".
|
||||
name_already_seen: FxHashMap<Name, Span>,
|
||||
|
@ -927,8 +950,8 @@ pub struct ResolverArenas<'a> {
|
|||
name_bindings: arena::TypedArena<NameBinding<'a>>,
|
||||
import_directives: arena::TypedArena<ImportDirective<'a>>,
|
||||
name_resolutions: arena::TypedArena<RefCell<NameResolution<'a>>>,
|
||||
invocation_data: arena::TypedArena<InvocationData<'a>>,
|
||||
legacy_bindings: arena::TypedArena<LegacyBinding<'a>>,
|
||||
ast_paths: arena::TypedArena<ast::Path>,
|
||||
}
|
||||
|
||||
impl<'a> ResolverArenas<'a> {
|
||||
|
@ -952,13 +975,12 @@ impl<'a> ResolverArenas<'a> {
|
|||
fn alloc_name_resolution(&'a self) -> &'a RefCell<NameResolution<'a>> {
|
||||
self.name_resolutions.alloc(Default::default())
|
||||
}
|
||||
fn alloc_invocation_data(&'a self, expansion_data: InvocationData<'a>)
|
||||
-> &'a InvocationData<'a> {
|
||||
self.invocation_data.alloc(expansion_data)
|
||||
}
|
||||
fn alloc_legacy_binding(&'a self, binding: LegacyBinding<'a>) -> &'a LegacyBinding<'a> {
|
||||
self.legacy_bindings.alloc(binding)
|
||||
}
|
||||
fn alloc_ast_paths(&'a self, paths: &[ast::Path]) -> &'a [ast::Path] {
|
||||
self.ast_paths.alloc_from_iter(paths.iter().cloned())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b> ty::DefIdTree for &'a Resolver<'b> {
|
||||
|
@ -985,11 +1007,11 @@ impl<'a> hir::lowering::Resolver for Resolver<'a> {
|
|||
} else {
|
||||
kw::Crate
|
||||
};
|
||||
let segments = iter::once(Ident::with_empty_ctxt(root))
|
||||
let segments = iter::once(Ident::with_dummy_span(root))
|
||||
.chain(
|
||||
crate_root.into_iter()
|
||||
.chain(components.iter().cloned())
|
||||
.map(Ident::with_empty_ctxt)
|
||||
.map(Ident::with_dummy_span)
|
||||
).map(|i| self.new_ast_path_segment(i)).collect::<Vec<_>>();
|
||||
|
||||
let path = ast::Path {
|
||||
|
@ -997,7 +1019,7 @@ impl<'a> hir::lowering::Resolver for Resolver<'a> {
|
|||
segments,
|
||||
};
|
||||
|
||||
let parent_scope = &self.dummy_parent_scope();
|
||||
let parent_scope = &ParentScope::module(self.graph_root);
|
||||
let res = match self.resolve_ast_path(&path, ns, parent_scope) {
|
||||
Ok(res) => res,
|
||||
Err((span, error)) => {
|
||||
|
@ -1060,18 +1082,17 @@ impl<'a> Resolver<'a> {
|
|||
.collect();
|
||||
|
||||
if !attr::contains_name(&krate.attrs, sym::no_core) {
|
||||
extern_prelude.insert(Ident::with_empty_ctxt(sym::core), Default::default());
|
||||
extern_prelude.insert(Ident::with_dummy_span(sym::core), Default::default());
|
||||
if !attr::contains_name(&krate.attrs, sym::no_std) {
|
||||
extern_prelude.insert(Ident::with_empty_ctxt(sym::std), Default::default());
|
||||
extern_prelude.insert(Ident::with_dummy_span(sym::std), Default::default());
|
||||
if session.rust_2018() {
|
||||
extern_prelude.insert(Ident::with_empty_ctxt(sym::meta), Default::default());
|
||||
extern_prelude.insert(Ident::with_dummy_span(sym::meta), Default::default());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut invocations = FxHashMap::default();
|
||||
invocations.insert(ExpnId::root(),
|
||||
arenas.alloc_invocation_data(InvocationData::root(graph_root)));
|
||||
let mut invocation_parent_scopes = FxHashMap::default();
|
||||
invocation_parent_scopes.insert(ExpnId::root(), ParentScope::module(graph_root));
|
||||
|
||||
let mut macro_defs = FxHashMap::default();
|
||||
macro_defs.insert(ExpnId::root(), root_def_id);
|
||||
|
@ -1143,7 +1164,8 @@ impl<'a> Resolver<'a> {
|
|||
dummy_ext_bang: Lrc::new(SyntaxExtension::dummy_bang(session.edition())),
|
||||
dummy_ext_derive: Lrc::new(SyntaxExtension::dummy_derive(session.edition())),
|
||||
non_macro_attrs: [non_macro_attr(false), non_macro_attr(true)],
|
||||
invocations,
|
||||
invocation_parent_scopes,
|
||||
output_legacy_scopes: Default::default(),
|
||||
macro_defs,
|
||||
local_macro_def_scopes: FxHashMap::default(),
|
||||
name_already_seen: FxHashMap::default(),
|
||||
|
@ -1151,6 +1173,9 @@ impl<'a> Resolver<'a> {
|
|||
struct_constructors: Default::default(),
|
||||
unused_macros: Default::default(),
|
||||
proc_macro_stubs: Default::default(),
|
||||
single_segment_macro_resolutions: Default::default(),
|
||||
multi_segment_macro_resolutions: Default::default(),
|
||||
builtin_attrs: Default::default(),
|
||||
special_derives: Default::default(),
|
||||
active_features:
|
||||
features.declared_lib_features.iter().map(|(feat, ..)| *feat)
|
||||
|
@ -1182,9 +1207,8 @@ impl<'a> Resolver<'a> {
|
|||
f(self, MacroNS);
|
||||
}
|
||||
|
||||
fn is_builtin_macro(&mut self, def_id: Option<DefId>) -> bool {
|
||||
def_id.and_then(|def_id| self.get_macro_by_def_id(def_id))
|
||||
.map_or(false, |ext| ext.is_builtin)
|
||||
fn is_builtin_macro(&mut self, res: Res) -> bool {
|
||||
self.get_macro(res).map_or(false, |ext| ext.is_builtin)
|
||||
}
|
||||
|
||||
fn macro_def(&self, mut ctxt: SyntaxContext) -> DefId {
|
||||
|
@ -1203,6 +1227,7 @@ impl<'a> Resolver<'a> {
|
|||
/// Entry point to crate resolution.
|
||||
pub fn resolve_crate(&mut self, krate: &Crate) {
|
||||
ImportResolver { r: self }.finalize_imports();
|
||||
self.finalize_macro_resolutions();
|
||||
|
||||
self.late_resolve_crate(krate);
|
||||
|
||||
|
@ -1319,13 +1344,15 @@ impl<'a> Resolver<'a> {
|
|||
ScopeSet::AbsolutePath(ns) => (ns, true),
|
||||
ScopeSet::Macro(_) => (MacroNS, false),
|
||||
};
|
||||
// Jump out of trait or enum modules, they do not act as scopes.
|
||||
let module = parent_scope.module.nearest_item_scope();
|
||||
let mut scope = match ns {
|
||||
_ if is_absolute_path => Scope::CrateRoot,
|
||||
TypeNS | ValueNS => Scope::Module(parent_scope.module),
|
||||
TypeNS | ValueNS => Scope::Module(module),
|
||||
MacroNS => Scope::DeriveHelpers,
|
||||
};
|
||||
let mut ident = ident.modern();
|
||||
let mut use_prelude = !parent_scope.module.no_implicit_prelude;
|
||||
let mut use_prelude = !module.no_implicit_prelude;
|
||||
|
||||
loop {
|
||||
let visit = match scope {
|
||||
|
@ -1355,10 +1382,11 @@ impl<'a> Resolver<'a> {
|
|||
LegacyScope::Binding(binding) => Scope::MacroRules(
|
||||
binding.parent_legacy_scope
|
||||
),
|
||||
LegacyScope::Invocation(invoc) => Scope::MacroRules(
|
||||
invoc.output_legacy_scope.get().unwrap_or(invoc.parent_legacy_scope)
|
||||
LegacyScope::Invocation(invoc_id) => Scope::MacroRules(
|
||||
self.output_legacy_scopes.get(&invoc_id).cloned()
|
||||
.unwrap_or(self.invocation_parent_scopes[&invoc_id].legacy)
|
||||
),
|
||||
LegacyScope::Empty => Scope::Module(parent_scope.module),
|
||||
LegacyScope::Empty => Scope::Module(module),
|
||||
}
|
||||
Scope::CrateRoot => match ns {
|
||||
TypeNS => {
|
||||
|
@ -1430,7 +1458,7 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
let (general_span, modern_span) = if ident.name == kw::SelfUpper {
|
||||
// FIXME(jseyfried) improve `Self` hygiene
|
||||
let empty_span = ident.span.with_ctxt(SyntaxContext::empty());
|
||||
let empty_span = ident.span.with_ctxt(SyntaxContext::root());
|
||||
(empty_span, empty_span)
|
||||
} else if ns == TypeNS {
|
||||
let modern_span = ident.span.modern();
|
||||
|
@ -1501,7 +1529,7 @@ impl<'a> Resolver<'a> {
|
|||
self.hygienic_lexical_parent(module, &mut ident.span)
|
||||
};
|
||||
module = unwrap_or!(opt_module, break);
|
||||
let adjusted_parent_scope = &ParentScope { module, ..parent_scope.clone() };
|
||||
let adjusted_parent_scope = &ParentScope { module, ..*parent_scope };
|
||||
let result = self.resolve_ident_in_module_unadjusted(
|
||||
ModuleOrUniformRoot::Module(module),
|
||||
ident,
|
||||
|
@ -1637,7 +1665,7 @@ impl<'a> Resolver<'a> {
|
|||
ModuleOrUniformRoot::Module(m) => {
|
||||
if let Some(def) = ident.span.modernize_and_adjust(m.expansion) {
|
||||
tmp_parent_scope =
|
||||
ParentScope { module: self.macro_def_scope(def), ..parent_scope.clone() };
|
||||
ParentScope { module: self.macro_def_scope(def), ..*parent_scope };
|
||||
adjusted_parent_scope = &tmp_parent_scope;
|
||||
}
|
||||
}
|
||||
|
@ -2624,7 +2652,7 @@ impl<'a> Resolver<'a> {
|
|||
let path = if path_str.starts_with("::") {
|
||||
ast::Path {
|
||||
span,
|
||||
segments: iter::once(Ident::with_empty_ctxt(kw::PathRoot))
|
||||
segments: iter::once(Ident::with_dummy_span(kw::PathRoot))
|
||||
.chain({
|
||||
path_str.split("::").skip(1).map(Ident::from_str)
|
||||
})
|
||||
|
@ -2645,7 +2673,7 @@ impl<'a> Resolver<'a> {
|
|||
let def_id = self.definitions.local_def_id(module_id);
|
||||
self.module_map.get(&def_id).copied().unwrap_or(self.graph_root)
|
||||
});
|
||||
let parent_scope = &ParentScope { module, ..self.dummy_parent_scope() };
|
||||
let parent_scope = &ParentScope::module(module);
|
||||
let res = self.resolve_ast_path(&path, ns, parent_scope).map_err(|_| ())?;
|
||||
Ok((path, res))
|
||||
}
|
||||
|
@ -2713,7 +2741,7 @@ fn module_to_string(module: Module<'_>) -> Option<String> {
|
|||
fn collect_mod(names: &mut Vec<Ident>, module: Module<'_>) {
|
||||
if let ModuleKind::Def(.., name) = module.kind {
|
||||
if let Some(parent) = module.parent {
|
||||
names.push(Ident::with_empty_ctxt(name));
|
||||
names.push(Ident::with_dummy_span(name));
|
||||
collect_mod(names, parent);
|
||||
}
|
||||
} else {
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
//! A bunch of methods and structures more or less related to resolving macros and
|
||||
//! interface provided by `Resolver` to macro expander.
|
||||
|
||||
use crate::{AmbiguityError, AmbiguityKind, AmbiguityErrorMisc, Determinacy};
|
||||
use crate::{CrateLint, Resolver, ResolutionError, Scope, ScopeSet, ParentScope, Weak};
|
||||
use crate::{Module, ModuleKind, NameBinding, PathResult, Segment, ToNameBinding};
|
||||
use crate::{ModuleKind, NameBinding, PathResult, Segment, ToNameBinding};
|
||||
use crate::{ModuleOrUniformRoot, KNOWN_TOOLS};
|
||||
use crate::Namespace::*;
|
||||
use crate::build_reduced_graph::BuildReducedGraphVisitor;
|
||||
use crate::resolve_imports::ImportResolver;
|
||||
use rustc::hir::def::{self, DefKind, NonMacroAttrKind};
|
||||
use rustc::hir::map::DefCollector;
|
||||
|
@ -15,43 +17,18 @@ use syntax::edition::Edition;
|
|||
use syntax::ext::base::{self, Indeterminate, SpecialDerives};
|
||||
use syntax::ext::base::{MacroKind, SyntaxExtension};
|
||||
use syntax::ext::expand::{AstFragment, Invocation, InvocationKind};
|
||||
use syntax::ext::hygiene::{self, ExpnId, ExpnInfo, ExpnKind};
|
||||
use syntax::ext::hygiene::{self, ExpnId, ExpnData, ExpnKind};
|
||||
use syntax::ext::tt::macro_rules;
|
||||
use syntax::feature_gate::{emit_feature_err, is_builtin_attr_name};
|
||||
use syntax::feature_gate::GateIssue;
|
||||
use syntax::symbol::{Symbol, kw, sym};
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
|
||||
use std::cell::Cell;
|
||||
use std::{mem, ptr};
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
|
||||
type Res = def::Res<ast::NodeId>;
|
||||
|
||||
// FIXME: Merge this with `ParentScope`.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct InvocationData<'a> {
|
||||
/// The module in which the macro was invoked.
|
||||
crate module: Module<'a>,
|
||||
/// The legacy scope in which the macro was invoked.
|
||||
/// The invocation path is resolved in this scope.
|
||||
crate parent_legacy_scope: LegacyScope<'a>,
|
||||
/// The legacy scope *produced* by expanding this macro invocation,
|
||||
/// includes all the macro_rules items, other invocations, etc generated by it.
|
||||
/// `None` if the macro is not expanded yet.
|
||||
crate output_legacy_scope: Cell<Option<LegacyScope<'a>>>,
|
||||
}
|
||||
|
||||
impl<'a> InvocationData<'a> {
|
||||
pub fn root(graph_root: Module<'a>) -> Self {
|
||||
InvocationData {
|
||||
module: graph_root,
|
||||
parent_legacy_scope: LegacyScope::Empty,
|
||||
output_legacy_scope: Cell::new(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Binding produced by a `macro_rules` item.
|
||||
/// Not modularized, can shadow previous legacy bindings, etc.
|
||||
#[derive(Debug)]
|
||||
|
@ -75,7 +52,7 @@ pub enum LegacyScope<'a> {
|
|||
Binding(&'a LegacyBinding<'a>),
|
||||
/// The scope introduced by a macro invocation that can potentially
|
||||
/// create a `macro_rules!` macro definition.
|
||||
Invocation(&'a InvocationData<'a>),
|
||||
Invocation(ExpnId),
|
||||
}
|
||||
|
||||
// Macro namespace is separated into two sub-namespaces, one for bang macros and
|
||||
|
@ -120,17 +97,12 @@ impl<'a> base::Resolver for Resolver<'a> {
|
|||
}
|
||||
|
||||
fn get_module_scope(&mut self, id: ast::NodeId) -> ExpnId {
|
||||
let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::default(
|
||||
let expn_id = ExpnId::fresh(Some(ExpnData::default(
|
||||
ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, self.session.edition()
|
||||
));
|
||||
let expn_id = span.ctxt().outer_expn();
|
||||
)));
|
||||
let module = self.module_map[&self.definitions.local_def_id(id)];
|
||||
self.invocation_parent_scopes.insert(expn_id, ParentScope::module(module));
|
||||
self.definitions.set_invocation_parent(expn_id, module.def_id().unwrap().index);
|
||||
self.invocations.insert(expn_id, self.arenas.alloc_invocation_data(InvocationData {
|
||||
module,
|
||||
parent_legacy_scope: LegacyScope::Empty,
|
||||
output_legacy_scope: Cell::new(None),
|
||||
}));
|
||||
expn_id
|
||||
}
|
||||
|
||||
|
@ -144,29 +116,24 @@ impl<'a> base::Resolver for Resolver<'a> {
|
|||
});
|
||||
}
|
||||
|
||||
fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment,
|
||||
derives: &[ExpnId]) {
|
||||
fragment.visit_with(&mut DefCollector::new(&mut self.definitions, expn_id));
|
||||
|
||||
let invocation = self.invocations[&expn_id];
|
||||
invocation.module.unresolved_invocations.borrow_mut().remove(&expn_id);
|
||||
invocation.module.unresolved_invocations.borrow_mut().extend(derives);
|
||||
let parent_def = self.definitions.invocation_parent(expn_id);
|
||||
fn visit_ast_fragment_with_placeholders(
|
||||
&mut self, expansion: ExpnId, fragment: &AstFragment, derives: &[ExpnId]
|
||||
) {
|
||||
// Fill in some data for derives if the fragment is from a derive container.
|
||||
// We are inside the `expansion` now, but other parent scope components are still the same.
|
||||
let parent_scope = ParentScope { expansion, ..self.invocation_parent_scopes[&expansion] };
|
||||
let parent_def = self.definitions.invocation_parent(expansion);
|
||||
self.invocation_parent_scopes.extend(derives.iter().map(|&derive| (derive, parent_scope)));
|
||||
for &derive_invoc_id in derives {
|
||||
self.definitions.set_invocation_parent(derive_invoc_id, parent_def);
|
||||
}
|
||||
self.invocations.extend(derives.iter().map(|&derive| (derive, invocation)));
|
||||
let mut visitor = BuildReducedGraphVisitor {
|
||||
r: self,
|
||||
parent_scope: ParentScope {
|
||||
module: invocation.module,
|
||||
expansion: expn_id,
|
||||
legacy: invocation.parent_legacy_scope,
|
||||
derives: Vec::new(),
|
||||
},
|
||||
};
|
||||
fragment.visit_with(&mut visitor);
|
||||
invocation.output_legacy_scope.set(Some(visitor.parent_scope.legacy));
|
||||
parent_scope.module.unresolved_invocations.borrow_mut().remove(&expansion);
|
||||
parent_scope.module.unresolved_invocations.borrow_mut().extend(derives);
|
||||
|
||||
// Integrate the new AST fragment into all the definition and module structures.
|
||||
fragment.visit_with(&mut DefCollector::new(&mut self.definitions, expansion));
|
||||
let output_legacy_scope = self.build_reduced_graph(fragment, parent_scope);
|
||||
self.output_legacy_scopes.insert(expansion, output_legacy_scope);
|
||||
}
|
||||
|
||||
fn register_builtin_macro(&mut self, ident: ast::Ident, ext: SyntaxExtension) {
|
||||
|
@ -182,13 +149,14 @@ impl<'a> base::Resolver for Resolver<'a> {
|
|||
|
||||
fn resolve_macro_invocation(&mut self, invoc: &Invocation, invoc_id: ExpnId, force: bool)
|
||||
-> Result<Option<Lrc<SyntaxExtension>>, Indeterminate> {
|
||||
let (path, kind, derives_in_scope, after_derive) = match invoc.kind {
|
||||
let parent_scope = self.invocation_parent_scopes[&invoc_id];
|
||||
let (path, kind, derives, after_derive) = match invoc.kind {
|
||||
InvocationKind::Attr { ref attr, ref derives, after_derive, .. } =>
|
||||
(&attr.path, MacroKind::Attr, derives.clone(), after_derive),
|
||||
(&attr.path, MacroKind::Attr, self.arenas.alloc_ast_paths(derives), after_derive),
|
||||
InvocationKind::Bang { ref mac, .. } =>
|
||||
(&mac.path, MacroKind::Bang, Vec::new(), false),
|
||||
(&mac.path, MacroKind::Bang, &[][..], false),
|
||||
InvocationKind::Derive { ref path, .. } =>
|
||||
(path, MacroKind::Derive, Vec::new(), false),
|
||||
(path, MacroKind::Derive, &[][..], false),
|
||||
InvocationKind::DeriveContainer { ref derives, .. } => {
|
||||
// Block expansion of derives in the container until we know whether one of them
|
||||
// is a built-in `Copy`. Skip the resolution if there's only one derive - either
|
||||
|
@ -196,10 +164,9 @@ impl<'a> base::Resolver for Resolver<'a> {
|
|||
// will automatically knows about itself.
|
||||
let mut result = Ok(None);
|
||||
if derives.len() > 1 {
|
||||
let parent_scope = &self.invoc_parent_scope(invoc_id, Vec::new());
|
||||
for path in derives {
|
||||
match self.resolve_macro_path(path, Some(MacroKind::Derive),
|
||||
parent_scope, true, force) {
|
||||
&parent_scope, true, force) {
|
||||
Ok((Some(ref ext), _)) if ext.is_derive_copy => {
|
||||
self.add_derives(invoc.expansion_data.id, SpecialDerives::COPY);
|
||||
return Ok(None);
|
||||
|
@ -213,11 +180,14 @@ impl<'a> base::Resolver for Resolver<'a> {
|
|||
}
|
||||
};
|
||||
|
||||
let parent_scope = &self.invoc_parent_scope(invoc_id, derives_in_scope);
|
||||
// Derives are not included when `invocations` are collected, so we have to add them here.
|
||||
let parent_scope = &ParentScope { derives, ..parent_scope };
|
||||
let (ext, res) = self.smart_resolve_macro_path(path, kind, parent_scope, force)?;
|
||||
|
||||
let span = invoc.span();
|
||||
invoc.expansion_data.id.set_expn_info(ext.expn_info(span, fast_print_path(path)));
|
||||
invoc.expansion_data.id.set_expn_data(
|
||||
ext.expn_data(parent_scope.expansion, span, fast_print_path(path))
|
||||
);
|
||||
|
||||
if let Res::Def(_, def_id) = res {
|
||||
if after_derive {
|
||||
|
@ -251,20 +221,6 @@ impl<'a> base::Resolver for Resolver<'a> {
|
|||
}
|
||||
|
||||
impl<'a> Resolver<'a> {
|
||||
pub fn dummy_parent_scope(&self) -> ParentScope<'a> {
|
||||
self.invoc_parent_scope(ExpnId::root(), Vec::new())
|
||||
}
|
||||
|
||||
fn invoc_parent_scope(&self, invoc_id: ExpnId, derives: Vec<ast::Path>) -> ParentScope<'a> {
|
||||
let invoc = self.invocations[&invoc_id];
|
||||
ParentScope {
|
||||
module: invoc.module.nearest_item_scope(),
|
||||
expansion: invoc_id.parent(),
|
||||
legacy: invoc.parent_legacy_scope,
|
||||
derives,
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve macro path with error reporting and recovery.
|
||||
fn smart_resolve_macro_path(
|
||||
&mut self,
|
||||
|
@ -346,8 +302,7 @@ impl<'a> Resolver<'a> {
|
|||
|
||||
// Possibly apply the macro helper hack
|
||||
if kind == Some(MacroKind::Bang) && path.len() == 1 &&
|
||||
path[0].ident.span.ctxt().outer_expn_info()
|
||||
.map_or(false, |info| info.local_inner_macros) {
|
||||
path[0].ident.span.ctxt().outer_expn_data().local_inner_macros {
|
||||
let root = Ident::new(kw::DollarCrate, path[0].ident.span);
|
||||
path.insert(0, Segment::from_ident(root));
|
||||
}
|
||||
|
@ -367,8 +322,8 @@ impl<'a> Resolver<'a> {
|
|||
|
||||
if trace {
|
||||
let kind = kind.expect("macro kind must be specified if tracing is enabled");
|
||||
parent_scope.module.multi_segment_macro_resolutions.borrow_mut()
|
||||
.push((path, path_span, kind, parent_scope.clone(), res.ok()));
|
||||
self.multi_segment_macro_resolutions
|
||||
.push((path, path_span, kind, *parent_scope, res.ok()));
|
||||
}
|
||||
|
||||
self.prohibit_imported_non_macro_attrs(None, res.ok(), path_span);
|
||||
|
@ -384,8 +339,8 @@ impl<'a> Resolver<'a> {
|
|||
|
||||
if trace {
|
||||
let kind = kind.expect("macro kind must be specified if tracing is enabled");
|
||||
parent_scope.module.single_segment_macro_resolutions.borrow_mut()
|
||||
.push((path[0].ident, kind, parent_scope.clone(), binding.ok()));
|
||||
self.single_segment_macro_resolutions
|
||||
.push((path[0].ident, kind, *parent_scope, binding.ok()));
|
||||
}
|
||||
|
||||
let res = binding.map(|binding| binding.res());
|
||||
|
@ -454,8 +409,8 @@ impl<'a> Resolver<'a> {
|
|||
let result = match scope {
|
||||
Scope::DeriveHelpers => {
|
||||
let mut result = Err(Determinacy::Determined);
|
||||
for derive in &parent_scope.derives {
|
||||
let parent_scope = &ParentScope { derives: Vec::new(), ..*parent_scope };
|
||||
for derive in parent_scope.derives {
|
||||
let parent_scope = &ParentScope { derives: &[], ..*parent_scope };
|
||||
match this.resolve_macro_path(derive, Some(MacroKind::Derive),
|
||||
parent_scope, true, force) {
|
||||
Ok((Some(ext), _)) => if ext.helper_attrs.contains(&ident.name) {
|
||||
|
@ -475,8 +430,9 @@ impl<'a> Resolver<'a> {
|
|||
Scope::MacroRules(legacy_scope) => match legacy_scope {
|
||||
LegacyScope::Binding(legacy_binding) if ident == legacy_binding.ident =>
|
||||
Ok((legacy_binding.binding, Flags::MACRO_RULES)),
|
||||
LegacyScope::Invocation(invoc) if invoc.output_legacy_scope.get().is_none() =>
|
||||
Err(Determinacy::Undetermined),
|
||||
LegacyScope::Invocation(invoc_id)
|
||||
if !this.output_legacy_scopes.contains_key(&invoc_id) =>
|
||||
Err(Determinacy::Undetermined),
|
||||
_ => Err(Determinacy::Determined),
|
||||
}
|
||||
Scope::CrateRoot => {
|
||||
|
@ -500,7 +456,7 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
}
|
||||
Scope::Module(module) => {
|
||||
let adjusted_parent_scope = &ParentScope { module, ..parent_scope.clone() };
|
||||
let adjusted_parent_scope = &ParentScope { module, ..*parent_scope };
|
||||
let binding = this.resolve_ident_in_module_unadjusted_ext(
|
||||
ModuleOrUniformRoot::Module(module),
|
||||
ident,
|
||||
|
@ -575,7 +531,7 @@ impl<'a> Resolver<'a> {
|
|||
false,
|
||||
path_span,
|
||||
) {
|
||||
if use_prelude || this.is_builtin_macro(binding.res().opt_def_id()) {
|
||||
if use_prelude || this.is_builtin_macro(binding.res()) {
|
||||
result = Ok((binding, Flags::PRELUDE | Flags::MISC_FROM_PRELUDE));
|
||||
}
|
||||
}
|
||||
|
@ -694,7 +650,7 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn finalize_current_module_macro_resolutions(&mut self, module: Module<'a>) {
|
||||
crate fn finalize_macro_resolutions(&mut self) {
|
||||
let check_consistency = |this: &mut Self, path: &[Segment], span, kind: MacroKind,
|
||||
initial_res: Option<Res>, res: Res| {
|
||||
if let Some(initial_res) = initial_res {
|
||||
|
@ -730,8 +686,7 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
};
|
||||
|
||||
let macro_resolutions =
|
||||
mem::take(&mut *module.multi_segment_macro_resolutions.borrow_mut());
|
||||
let macro_resolutions = mem::take(&mut self.multi_segment_macro_resolutions);
|
||||
for (mut path, path_span, kind, parent_scope, initial_res) in macro_resolutions {
|
||||
// FIXME: Path resolution will ICE if segment IDs present.
|
||||
for seg in &mut path { seg.id = None; }
|
||||
|
@ -758,8 +713,7 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
let macro_resolutions =
|
||||
mem::take(&mut *module.single_segment_macro_resolutions.borrow_mut());
|
||||
let macro_resolutions = mem::take(&mut self.single_segment_macro_resolutions);
|
||||
for (ident, kind, parent_scope, initial_binding) in macro_resolutions {
|
||||
match self.early_resolve_ident_in_lexical_scope(ident, ScopeSet::Macro(kind),
|
||||
&parent_scope, true, true, ident.span) {
|
||||
|
@ -784,7 +738,7 @@ impl<'a> Resolver<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
let builtin_attrs = mem::take(&mut *module.builtin_attrs.borrow_mut());
|
||||
let builtin_attrs = mem::take(&mut self.builtin_attrs);
|
||||
for (ident, parent_scope) in builtin_attrs {
|
||||
let _ = self.early_resolve_ident_in_lexical_scope(
|
||||
ident, ScopeSet::Macro(MacroKind::Attr), &parent_scope, true, true, ident.span
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
//! A bunch of methods and structures more or less related to resolving imports.
|
||||
|
||||
use ImportDirectiveSubclass::*;
|
||||
|
||||
use crate::{AmbiguityError, AmbiguityKind, AmbiguityErrorMisc};
|
||||
|
@ -394,7 +396,7 @@ impl<'a> Resolver<'a> {
|
|||
match ident.span.glob_adjust(module.expansion, glob_import.span) {
|
||||
Some(Some(def)) => {
|
||||
tmp_parent_scope =
|
||||
ParentScope { module: self.macro_def_scope(def), ..parent_scope.clone() };
|
||||
ParentScope { module: self.macro_def_scope(def), ..*parent_scope };
|
||||
adjusted_parent_scope = &tmp_parent_scope;
|
||||
}
|
||||
Some(None) => {}
|
||||
|
@ -848,7 +850,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
|
|||
directive.vis.set(orig_vis);
|
||||
let module = match path_res {
|
||||
PathResult::Module(module) => {
|
||||
// Consistency checks, analogous to `finalize_current_module_macro_resolutions`.
|
||||
// Consistency checks, analogous to `finalize_macro_resolutions`.
|
||||
if let Some(initial_module) = directive.imported_module.get() {
|
||||
if !ModuleOrUniformRoot::same_def(module, initial_module) && no_ambiguity {
|
||||
span_bug!(directive.span, "inconsistent resolution for an import");
|
||||
|
@ -973,7 +975,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
|
|||
|
||||
match binding {
|
||||
Ok(binding) => {
|
||||
// Consistency checks, analogous to `finalize_current_module_macro_resolutions`.
|
||||
// Consistency checks, analogous to `finalize_macro_resolutions`.
|
||||
let initial_res = source_bindings[ns].get().map(|initial_binding| {
|
||||
all_ns_err = false;
|
||||
if let Some(target_binding) = target_bindings[ns].get() {
|
||||
|
|
|
@ -1156,7 +1156,7 @@ fn escape(s: String) -> String {
|
|||
// Helper function to determine if a span came from a
|
||||
// macro expansion or syntax extension.
|
||||
fn generated_code(span: Span) -> bool {
|
||||
span.ctxt() != NO_EXPANSION || span.is_dummy()
|
||||
span.from_expansion() || span.is_dummy()
|
||||
}
|
||||
|
||||
// DefId::index is a newtype and so the JSON serialisation is ugly. Therefore
|
||||
|
|
|
@ -347,9 +347,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
sp,
|
||||
);
|
||||
|
||||
// Check the `expn_info()` to see if this is a macro; if so, it's hard to
|
||||
// extract the text and make a good suggestion, so don't bother.
|
||||
let is_macro = sp.ctxt().outer_expn_info().is_some();
|
||||
// If the span is from a macro, then it's hard to extract the text
|
||||
// and make a good suggestion, so don't bother.
|
||||
let is_macro = sp.from_expansion();
|
||||
|
||||
match (&expr.node, &expected.sty, &checked_ty.sty) {
|
||||
(_, &ty::Ref(_, exp, _), &ty::Ref(_, check, _)) => match (&exp.sty, &check.sty) {
|
||||
|
|
|
@ -985,7 +985,7 @@ impl hir::intravisit::Visitor<'tcx> for UsePlacementFinder<'tcx> {
|
|||
hir::ItemKind::Use(..) => {
|
||||
// Don't suggest placing a `use` before the prelude
|
||||
// import or other generated ones.
|
||||
if item.span.ctxt().outer_expn_info().is_none() {
|
||||
if !item.span.from_expansion() {
|
||||
self.span = Some(item.span.shrink_to_lo());
|
||||
self.found_use = true;
|
||||
return;
|
||||
|
@ -995,7 +995,7 @@ impl hir::intravisit::Visitor<'tcx> for UsePlacementFinder<'tcx> {
|
|||
hir::ItemKind::ExternCrate(_) => {}
|
||||
// ...but do place them before the first other item.
|
||||
_ => if self.span.map_or(true, |span| item.span < span ) {
|
||||
if item.span.ctxt().outer_expn_info().is_none() {
|
||||
if !item.span.from_expansion() {
|
||||
// Don't insert between attributes and an item.
|
||||
if item.attrs.is_empty() {
|
||||
self.span = Some(item.span.shrink_to_lo());
|
||||
|
|
|
@ -2943,7 +2943,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
(PlaceOp::Index, false) => (self.tcx.lang_items().index_trait(), sym::index),
|
||||
(PlaceOp::Index, true) => (self.tcx.lang_items().index_mut_trait(), sym::index_mut),
|
||||
};
|
||||
(tr, ast::Ident::with_empty_ctxt(name))
|
||||
(tr, ast::Ident::with_dummy_span(name))
|
||||
}
|
||||
|
||||
fn try_overloaded_place_op(&self,
|
||||
|
|
|
@ -930,7 +930,7 @@ impl Attributes {
|
|||
if attr.check_name(sym::enable) {
|
||||
if let Some(feat) = attr.value_str() {
|
||||
let meta = attr::mk_name_value_item_str(
|
||||
Ident::with_empty_ctxt(sym::target_feature), feat, DUMMY_SP
|
||||
Ident::with_dummy_span(sym::target_feature), feat, DUMMY_SP
|
||||
);
|
||||
if let Ok(feat_cfg) = Cfg::parse(&meta) {
|
||||
cfg &= feat_cfg;
|
||||
|
|
|
@ -4,6 +4,7 @@ use rustc::hir::def_id::DefId;
|
|||
use rustc::hir;
|
||||
use rustc::lint as lint;
|
||||
use rustc::ty;
|
||||
use rustc_resolve::ParentScope;
|
||||
use syntax;
|
||||
use syntax::ast::{self, Ident};
|
||||
use syntax::ext::base::SyntaxExtensionKind;
|
||||
|
@ -431,7 +432,7 @@ fn macro_resolve(cx: &DocContext<'_>, path_str: &str) -> Option<Res> {
|
|||
let path = ast::Path::from_ident(Ident::from_str(path_str));
|
||||
cx.enter_resolver(|resolver| {
|
||||
if let Ok((Some(ext), res)) = resolver.resolve_macro_path(
|
||||
&path, None, &resolver.dummy_parent_scope(), false, false
|
||||
&path, None, &ParentScope::module(resolver.graph_root), false, false
|
||||
) {
|
||||
if let SyntaxExtensionKind::LegacyBang { .. } = ext.kind {
|
||||
return Some(res.map_id(|_| panic!("unexpected id")));
|
||||
|
|
|
@ -327,7 +327,7 @@ impl Attribute {
|
|||
if self.is_sugared_doc {
|
||||
let comment = self.value_str().unwrap();
|
||||
let meta = mk_name_value_item_str(
|
||||
Ident::with_empty_ctxt(sym::doc),
|
||||
Ident::with_dummy_span(sym::doc),
|
||||
Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())),
|
||||
DUMMY_SP,
|
||||
);
|
||||
|
@ -412,7 +412,7 @@ pub fn mk_sugared_doc_attr(text: Symbol, span: Span) -> Attribute {
|
|||
Attribute {
|
||||
id: mk_attr_id(),
|
||||
style,
|
||||
path: Path::from_ident(Ident::with_empty_ctxt(sym::doc).with_span_pos(span)),
|
||||
path: Path::from_ident(Ident::with_dummy_span(sym::doc).with_span_pos(span)),
|
||||
tokens: MetaItemKind::NameValue(lit).tokens(span),
|
||||
is_sugared_doc: true,
|
||||
span,
|
||||
|
|
|
@ -172,7 +172,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
|
|||
(descriptions.len(), ecx.expr_vec(span, descriptions))
|
||||
});
|
||||
|
||||
let static_ = ecx.lifetime(span, Ident::with_empty_ctxt(kw::StaticLifetime));
|
||||
let static_ = ecx.lifetime(span, Ident::with_dummy_span(kw::StaticLifetime));
|
||||
let ty_str = ecx.ty_rptr(
|
||||
span,
|
||||
ecx.ty_ident(span, ecx.ident_of("str")),
|
||||
|
|
|
@ -15,7 +15,7 @@ use crate::tokenstream::{self, TokenStream, TokenTree};
|
|||
use errors::{DiagnosticBuilder, DiagnosticId};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use syntax_pos::{FileName, Span, MultiSpan, DUMMY_SP};
|
||||
use syntax_pos::hygiene::{ExpnInfo, ExpnKind};
|
||||
use syntax_pos::hygiene::{ExpnData, ExpnKind};
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::sync::{self, Lrc};
|
||||
|
@ -640,10 +640,11 @@ impl SyntaxExtension {
|
|||
SyntaxExtension::default(SyntaxExtensionKind::NonMacroAttr { mark_used }, edition)
|
||||
}
|
||||
|
||||
pub fn expn_info(&self, call_site: Span, descr: Symbol) -> ExpnInfo {
|
||||
ExpnInfo {
|
||||
call_site,
|
||||
pub fn expn_data(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnData {
|
||||
ExpnData {
|
||||
kind: ExpnKind::Macro(self.macro_kind(), descr),
|
||||
parent,
|
||||
call_site,
|
||||
def_site: self.span,
|
||||
default_transparency: self.default_transparency,
|
||||
allow_internal_unstable: self.allow_internal_unstable.clone(),
|
||||
|
@ -707,7 +708,7 @@ pub struct ExpansionData {
|
|||
|
||||
/// One of these is made during expansion and incrementally updated as we go;
|
||||
/// when a macro expansion occurs, the resulting nodes have the `backtrace()
|
||||
/// -> expn_info` of their expansion context stored into their span.
|
||||
/// -> expn_data` of their expansion context stored into their span.
|
||||
pub struct ExtCtxt<'a> {
|
||||
pub parse_sess: &'a parse::ParseSess,
|
||||
pub ecfg: expand::ExpansionConfig<'a>,
|
||||
|
@ -756,13 +757,10 @@ impl<'a> ExtCtxt<'a> {
|
|||
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
|
||||
pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config }
|
||||
pub fn call_site(&self) -> Span {
|
||||
match self.current_expansion.id.expn_info() {
|
||||
Some(expn_info) => expn_info.call_site,
|
||||
None => DUMMY_SP,
|
||||
}
|
||||
self.current_expansion.id.expn_data().call_site
|
||||
}
|
||||
pub fn backtrace(&self) -> SyntaxContext {
|
||||
SyntaxContext::empty().apply_mark(self.current_expansion.id)
|
||||
SyntaxContext::root().apply_mark(self.current_expansion.id)
|
||||
}
|
||||
|
||||
/// Returns span for the macro which originally caused the current expansion to happen.
|
||||
|
@ -772,17 +770,13 @@ impl<'a> ExtCtxt<'a> {
|
|||
let mut ctxt = self.backtrace();
|
||||
let mut last_macro = None;
|
||||
loop {
|
||||
if ctxt.outer_expn_info().map_or(None, |info| {
|
||||
if info.kind.descr() == sym::include {
|
||||
// Stop going up the backtrace once include! is encountered
|
||||
return None;
|
||||
}
|
||||
ctxt = info.call_site.ctxt();
|
||||
last_macro = Some(info.call_site);
|
||||
Some(())
|
||||
}).is_none() {
|
||||
break
|
||||
let expn_data = ctxt.outer_expn_data();
|
||||
// Stop going up the backtrace once include! is encountered
|
||||
if expn_data.is_root() || expn_data.kind.descr() == sym::include {
|
||||
break;
|
||||
}
|
||||
ctxt = expn_data.call_site.ctxt();
|
||||
last_macro = Some(expn_data.call_site);
|
||||
}
|
||||
last_macro
|
||||
}
|
||||
|
@ -872,7 +866,7 @@ impl<'a> ExtCtxt<'a> {
|
|||
pub fn std_path(&self, components: &[Symbol]) -> Vec<ast::Ident> {
|
||||
let def_site = DUMMY_SP.apply_mark(self.current_expansion.id);
|
||||
iter::once(Ident::new(kw::DollarCrate, def_site))
|
||||
.chain(components.iter().map(|&s| Ident::with_empty_ctxt(s)))
|
||||
.chain(components.iter().map(|&s| Ident::with_dummy_span(s)))
|
||||
.collect()
|
||||
}
|
||||
pub fn name_of(&self, st: &str) -> ast::Name {
|
||||
|
|
|
@ -340,7 +340,7 @@ impl<'a> ExtCtxt<'a> {
|
|||
self.expr_path(self.path_ident(span, id))
|
||||
}
|
||||
pub fn expr_self(&self, span: Span) -> P<ast::Expr> {
|
||||
self.expr_ident(span, Ident::with_empty_ctxt(kw::SelfLower))
|
||||
self.expr_ident(span, Ident::with_dummy_span(kw::SelfLower))
|
||||
}
|
||||
|
||||
pub fn expr_binary(&self, sp: Span, op: ast::BinOpKind,
|
||||
|
|
|
@ -5,7 +5,7 @@ use crate::source_map::respan;
|
|||
use crate::config::StripUnconfigured;
|
||||
use crate::ext::base::*;
|
||||
use crate::ext::proc_macro::collect_derives;
|
||||
use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnInfo, ExpnKind};
|
||||
use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind};
|
||||
use crate::ext::tt::macro_rules::annotate_err_with_kind;
|
||||
use crate::ext::placeholders::{placeholder, PlaceholderExpander};
|
||||
use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
|
||||
|
@ -353,7 +353,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
|
|||
derives.reserve(traits.len());
|
||||
invocations.reserve(traits.len());
|
||||
for path in traits {
|
||||
let expn_id = ExpnId::fresh(self.cx.current_expansion.id, None);
|
||||
let expn_id = ExpnId::fresh(None);
|
||||
derives.push(expn_id);
|
||||
invocations.push(Invocation {
|
||||
kind: InvocationKind::Derive { path, item: item.clone() },
|
||||
|
@ -475,11 +475,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
|
|||
}
|
||||
|
||||
if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit {
|
||||
let info = self.cx.current_expansion.id.expn_info().unwrap();
|
||||
let expn_data = self.cx.current_expansion.id.expn_data();
|
||||
let suggested_limit = self.cx.ecfg.recursion_limit * 2;
|
||||
let mut err = self.cx.struct_span_err(info.call_site,
|
||||
let mut err = self.cx.struct_span_err(expn_data.call_site,
|
||||
&format!("recursion limit reached while expanding the macro `{}`",
|
||||
info.kind.descr()));
|
||||
expn_data.kind.descr()));
|
||||
err.help(&format!(
|
||||
"consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
|
||||
suggested_limit));
|
||||
|
@ -759,7 +759,7 @@ impl<'a> Parser<'a> {
|
|||
let msg = format!("macro expansion ignores token `{}` and any following",
|
||||
self.this_token_to_string());
|
||||
// Avoid emitting backtrace info twice.
|
||||
let def_site_span = self.token.span.with_ctxt(SyntaxContext::empty());
|
||||
let def_site_span = self.token.span.with_ctxt(SyntaxContext::root());
|
||||
let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
|
||||
err.span_label(span, "caused by the macro expansion here");
|
||||
let msg = format!(
|
||||
|
@ -796,17 +796,20 @@ struct InvocationCollector<'a, 'b> {
|
|||
|
||||
impl<'a, 'b> InvocationCollector<'a, 'b> {
|
||||
fn collect(&mut self, fragment_kind: AstFragmentKind, kind: InvocationKind) -> AstFragment {
|
||||
// Expansion info for all the collected invocations is set upon their resolution,
|
||||
// Expansion data for all the collected invocations is set upon their resolution,
|
||||
// with exception of the derive container case which is not resolved and can get
|
||||
// its expansion info immediately.
|
||||
let expn_info = match &kind {
|
||||
InvocationKind::DeriveContainer { item, .. } => Some(ExpnInfo::default(
|
||||
ExpnKind::Macro(MacroKind::Attr, sym::derive),
|
||||
item.span(), self.cx.parse_sess.edition,
|
||||
)),
|
||||
// its expansion data immediately.
|
||||
let expn_data = match &kind {
|
||||
InvocationKind::DeriveContainer { item, .. } => Some(ExpnData {
|
||||
parent: self.cx.current_expansion.id,
|
||||
..ExpnData::default(
|
||||
ExpnKind::Macro(MacroKind::Attr, sym::derive),
|
||||
item.span(), self.cx.parse_sess.edition,
|
||||
)
|
||||
}),
|
||||
_ => None,
|
||||
};
|
||||
let expn_id = ExpnId::fresh(self.cx.current_expansion.id, expn_info);
|
||||
let expn_id = ExpnId::fresh(expn_data);
|
||||
self.invocations.push(Invocation {
|
||||
kind,
|
||||
fragment_kind,
|
||||
|
@ -1249,21 +1252,21 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
|
|||
let include_info = vec![
|
||||
ast::NestedMetaItem::MetaItem(
|
||||
attr::mk_name_value_item_str(
|
||||
Ident::with_empty_ctxt(sym::file),
|
||||
Ident::with_dummy_span(sym::file),
|
||||
file,
|
||||
DUMMY_SP,
|
||||
),
|
||||
),
|
||||
ast::NestedMetaItem::MetaItem(
|
||||
attr::mk_name_value_item_str(
|
||||
Ident::with_empty_ctxt(sym::contents),
|
||||
Ident::with_dummy_span(sym::contents),
|
||||
src_interned,
|
||||
DUMMY_SP,
|
||||
),
|
||||
),
|
||||
];
|
||||
|
||||
let include_ident = Ident::with_empty_ctxt(sym::include);
|
||||
let include_ident = Ident::with_dummy_span(sym::include);
|
||||
let item = attr::mk_list_item(include_ident, include_info);
|
||||
items.push(ast::NestedMetaItem::MetaItem(item));
|
||||
}
|
||||
|
@ -1325,7 +1328,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
|
|||
}
|
||||
}
|
||||
|
||||
let meta = attr::mk_list_item(Ident::with_empty_ctxt(sym::doc), items);
|
||||
let meta = attr::mk_list_item(Ident::with_dummy_span(sym::doc), items);
|
||||
*at = attr::Attribute {
|
||||
span: at.span,
|
||||
id: at.id,
|
||||
|
|
|
@ -362,10 +362,10 @@ pub(crate) struct Rustc<'a> {
|
|||
impl<'a> Rustc<'a> {
|
||||
pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
|
||||
// No way to determine def location for a proc macro right now, so use call location.
|
||||
let location = cx.current_expansion.id.expn_info().unwrap().call_site;
|
||||
let location = cx.current_expansion.id.expn_data().call_site;
|
||||
let to_span = |transparency| {
|
||||
location.with_ctxt(
|
||||
SyntaxContext::empty()
|
||||
SyntaxContext::root()
|
||||
.apply_mark_with_transparency(cx.current_expansion.id, transparency),
|
||||
)
|
||||
};
|
||||
|
@ -677,7 +677,7 @@ impl server::Span for Rustc<'_> {
|
|||
self.sess.source_map().lookup_char_pos(span.lo()).file
|
||||
}
|
||||
fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
|
||||
span.ctxt().outer_expn_info().map(|i| i.call_site)
|
||||
span.parent()
|
||||
}
|
||||
fn source(&mut self, span: Self::Span) -> Self::Span {
|
||||
span.source_callsite()
|
||||
|
|
|
@ -4,7 +4,7 @@ use crate::symbol::{sym, Symbol};
|
|||
use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
|
||||
|
||||
use errors::{FatalError, DiagnosticBuilder};
|
||||
use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION};
|
||||
use syntax_pos::{BytePos, Pos, Span};
|
||||
use rustc_lexer::Base;
|
||||
use rustc_lexer::unescape;
|
||||
|
||||
|
@ -84,7 +84,7 @@ impl<'a> StringReader<'a> {
|
|||
|
||||
|
||||
fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
|
||||
self.override_span.unwrap_or_else(|| Span::new(lo, hi, NO_EXPANSION))
|
||||
self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
|
||||
}
|
||||
|
||||
/// Returns the next token, including trivia like whitespace or comments.
|
||||
|
|
|
@ -1,41 +1,17 @@
|
|||
use super::*;
|
||||
|
||||
use crate::ast::CrateConfig;
|
||||
use crate::symbol::Symbol;
|
||||
use crate::source_map::{SourceMap, FilePathMapping};
|
||||
use crate::feature_gate::UnstableFeatures;
|
||||
use crate::parse::token;
|
||||
use crate::diagnostics::plugin::ErrorMap;
|
||||
use crate::with_default_globals;
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
use syntax_pos::{BytePos, Span, NO_EXPANSION, edition::Edition};
|
||||
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
|
||||
use rustc_data_structures::sync::{Lock, Once};
|
||||
use errors::{Handler, emitter::EmitterWriter};
|
||||
use syntax_pos::{BytePos, Span};
|
||||
|
||||
fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
|
||||
let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
|
||||
Some(sm.clone()),
|
||||
false,
|
||||
false,
|
||||
false);
|
||||
ParseSess {
|
||||
span_diagnostic: errors::Handler::with_emitter(true, None, Box::new(emitter)),
|
||||
unstable_features: UnstableFeatures::from_environment(),
|
||||
config: CrateConfig::default(),
|
||||
included_mod_stack: Lock::new(Vec::new()),
|
||||
source_map: sm,
|
||||
missing_fragment_specifiers: Lock::new(FxHashSet::default()),
|
||||
raw_identifier_spans: Lock::new(Vec::new()),
|
||||
registered_diagnostics: Lock::new(ErrorMap::new()),
|
||||
buffered_lints: Lock::new(vec![]),
|
||||
edition: Edition::from_session(),
|
||||
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
|
||||
param_attr_spans: Lock::new(Vec::new()),
|
||||
let_chains_spans: Lock::new(Vec::new()),
|
||||
async_closure_spans: Lock::new(Vec::new()),
|
||||
injected_crate_name: Once::new(),
|
||||
}
|
||||
let emitter = EmitterWriter::new(Box::new(io::sink()), Some(sm.clone()), false, false, false);
|
||||
ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
|
||||
}
|
||||
|
||||
// open a string reader for the given string
|
||||
|
@ -61,7 +37,7 @@ fn t1() {
|
|||
let tok1 = string_reader.next_token();
|
||||
let tok2 = Token::new(
|
||||
mk_ident("fn"),
|
||||
Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
|
||||
Span::with_root_ctxt(BytePos(21), BytePos(23)),
|
||||
);
|
||||
assert_eq!(tok1.kind, tok2.kind);
|
||||
assert_eq!(tok1.span, tok2.span);
|
||||
|
@ -71,7 +47,7 @@ fn t1() {
|
|||
assert_eq!(string_reader.pos.clone(), BytePos(28));
|
||||
let tok4 = Token::new(
|
||||
mk_ident("main"),
|
||||
Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
|
||||
Span::with_root_ctxt(BytePos(24), BytePos(28)),
|
||||
);
|
||||
assert_eq!(tok3.kind, tok4.kind);
|
||||
assert_eq!(tok3.span, tok4.span);
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
|
||||
use super::StringReader;
|
||||
use errors::{Applicability, DiagnosticBuilder};
|
||||
use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION, symbol::kw};
|
||||
use syntax_pos::{BytePos, Pos, Span, symbol::kw};
|
||||
use crate::parse::token;
|
||||
|
||||
#[rustfmt::skip] // for line breaks
|
||||
|
@ -343,7 +343,7 @@ crate fn check_for_substitution<'a>(
|
|||
None => return None,
|
||||
};
|
||||
|
||||
let span = Span::new(pos, pos + Pos::from_usize(ch.len_utf8()), NO_EXPANSION);
|
||||
let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8()));
|
||||
|
||||
let (ascii_name, token) = match ASCII_ARRAY.iter().find(|&&(c, _, _)| c == ascii_char) {
|
||||
Some((_ascii_char, ascii_name, token)) => (ascii_name, token),
|
||||
|
@ -362,10 +362,9 @@ crate fn check_for_substitution<'a>(
|
|||
ascii_char, ascii_name
|
||||
);
|
||||
err.span_suggestion(
|
||||
Span::new(
|
||||
Span::with_root_ctxt(
|
||||
pos,
|
||||
pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
|
||||
NO_EXPANSION,
|
||||
),
|
||||
&msg,
|
||||
format!("\"{}\"", s),
|
||||
|
|
|
@ -16,6 +16,7 @@ use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic,
|
|||
use rustc_data_structures::sync::{Lrc, Lock, Once};
|
||||
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
|
||||
use syntax_pos::edition::Edition;
|
||||
use syntax_pos::hygiene::ExpnId;
|
||||
|
||||
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
|
||||
use std::borrow::Cow;
|
||||
|
@ -86,7 +87,7 @@ impl ParseSess {
|
|||
included_mod_stack: Lock::new(vec![]),
|
||||
source_map,
|
||||
buffered_lints: Lock::new(vec![]),
|
||||
edition: Edition::from_session(),
|
||||
edition: ExpnId::root().expn_data().edition,
|
||||
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
|
||||
param_attr_spans: Lock::new(Vec::new()),
|
||||
let_chains_spans: Lock::new(Vec::new()),
|
||||
|
|
|
@ -13,7 +13,6 @@ mod generics;
|
|||
use crate::ast::{self, AttrStyle, Attribute, Arg, BindingMode, StrStyle, SelfKind};
|
||||
use crate::ast::{FnDecl, Ident, IsAsync, MacDelimiter, Mutability, TyKind};
|
||||
use crate::ast::{Visibility, VisibilityKind, Unsafety, CrateSugar};
|
||||
use crate::ext::hygiene::SyntaxContext;
|
||||
use crate::source_map::{self, respan};
|
||||
use crate::parse::{SeqSep, literal, token};
|
||||
use crate::parse::lexer::UnmatchedBrace;
|
||||
|
@ -1101,7 +1100,7 @@ impl<'a> Parser<'a> {
|
|||
|
||||
crate fn process_potential_macro_variable(&mut self) {
|
||||
self.token = match self.token.kind {
|
||||
token::Dollar if self.token.span.ctxt() != SyntaxContext::empty() &&
|
||||
token::Dollar if self.token.span.from_expansion() &&
|
||||
self.look_ahead(1, |t| t.is_ident()) => {
|
||||
self.bump();
|
||||
let name = match self.token.kind {
|
||||
|
|
|
@ -60,7 +60,7 @@ impl<'a> Parser<'a> {
|
|||
// Record that we fetched the mod from an external file
|
||||
if warn {
|
||||
let attr = attr::mk_attr_outer(
|
||||
attr::mk_word_item(Ident::with_empty_ctxt(sym::warn_directory_ownership)));
|
||||
attr::mk_word_item(Ident::with_dummy_span(sym::warn_directory_ownership)));
|
||||
attr::mark_known(&attr);
|
||||
attrs.push(attr);
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ use crate::symbol::{kw, sym};
|
|||
use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
|
||||
use crate::tokenstream::{DelimSpan, TokenTree, TokenStream};
|
||||
use crate::with_default_globals;
|
||||
use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
|
||||
use syntax_pos::{Span, BytePos, Pos};
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
|
@ -27,7 +27,7 @@ fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
|
|||
|
||||
// produce a syntax_pos::span
|
||||
fn sp(a: u32, b: u32) -> Span {
|
||||
Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
|
||||
Span::with_root_ctxt(BytePos(a), BytePos(b))
|
||||
}
|
||||
|
||||
/// Parse a string, return an expr
|
||||
|
|
|
@ -123,13 +123,13 @@ pub fn print_crate<'a>(cm: &'a SourceMap,
|
|||
// of the feature gate, so we fake them up here.
|
||||
|
||||
// #![feature(prelude_import)]
|
||||
let pi_nested = attr::mk_nested_word_item(ast::Ident::with_empty_ctxt(sym::prelude_import));
|
||||
let list = attr::mk_list_item(ast::Ident::with_empty_ctxt(sym::feature), vec![pi_nested]);
|
||||
let pi_nested = attr::mk_nested_word_item(ast::Ident::with_dummy_span(sym::prelude_import));
|
||||
let list = attr::mk_list_item(ast::Ident::with_dummy_span(sym::feature), vec![pi_nested]);
|
||||
let fake_attr = attr::mk_attr_inner(list);
|
||||
s.print_attribute(&fake_attr);
|
||||
|
||||
// #![no_std]
|
||||
let no_std_meta = attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::no_std));
|
||||
let no_std_meta = attr::mk_word_item(ast::Ident::with_dummy_span(sym::no_std));
|
||||
let fake_attr = attr::mk_attr_inner(no_std_meta);
|
||||
s.print_attribute(&fake_attr);
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
//! information, source code snippets, etc.
|
||||
|
||||
pub use syntax_pos::*;
|
||||
pub use syntax_pos::hygiene::{ExpnKind, ExpnInfo};
|
||||
pub use syntax_pos::hygiene::{ExpnKind, ExpnData};
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::stable_hasher::StableHasher;
|
||||
|
@ -29,14 +29,15 @@ mod tests;
|
|||
|
||||
/// Returns the span itself if it doesn't come from a macro expansion,
|
||||
/// otherwise return the call site span up to the `enclosing_sp` by
|
||||
/// following the `expn_info` chain.
|
||||
/// following the `expn_data` chain.
|
||||
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
|
||||
let call_site1 = sp.ctxt().outer_expn_info().map(|ei| ei.call_site);
|
||||
let call_site2 = enclosing_sp.ctxt().outer_expn_info().map(|ei| ei.call_site);
|
||||
match (call_site1, call_site2) {
|
||||
(None, _) => sp,
|
||||
(Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp,
|
||||
(Some(call_site1), _) => original_sp(call_site1, enclosing_sp),
|
||||
let expn_data1 = sp.ctxt().outer_expn_data();
|
||||
let expn_data2 = enclosing_sp.ctxt().outer_expn_data();
|
||||
if expn_data1.is_root() ||
|
||||
!expn_data2.is_root() && expn_data1.call_site == expn_data2.call_site {
|
||||
sp
|
||||
} else {
|
||||
original_sp(expn_data1.call_site, enclosing_sp)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -91,7 +91,7 @@ fn t6() {
|
|||
fn t7() {
|
||||
// Test span_to_lines for a span ending at the end of source_file
|
||||
let sm = init_source_map();
|
||||
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
|
||||
let span = Span::with_root_ctxt(BytePos(12), BytePos(23));
|
||||
let file_lines = sm.span_to_lines(span).unwrap();
|
||||
|
||||
assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into());
|
||||
|
@ -107,7 +107,7 @@ fn span_from_selection(input: &str, selection: &str) -> Span {
|
|||
assert_eq!(input.len(), selection.len());
|
||||
let left_index = selection.find('~').unwrap() as u32;
|
||||
let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index);
|
||||
Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION)
|
||||
Span::with_root_ctxt(BytePos(left_index), BytePos(right_index + 1))
|
||||
}
|
||||
|
||||
/// Tests span_to_snippet and span_to_lines for a span converting 3
|
||||
|
@ -137,7 +137,7 @@ fn span_to_snippet_and_lines_spanning_multiple_lines() {
|
|||
fn t8() {
|
||||
// Test span_to_snippet for a span ending at the end of source_file
|
||||
let sm = init_source_map();
|
||||
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
|
||||
let span = Span::with_root_ctxt(BytePos(12), BytePos(23));
|
||||
let snippet = sm.span_to_snippet(span);
|
||||
|
||||
assert_eq!(snippet, Ok("second line".to_string()));
|
||||
|
@ -147,7 +147,7 @@ fn t8() {
|
|||
fn t9() {
|
||||
// Test span_to_str for a span ending at the end of source_file
|
||||
let sm = init_source_map();
|
||||
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
|
||||
let span = Span::with_root_ctxt(BytePos(12), BytePos(23));
|
||||
let sstr = sm.span_to_string(span);
|
||||
|
||||
assert_eq!(sstr, "blork.rs:2:1: 2:12");
|
||||
|
@ -198,10 +198,9 @@ impl SourceMapExtension for SourceMap {
|
|||
let lo = hi + offset;
|
||||
hi = lo + substring.len();
|
||||
if i == n {
|
||||
let span = Span::new(
|
||||
let span = Span::with_root_ctxt(
|
||||
BytePos(lo as u32 + file.start_pos.0),
|
||||
BytePos(hi as u32 + file.start_pos.0),
|
||||
NO_EXPANSION,
|
||||
);
|
||||
assert_eq!(&self.span_to_snippet(span).unwrap()[..],
|
||||
substring);
|
||||
|
|
|
@ -9,7 +9,7 @@ use crate::with_default_globals;
|
|||
use errors::emitter::EmitterWriter;
|
||||
use errors::Handler;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan};
|
||||
use syntax_pos::{BytePos, Span, MultiSpan};
|
||||
|
||||
use std::io;
|
||||
use std::io::prelude::*;
|
||||
|
@ -169,7 +169,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {
|
|||
let start = make_pos(file_text, start);
|
||||
let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends
|
||||
assert!(start <= end);
|
||||
Span::new(BytePos(start as u32), BytePos(end as u32), NO_EXPANSION)
|
||||
Span::with_root_ctxt(BytePos(start as u32), BytePos(end as u32))
|
||||
}
|
||||
|
||||
fn make_pos(file_text: &str, pos: &Position) -> usize {
|
||||
|
|
|
@ -3,14 +3,14 @@ use super::*;
|
|||
use crate::ast::Name;
|
||||
use crate::with_default_globals;
|
||||
use crate::tests::string_to_stream;
|
||||
use syntax_pos::{Span, BytePos, NO_EXPANSION};
|
||||
use syntax_pos::{Span, BytePos};
|
||||
|
||||
fn string_to_ts(string: &str) -> TokenStream {
|
||||
string_to_stream(string.to_owned())
|
||||
}
|
||||
|
||||
fn sp(a: u32, b: u32) -> Span {
|
||||
Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
|
||||
Span::with_root_ctxt(BytePos(a), BytePos(b))
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -35,7 +35,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt<'_>,
|
|||
match annitem.node {
|
||||
ItemKind::Struct(_, Generics { ref params, .. }) |
|
||||
ItemKind::Enum(_, Generics { ref params, .. }) => {
|
||||
let container_id = cx.current_expansion.id.parent();
|
||||
let container_id = cx.current_expansion.id.expn_data().parent;
|
||||
if cx.resolver.has_derives(container_id, SpecialDerives::COPY) &&
|
||||
!params.iter().any(|param| match param.kind {
|
||||
ast::GenericParamKind::Type { .. } => true,
|
||||
|
@ -129,7 +129,7 @@ fn cs_clone_shallow(name: &str,
|
|||
if is_union {
|
||||
// let _: AssertParamIsCopy<Self>;
|
||||
let self_ty =
|
||||
cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_empty_ctxt(kw::SelfUpper)));
|
||||
cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_dummy_span(kw::SelfUpper)));
|
||||
assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy");
|
||||
} else {
|
||||
match *substr.fields {
|
||||
|
|
|
@ -13,7 +13,7 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt<'_>,
|
|||
mitem: &MetaItem,
|
||||
item: &Annotatable,
|
||||
push: &mut dyn FnMut(Annotatable)) {
|
||||
cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::EQ);
|
||||
cx.resolver.add_derives(cx.current_expansion.id.expn_data().parent, SpecialDerives::EQ);
|
||||
|
||||
let inline = cx.meta_word(span, sym::inline);
|
||||
let hidden = cx.meta_list_item_word(span, sym::hidden);
|
||||
|
|
|
@ -13,7 +13,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt<'_>,
|
|||
mitem: &MetaItem,
|
||||
item: &Annotatable,
|
||||
push: &mut dyn FnMut(Annotatable)) {
|
||||
cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::PARTIAL_EQ);
|
||||
cx.resolver.add_derives(cx.current_expansion.id.expn_data().parent, SpecialDerives::PARTIAL_EQ);
|
||||
|
||||
// structures are equal if all fields are equal, and non equal, if
|
||||
// any fields are not equal or if the enum variants are different
|
||||
|
|
|
@ -82,7 +82,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
|
|||
|
||||
let expr = cx.expr_method_call(span,
|
||||
builder_expr.clone(),
|
||||
Ident::with_empty_ctxt(sym::field),
|
||||
Ident::with_dummy_span(sym::field),
|
||||
vec![field]);
|
||||
|
||||
// Use `let _ = expr;` to avoid triggering the
|
||||
|
@ -106,7 +106,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
|
|||
let field = cx.expr_addr_of(field.span, field);
|
||||
let expr = cx.expr_method_call(span,
|
||||
builder_expr.clone(),
|
||||
Ident::with_empty_ctxt(sym::field),
|
||||
Ident::with_dummy_span(sym::field),
|
||||
vec![name, field]);
|
||||
stmts.push(stmt_let_undescore(cx, span, expr));
|
||||
}
|
||||
|
|
|
@ -425,7 +425,7 @@ impl<'a> TraitDef<'a> {
|
|||
return;
|
||||
}
|
||||
};
|
||||
let container_id = cx.current_expansion.id.parent();
|
||||
let container_id = cx.current_expansion.id.expn_data().parent;
|
||||
let is_always_copy =
|
||||
cx.resolver.has_derives(container_id, SpecialDerives::COPY) &&
|
||||
has_no_type_params;
|
||||
|
@ -928,7 +928,7 @@ impl<'a> MethodDef<'a> {
|
|||
|
||||
let args = {
|
||||
let self_args = explicit_self.map(|explicit_self| {
|
||||
let ident = Ident::with_empty_ctxt(kw::SelfLower).with_span_pos(trait_.span);
|
||||
let ident = Ident::with_dummy_span(kw::SelfLower).with_span_pos(trait_.span);
|
||||
ast::Arg::from_self(ThinVec::default(), explicit_self, ident)
|
||||
});
|
||||
let nonself_args = arg_types.into_iter()
|
||||
|
|
|
@ -23,13 +23,13 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
|||
let sp = sp.apply_mark(cx.current_expansion.id);
|
||||
let e = match env::var(&*var.as_str()) {
|
||||
Err(..) => {
|
||||
let lt = cx.lifetime(sp, Ident::with_empty_ctxt(kw::StaticLifetime));
|
||||
let lt = cx.lifetime(sp, Ident::with_dummy_span(kw::StaticLifetime));
|
||||
cx.expr_path(cx.path_all(sp,
|
||||
true,
|
||||
cx.std_path(&[sym::option, sym::Option, sym::None]),
|
||||
vec![GenericArg::Type(cx.ty_rptr(sp,
|
||||
cx.ty_ident(sp,
|
||||
Ident::with_empty_ctxt(sym::str)),
|
||||
Ident::with_dummy_span(sym::str)),
|
||||
Some(lt),
|
||||
ast::Mutability::Immutable))],
|
||||
vec![]))
|
||||
|
|
|
@ -29,7 +29,7 @@ pub fn expand(
|
|||
};
|
||||
|
||||
// Generate a bunch of new items using the AllocFnFactory
|
||||
let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id));
|
||||
let span = item.span.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id));
|
||||
let f = AllocFnFactory {
|
||||
span,
|
||||
kind: AllocatorKind::Global,
|
||||
|
@ -44,7 +44,7 @@ pub fn expand(
|
|||
let const_ty = ecx.ty(span, TyKind::Tup(Vec::new()));
|
||||
let const_body = ecx.expr_block(ecx.block(span, stmts));
|
||||
let const_item =
|
||||
ecx.item_const(span, Ident::with_empty_ctxt(kw::Underscore), const_ty, const_body);
|
||||
ecx.item_const(span, Ident::with_dummy_span(kw::Underscore), const_ty, const_body);
|
||||
|
||||
// Return the original item and the new methods.
|
||||
vec![Annotatable::Item(item), Annotatable::Item(const_item)]
|
||||
|
@ -120,7 +120,7 @@ impl AllocFnFactory<'_, '_> {
|
|||
) -> P<Expr> {
|
||||
match *ty {
|
||||
AllocatorTy::Layout => {
|
||||
let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize));
|
||||
let usize = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::usize));
|
||||
let ty_usize = self.cx.ty_path(usize);
|
||||
let size = ident();
|
||||
let align = ident();
|
||||
|
@ -178,12 +178,12 @@ impl AllocFnFactory<'_, '_> {
|
|||
}
|
||||
|
||||
fn usize(&self) -> P<Ty> {
|
||||
let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize));
|
||||
let usize = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::usize));
|
||||
self.cx.ty_path(usize)
|
||||
}
|
||||
|
||||
fn ptr_u8(&self) -> P<Ty> {
|
||||
let u8 = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::u8));
|
||||
let u8 = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::u8));
|
||||
let ty_u8 = self.cx.ty_path(u8);
|
||||
self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable)
|
||||
}
|
||||
|
|
|
@ -42,7 +42,7 @@ pub mod test_harness;
|
|||
|
||||
pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, edition: Edition) {
|
||||
let mut register = |name, kind| resolver.register_builtin_macro(
|
||||
Ident::with_empty_ctxt(name), SyntaxExtension {
|
||||
Ident::with_dummy_span(name), SyntaxExtension {
|
||||
is_builtin: true, ..SyntaxExtension::default(kind, edition)
|
||||
},
|
||||
);
|
||||
|
|
|
@@ -11,7 +11,7 @@ use syntax::source_map::respan;
use syntax::symbol::sym;
use syntax::tokenstream::*;
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::hygiene::{ExpnId, ExpnInfo, ExpnKind, MacroKind};
use syntax_pos::hygiene::{ExpnData, ExpnKind, MacroKind};

use std::mem;

@@ -43,12 +43,12 @@ pub fn inject(
) {
    if !named_exts.is_empty() {
        let mut extra_items = Vec::new();
        let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable(
        let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable(
            ExpnKind::Macro(MacroKind::Attr, sym::plugin), DUMMY_SP, edition,
            [sym::rustc_attrs][..].into(),
        ));
        for (name, ext) in named_exts {
            resolver.register_builtin_macro(Ident::with_empty_ctxt(name), ext);
            resolver.register_builtin_macro(Ident::with_dummy_span(name), ext);
            extra_items.push(plugin_macro_def(name, span));
        }
        // The `macro_rules` items must be inserted before any other items.
@@ -3,10 +3,9 @@ use std::mem;
use smallvec::smallvec;
use syntax::ast::{self, Ident};
use syntax::attr;
use syntax::source_map::{ExpnInfo, ExpnKind, respan};
use syntax::source_map::{ExpnData, ExpnKind, respan};
use syntax::ext::base::{ExtCtxt, MacroKind};
use syntax::ext::expand::{AstFragment, ExpansionConfig};
use syntax::ext::hygiene::ExpnId;
use syntax::ext::proc_macro::is_proc_macro_attr;
use syntax::parse::ParseSess;
use syntax::ptr::P;

@@ -328,7 +327,7 @@ fn mk_decls(
    custom_attrs: &[ProcMacroDef],
    custom_macros: &[ProcMacroDef],
) -> P<ast::Item> {
    let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable(
    let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable(
        ExpnKind::Macro(MacroKind::Attr, sym::proc_macro), DUMMY_SP, cx.parse_sess.edition,
        [sym::rustc_attrs, sym::proc_macro_internals][..].into(),
    ));

@@ -337,7 +336,7 @@ fn mk_decls(
    let doc = cx.meta_list(span, sym::doc, vec![hidden]);
    let doc_hidden = cx.attribute(doc);

    let proc_macro = Ident::with_empty_ctxt(sym::proc_macro);
    let proc_macro = Ident::with_dummy_span(sym::proc_macro);
    let krate = cx.item(span,
        proc_macro,
        Vec::new(),

@@ -349,7 +348,7 @@ fn mk_decls(
    let custom_derive = Ident::from_str("custom_derive");
    let attr = Ident::from_str("attr");
    let bang = Ident::from_str("bang");
    let crate_kw = Ident::with_empty_ctxt(kw::Crate);
    let crate_kw = Ident::with_dummy_span(kw::Crate);

    let decls = {
        let local_path = |sp: Span, name| {
@@ -1,8 +1,8 @@
use syntax::{ast, attr};
use syntax::edition::Edition;
use syntax::ext::hygiene::{ExpnId, MacroKind};
use syntax::ext::hygiene::MacroKind;
use syntax::ptr::P;
use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned, respan};
use syntax::source_map::{ExpnData, ExpnKind, dummy_spanned, respan};
use syntax::symbol::{Ident, Symbol, kw, sym};
use syntax_pos::DUMMY_SP;

@@ -32,7 +32,7 @@ pub fn inject(
    // HACK(eddyb) gensym the injected crates on the Rust 2018 edition,
    // so they don't accidentally interfere with the new import paths.
    let orig_name_sym = Symbol::intern(orig_name_str);
    let orig_name_ident = Ident::with_empty_ctxt(orig_name_sym);
    let orig_name_ident = Ident::with_dummy_span(orig_name_sym);
    let (rename, orig_name) = if rust_2018 {
        (orig_name_ident.gensym(), Some(orig_name_sym))
    } else {

@@ -40,7 +40,7 @@ pub fn inject(
    };
    krate.module.items.insert(0, P(ast::Item {
        attrs: vec![attr::mk_attr_outer(
            attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::macro_use))
            attr::mk_word_item(ast::Ident::with_dummy_span(sym::macro_use))
        )],
        vis: dummy_spanned(ast::VisibilityKind::Inherited),
        node: ast::ItemKind::ExternCrate(alt_std_name.or(orig_name)),

@@ -55,7 +55,7 @@ pub fn inject(
    // the prelude.
    let name = names[0];

    let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable(
    let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable(
        ExpnKind::Macro(MacroKind::Attr, sym::std_inject), DUMMY_SP, edition,
        [sym::prelude_import][..].into(),
    ));

@@ -66,7 +66,7 @@ pub fn inject(
        vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
        node: ast::ItemKind::Use(P(ast::UseTree {
            prefix: ast::Path {
                segments: iter::once(ast::Ident::with_empty_ctxt(kw::PathRoot))
                segments: iter::once(ast::Ident::with_dummy_span(kw::PathRoot))
                    .chain(
                        [name, "prelude", "v1"].iter().cloned()
                            .map(ast::Ident::from_str)
@@ -29,7 +29,7 @@ pub fn expand_test_case(

    if !ecx.ecfg.should_test { return vec![]; }

    let sp = attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id));
    let sp = attr_sp.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id));
    let mut item = anno_item.expect_item();
    item = item.map(|mut item| {
        item.vis = respan(item.vis.span, ast::VisibilityKind::Public);

@@ -93,7 +93,7 @@ pub fn expand_test_or_bench(
        return vec![Annotatable::Item(item)];
    }

    let ctxt = SyntaxContext::empty().apply_mark(cx.current_expansion.id);
    let ctxt = SyntaxContext::root().apply_mark(cx.current_expansion.id);
    let (sp, attr_sp) = (item.span.with_ctxt(ctxt), attr_sp.with_ctxt(ctxt));

    // Gensym "test" so we can extern crate without conflicting with any local names
@@ -5,14 +5,13 @@ use smallvec::{smallvec, SmallVec};
use syntax::ast::{self, Ident};
use syntax::attr;
use syntax::entry::{self, EntryPointType};
use syntax::ext::base::{ExtCtxt, Resolver};
use syntax::ext::base::{ExtCtxt, MacroKind, Resolver};
use syntax::ext::expand::{AstFragment, ExpansionConfig};
use syntax::ext::hygiene::{ExpnId, MacroKind};
use syntax::feature_gate::Features;
use syntax::mut_visit::{*, ExpectOne};
use syntax::parse::ParseSess;
use syntax::ptr::P;
use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned};
use syntax::source_map::{ExpnData, ExpnKind, dummy_spanned};
use syntax::symbol::{kw, sym, Symbol};
use syntax_pos::{Span, DUMMY_SP};

@@ -150,7 +149,7 @@ impl MutVisitor for EntryPointCleaner {
            EntryPointType::MainAttr |
            EntryPointType::Start =>
                item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| {
                    let allow_ident = Ident::with_empty_ctxt(sym::allow);
                    let allow_ident = Ident::with_dummy_span(sym::allow);
                    let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code"));
                    let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]);
                    let allow_dead_code = attr::mk_attr_outer(allow_dead_code_item);

@@ -191,7 +190,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>,
                   tests: Vec<Ident>,
                   tested_submods: Vec<(Ident, Ident)>)
                   -> (P<ast::Item>, Ident) {
    let super_ = Ident::with_empty_ctxt(kw::Super);
    let super_ = Ident::with_dummy_span(kw::Super);

    let items = tests.into_iter().map(|r| {
        cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public),

@@ -269,12 +268,12 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
    // #![main]
    // test::test_main_static(&[..tests]);
    // }
    let sp = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable(
    let sp = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable(
        ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, cx.ext_cx.parse_sess.edition,
        [sym::main, sym::test, sym::rustc_attrs][..].into(),
    ));
    let ecx = &cx.ext_cx;
    let test_id = Ident::with_empty_ctxt(sym::test);
    let test_id = Ident::with_dummy_span(sym::test);

    // test::test_main_static(...)
    let mut test_runner = cx.test_runner.clone().unwrap_or(
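The call-site changes in the hunks above all follow one pattern: `fresh_expansion` no longer takes a separate parent `ExpnId`, because the parent now lives inside `ExpnData` and the `ExpnData` constructors default it to the root expansion; alongside that, `Ident::with_empty_ctxt` becomes `Ident::with_dummy_span`. Below is a minimal standalone sketch of that shape change; the types and functions are simplified stand-ins, not the real rustc items.

```rust
#![allow(dead_code)]

// Stand-in for the compiler's expansion ID.
#[derive(Clone, Copy, Debug, PartialEq)]
struct ExpnId(u32);

impl ExpnId {
    const fn root() -> ExpnId { ExpnId(0) }
}

#[derive(Clone, Debug)]
enum ExpnKind { Root, Macro(&'static str) }

#[derive(Clone, Debug)]
struct ExpnData {
    kind: ExpnKind,
    parent: ExpnId, // previously passed separately to `fresh_expansion`
    allow_internal_unstable: Option<Vec<&'static str>>,
}

impl ExpnData {
    fn default(kind: ExpnKind) -> ExpnData {
        // The parent defaults to the root expansion, so ordinary callers never mention it.
        ExpnData { kind, parent: ExpnId::root(), allow_internal_unstable: None }
    }

    fn allow_unstable(kind: ExpnKind, features: Vec<&'static str>) -> ExpnData {
        ExpnData { allow_internal_unstable: Some(features), ..ExpnData::default(kind) }
    }
}

// Old shape: fresh_expansion(ExpnId::root(), expn_data)
// New shape: fresh_expansion(expn_data)
fn fresh_expansion(expn_data: ExpnData) -> ExpnId {
    println!("expanding {:?} under parent {:?}", expn_data.kind, expn_data.parent);
    ExpnId(1)
}

fn main() {
    let _id = fresh_expansion(ExpnData::allow_unstable(
        ExpnKind::Macro("test_case"),
        vec!["rustc_attrs"],
    ));
}
```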
@@ -1,7 +1,6 @@
use crate::symbol::{Symbol, sym};
use std::fmt;
use std::str::FromStr;
use crate::GLOBALS;

/// The edition of the compiler (RFC 2052)
#[derive(Clone, Copy, Hash, PartialEq, PartialOrd, Debug, RustcEncodable, RustcDecodable, Eq)]

@@ -39,10 +38,6 @@ impl fmt::Display for Edition {
}

impl Edition {
    pub fn from_session() -> Edition {
        GLOBALS.with(|globals| globals.edition)
    }

    pub fn lint_name(&self) -> &'static str {
        match *self {
            Edition::Edition2015 => "rust_2015_compatibility",
@@ -13,8 +13,8 @@
//
// This explains why `HygieneData`, `SyntaxContext` and `ExpnId` have interfaces
// with a certain amount of redundancy in them. For example,
// `SyntaxContext::outer_expn_info` combines `SyntaxContext::outer` and
// `ExpnId::expn_info` so that two `HygieneData` accesses can be performed within
// `SyntaxContext::outer_expn_data` combines `SyntaxContext::outer` and
// `ExpnId::expn_data` so that two `HygieneData` accesses can be performed within
// a single `HygieneData::with` call.
//
// It also explains why many functions appear in `HygieneData` and again in

@@ -56,16 +56,6 @@ struct SyntaxContextData {
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct ExpnId(u32);

// FIXME: Find a way to merge this with `ExpnInfo`.
#[derive(Debug)]
struct InternalExpnData {
    parent: ExpnId,
    /// Each expansion should have an associated expansion info, but sometimes there's a delay
    /// between creation of an expansion ID and obtaining its info (e.g. macros are collected
    /// first and then resolved later), so we use an `Option` here.
    expn_info: Option<ExpnInfo>,
}

/// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug, RustcEncodable, RustcDecodable)]

@@ -86,8 +76,8 @@ pub enum Transparency {
}

impl ExpnId {
    pub fn fresh(parent: ExpnId, expn_info: Option<ExpnInfo>) -> Self {
        HygieneData::with(|data| data.fresh_expn(parent, expn_info))
    pub fn fresh(expn_data: Option<ExpnData>) -> Self {
        HygieneData::with(|data| data.fresh_expn(expn_data))
    }

    /// The ID of the theoretical expansion that generates freshly parsed, unexpanded AST.

@@ -107,21 +97,16 @@ impl ExpnId {
    }

    #[inline]
    pub fn parent(self) -> ExpnId {
        HygieneData::with(|data| data.parent_expn(self))
    pub fn expn_data(self) -> ExpnData {
        HygieneData::with(|data| data.expn_data(self).clone())
    }

    #[inline]
    pub fn expn_info(self) -> Option<ExpnInfo> {
        HygieneData::with(|data| data.expn_info(self).cloned())
    }

    #[inline]
    pub fn set_expn_info(self, info: ExpnInfo) {
    pub fn set_expn_data(self, expn_data: ExpnData) {
        HygieneData::with(|data| {
            let old_info = &mut data.expn_data[self.0 as usize].expn_info;
            assert!(old_info.is_none(), "expansion info is reset for an expansion ID");
            *old_info = Some(info);
            let old_expn_data = &mut data.expn_data[self.0 as usize];
            assert!(old_expn_data.is_none(), "expansion data is reset for an expansion ID");
            *old_expn_data = Some(expn_data);
        })
    }

@@ -139,12 +124,9 @@ impl ExpnId {
    #[inline]
    pub fn looks_like_proc_macro_derive(self) -> bool {
        HygieneData::with(|data| {
            if data.default_transparency(self) == Transparency::Opaque {
                if let Some(expn_info) = data.expn_info(self) {
                    if let ExpnKind::Macro(MacroKind::Derive, _) = expn_info.kind {
                        return true;
                    }
                }
            let expn_data = data.expn_data(self);
            if let ExpnKind::Macro(MacroKind::Derive, _) = expn_data.kind {
                return expn_data.default_transparency == Transparency::Opaque;
            }
            false
        })

@@ -153,7 +135,10 @@ impl ExpnId {

#[derive(Debug)]
crate struct HygieneData {
    expn_data: Vec<InternalExpnData>,
    /// Each expansion should have an associated expansion data, but sometimes there's a delay
    /// between creation of an expansion ID and obtaining its data (e.g. macros are collected
    /// first and then resolved later), so we use an `Option` here.
    expn_data: Vec<Option<ExpnData>>,
    syntax_context_data: Vec<SyntaxContextData>,
    syntax_context_map: FxHashMap<(SyntaxContext, ExpnId, Transparency), SyntaxContext>,
}

@@ -161,10 +146,7 @@ crate struct HygieneData {
impl HygieneData {
    crate fn new(edition: Edition) -> Self {
        HygieneData {
            expn_data: vec![InternalExpnData {
                parent: ExpnId::root(),
                expn_info: Some(ExpnInfo::default(ExpnKind::Root, DUMMY_SP, edition)),
            }],
            expn_data: vec![Some(ExpnData::default(ExpnKind::Root, DUMMY_SP, edition))],
            syntax_context_data: vec![SyntaxContextData {
                outer_expn: ExpnId::root(),
                outer_transparency: Transparency::Opaque,

@@ -181,25 +163,14 @@ impl HygieneData {
        GLOBALS.with(|globals| f(&mut *globals.hygiene_data.borrow_mut()))
    }

    fn fresh_expn(&mut self, parent: ExpnId, expn_info: Option<ExpnInfo>) -> ExpnId {
        self.expn_data.push(InternalExpnData { parent, expn_info });
    fn fresh_expn(&mut self, expn_data: Option<ExpnData>) -> ExpnId {
        self.expn_data.push(expn_data);
        ExpnId(self.expn_data.len() as u32 - 1)
    }

    fn parent_expn(&self, expn_id: ExpnId) -> ExpnId {
        self.expn_data[expn_id.0 as usize].parent
    }

    fn expn_info(&self, expn_id: ExpnId) -> Option<&ExpnInfo> {
        if expn_id != ExpnId::root() {
            Some(self.expn_data[expn_id.0 as usize].expn_info.as_ref()
                     .expect("no expansion info for an expansion ID"))
        } else {
            // FIXME: Some code relies on `expn_info().is_none()` meaning "no expansion".
            // Introduce a method for checking for "no expansion" instead and always return
            // `ExpnInfo` from this function instead of the `Option`.
            None
        }
    fn expn_data(&self, expn_id: ExpnId) -> &ExpnData {
        self.expn_data[expn_id.0 as usize].as_ref()
            .expect("no expansion data for an expansion ID")
    }

    fn is_descendant_of(&self, mut expn_id: ExpnId, ancestor: ExpnId) -> bool {

@@ -207,17 +178,11 @@ impl HygieneData {
            if expn_id == ExpnId::root() {
                return false;
            }
            expn_id = self.parent_expn(expn_id);
            expn_id = self.expn_data(expn_id).parent;
        }
        true
    }

    fn default_transparency(&self, expn_id: ExpnId) -> Transparency {
        self.expn_info(expn_id).map_or(
            Transparency::SemiTransparent, |einfo| einfo.default_transparency
        )
    }

    fn modern(&self, ctxt: SyntaxContext) -> SyntaxContext {
        self.syntax_context_data[ctxt.0 as usize].opaque
    }

@@ -246,7 +211,7 @@ impl HygieneData {

    fn marks(&self, mut ctxt: SyntaxContext) -> Vec<(ExpnId, Transparency)> {
        let mut marks = Vec::new();
        while ctxt != SyntaxContext::empty() {
        while ctxt != SyntaxContext::root() {
            marks.push((self.outer_expn(ctxt), self.outer_transparency(ctxt)));
            ctxt = self.parent_ctxt(ctxt);
        }

@@ -255,12 +220,8 @@ impl HygieneData {
    }

    fn walk_chain(&self, mut span: Span, to: SyntaxContext) -> Span {
        while span.ctxt() != crate::NO_EXPANSION && span.ctxt() != to {
            if let Some(info) = self.expn_info(self.outer_expn(span.ctxt())) {
                span = info.call_site;
            } else {
                break;
            }
        while span.from_expansion() && span.ctxt() != to {
            span = self.expn_data(self.outer_expn(span.ctxt())).call_site;
        }
        span
    }

@@ -275,7 +236,9 @@ impl HygieneData {

    fn apply_mark(&mut self, ctxt: SyntaxContext, expn_id: ExpnId) -> SyntaxContext {
        assert_ne!(expn_id, ExpnId::root());
        self.apply_mark_with_transparency(ctxt, expn_id, self.default_transparency(expn_id))
        self.apply_mark_with_transparency(
            ctxt, expn_id, self.expn_data(expn_id).default_transparency
        )
    }

    fn apply_mark_with_transparency(&mut self, ctxt: SyntaxContext, expn_id: ExpnId,

@@ -285,15 +248,14 @@ impl HygieneData {
            return self.apply_mark_internal(ctxt, expn_id, transparency);
        }

        let call_site_ctxt =
            self.expn_info(expn_id).map_or(SyntaxContext::empty(), |info| info.call_site.ctxt());
        let call_site_ctxt = self.expn_data(expn_id).call_site.ctxt();
        let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
            self.modern(call_site_ctxt)
        } else {
            self.modern_and_legacy(call_site_ctxt)
        };

        if call_site_ctxt == SyntaxContext::empty() {
        if call_site_ctxt == SyntaxContext::root() {
            return self.apply_mark_internal(ctxt, expn_id, transparency);
        }

@@ -400,7 +362,7 @@ pub fn update_dollar_crate_names(mut get_name: impl FnMut(SyntaxContext) -> Symb

impl SyntaxContext {
    #[inline]
    pub const fn empty() -> Self {
    pub const fn root() -> Self {
        SyntaxContext(0)
    }

@@ -578,20 +540,20 @@ impl SyntaxContext {
        HygieneData::with(|data| data.outer_expn(self))
    }

    /// `ctxt.outer_expn_info()` is equivalent to but faster than
    /// `ctxt.outer_expn().expn_info()`.
    /// `ctxt.outer_expn_data()` is equivalent to but faster than
    /// `ctxt.outer_expn().expn_data()`.
    #[inline]
    pub fn outer_expn_info(self) -> Option<ExpnInfo> {
        HygieneData::with(|data| data.expn_info(data.outer_expn(self)).cloned())
    pub fn outer_expn_data(self) -> ExpnData {
        HygieneData::with(|data| data.expn_data(data.outer_expn(self)).clone())
    }

    /// `ctxt.outer_expn_with_info()` is equivalent to but faster than
    /// `{ let outer = ctxt.outer_expn(); (outer, outer.expn_info()) }`.
    /// `ctxt.outer_expn_with_data()` is equivalent to but faster than
    /// `{ let outer = ctxt.outer_expn(); (outer, outer.expn_data()) }`.
    #[inline]
    pub fn outer_expn_with_info(self) -> (ExpnId, Option<ExpnInfo>) {
    pub fn outer_expn_with_data(self) -> (ExpnId, ExpnData) {
        HygieneData::with(|data| {
            let outer = data.outer_expn(self);
            (outer, data.expn_info(outer).cloned())
            (outer, data.expn_data(outer).clone())
        })
    }

@@ -612,10 +574,10 @@ impl Span {
    /// other compiler-generated code to set per-span properties like allowed unstable features.
    /// The returned span belongs to the created expansion and has the new properties,
    /// but its location is inherited from the current span.
    pub fn fresh_expansion(self, parent: ExpnId, expn_info: ExpnInfo) -> Span {
    pub fn fresh_expansion(self, expn_data: ExpnData) -> Span {
        HygieneData::with(|data| {
            let expn_id = data.fresh_expn(parent, Some(expn_info));
            self.with_ctxt(data.apply_mark(SyntaxContext::empty(), expn_id))
            let expn_id = data.fresh_expn(Some(expn_data));
            self.with_ctxt(data.apply_mark(SyntaxContext::root(), expn_id))
        })
    }
}

@@ -623,8 +585,12 @@ impl Span {
/// A subset of properties from both macro definition and macro call available through global data.
/// Avoid using this if you have access to the original definition or call structures.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct ExpnInfo {
pub struct ExpnData {
    // --- The part unique to each expansion.
    /// The kind of this expansion - macro or compiler desugaring.
    pub kind: ExpnKind,
    /// The expansion that produced this expansion.
    pub parent: ExpnId,
    /// The location of the actual macro invocation or syntax sugar , e.g.
    /// `let x = foo!();` or `if let Some(y) = x {}`
    ///

@@ -632,18 +598,18 @@ pub struct ExpnInfo {
    /// `foo!()` invoked `bar!()` internally, and there was an
    /// expression inside `bar!`; the call_site of the expression in
    /// the expansion would point to the `bar!` invocation; that
    /// call_site span would have its own ExpnInfo, with the call_site
    /// call_site span would have its own ExpnData, with the call_site
    /// pointing to the `foo!` invocation.
    pub call_site: Span,
    /// The kind of this expansion - macro or compiler desugaring.
    pub kind: ExpnKind,

    // --- The part specific to the macro/desugaring definition.
    // --- FIXME: Share it between expansions with the same definition.
    // --- It may be reasonable to share this part between expansions with the same definition,
    // --- but such sharing is known to bring some minor inconveniences without also bringing
    // --- noticeable perf improvements (PR #62898).
    /// The span of the macro definition (possibly dummy).
    /// This span serves only informational purpose and is not used for resolution.
    pub def_site: Span,
    /// Transparency used by `apply_mark` for the expansion with this expansion info by default.
    /// Transparency used by `apply_mark` for the expansion with this expansion data by default.
    pub default_transparency: Transparency,
    /// List of #[unstable]/feature-gated features that the macro is allowed to use
    /// internally without forcing the whole crate to opt-in

@@ -659,12 +625,13 @@ pub struct ExpnInfo {
    pub edition: Edition,
}

impl ExpnInfo {
    /// Constructs an expansion info with default properties.
    pub fn default(kind: ExpnKind, call_site: Span, edition: Edition) -> ExpnInfo {
        ExpnInfo {
            call_site,
impl ExpnData {
    /// Constructs expansion data with default properties.
    pub fn default(kind: ExpnKind, call_site: Span, edition: Edition) -> ExpnData {
        ExpnData {
            kind,
            parent: ExpnId::root(),
            call_site,
            def_site: DUMMY_SP,
            default_transparency: Transparency::SemiTransparent,
            allow_internal_unstable: None,

@@ -675,12 +642,17 @@ impl ExpnInfo {
    }

    pub fn allow_unstable(kind: ExpnKind, call_site: Span, edition: Edition,
                          allow_internal_unstable: Lrc<[Symbol]>) -> ExpnInfo {
        ExpnInfo {
                          allow_internal_unstable: Lrc<[Symbol]>) -> ExpnData {
        ExpnData {
            allow_internal_unstable: Some(allow_internal_unstable),
            ..ExpnInfo::default(kind, call_site, edition)
            ..ExpnData::default(kind, call_site, edition)
        }
    }

    #[inline]
    pub fn is_root(&self) -> bool {
        if let ExpnKind::Root = self.kind { true } else { false }
    }
}

/// Expansion kind.

@@ -767,6 +739,18 @@ impl DesugaringKind {
    }
}

impl Encodable for ExpnId {
    fn encode<E: Encoder>(&self, _: &mut E) -> Result<(), E::Error> {
        Ok(()) // FIXME(jseyfried) intercrate hygiene
    }
}

impl Decodable for ExpnId {
    fn decode<D: Decoder>(_: &mut D) -> Result<Self, D::Error> {
        Ok(ExpnId::root()) // FIXME(jseyfried) intercrate hygiene
    }
}

impl Encodable for SyntaxContext {
    fn encode<E: Encoder>(&self, _: &mut E) -> Result<(), E::Error> {
        Ok(()) // FIXME(jseyfried) intercrate hygiene

@@ -774,7 +758,7 @@ impl Encodable for SyntaxContext {
}

impl Decodable for SyntaxContext {
    fn decode<D: Decoder>(_: &mut D) -> Result<SyntaxContext, D::Error> {
        Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene
    fn decode<D: Decoder>(_: &mut D) -> Result<Self, D::Error> {
        Ok(SyntaxContext::root()) // FIXME(jseyfried) intercrate hygiene
    }
}
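The heart of this file's change: every `ExpnId` now has associated `ExpnData` (the root expansion carries real default data), so the accessors return `ExpnData` directly and callers check `is_root()` instead of matching on an `Option<ExpnInfo>`. Below is a self-contained analogue of that migration; the types are simplified stand-ins, not the real compiler ones.

```rust
#![allow(dead_code)]

#[derive(Clone, Debug)]
enum ExpnKind { Root, Macro(&'static str) }

#[derive(Clone, Debug)]
struct ExpnData { kind: ExpnKind, call_site: u32 }

impl ExpnData {
    // The "no expansion" case is now a real value with a recognizable kind.
    fn is_root(&self) -> bool {
        if let ExpnKind::Root = self.kind { true } else { false }
    }
}

struct HygieneData { expn_data: Vec<Option<ExpnData>> }

impl HygieneData {
    fn new() -> Self {
        // The root expansion now carries real (default) data instead of `None`.
        HygieneData { expn_data: vec![Some(ExpnData { kind: ExpnKind::Root, call_site: 0 })] }
    }

    // Old shape: `fn expn_info(&self, id) -> Option<&ExpnInfo>`, returning `None` for the root.
    // New shape: data is always present; the `Option` only covers the window between
    // creating an expansion ID and resolving the macro behind it.
    fn expn_data(&self, id: usize) -> &ExpnData {
        self.expn_data[id].as_ref().expect("no expansion data for an expansion ID")
    }
}

fn main() {
    let data = HygieneData::new();
    // Callers replace `if let Some(info) = ...` with an `is_root()` check.
    let expn = data.expn_data(0);
    if expn.is_root() {
        println!("root expansion: freshly parsed code");
    } else {
        println!("expanded at call site {}", expn.call_site);
    }
}
```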
@@ -21,7 +21,7 @@ use rustc_serialize::{Encodable, Decodable, Encoder, Decoder};
pub mod edition;
use edition::Edition;
pub mod hygiene;
pub use hygiene::{ExpnId, SyntaxContext, ExpnInfo, ExpnKind, MacroKind, DesugaringKind};
pub use hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind, MacroKind, DesugaringKind};

mod span_encoding;
pub use span_encoding::{Span, DUMMY_SP};

@@ -49,7 +49,6 @@ pub struct Globals {
    symbol_interner: Lock<symbol::Interner>,
    span_interner: Lock<span_encoding::SpanInterner>,
    hygiene_data: Lock<hygiene::HygieneData>,
    edition: Edition,
}

impl Globals {

@@ -58,7 +57,6 @@ impl Globals {
            symbol_interner: Lock::new(symbol::Interner::fresh()),
            span_interner: Lock::new(span_encoding::SpanInterner::default()),
            hygiene_data: Lock::new(hygiene::HygieneData::new(edition)),
            edition,
        }
    }
}

@@ -288,6 +286,17 @@ impl Span {
        span.lo.0 == 0 && span.hi.0 == 0
    }

    /// Returns `true` if this span comes from a macro or desugaring.
    #[inline]
    pub fn from_expansion(self) -> bool {
        self.ctxt() != SyntaxContext::root()
    }

    #[inline]
    pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span {
        Span::new(lo, hi, SyntaxContext::root())
    }

    /// Returns a new span representing an empty span at the beginning of this span
    #[inline]
    pub fn shrink_to_lo(self) -> Span {

@@ -344,20 +353,20 @@ impl Span {
    /// Returns the source span -- this is either the supplied span, or the span for
    /// the macro callsite that expanded to it.
    pub fn source_callsite(self) -> Span {
        self.ctxt().outer_expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self)
        let expn_data = self.ctxt().outer_expn_data();
        if !expn_data.is_root() { expn_data.call_site.source_callsite() } else { self }
    }

    /// The `Span` for the tokens in the previous macro expansion from which `self` was generated,
    /// if any.
    pub fn parent(self) -> Option<Span> {
        self.ctxt().outer_expn_info().map(|i| i.call_site)
        let expn_data = self.ctxt().outer_expn_data();
        if !expn_data.is_root() { Some(expn_data.call_site) } else { None }
    }

    /// Edition of the crate from which this span came.
    pub fn edition(self) -> edition::Edition {
        self.ctxt().outer_expn_info().map_or_else(|| {
            Edition::from_session()
        }, |einfo| einfo.edition)
        self.ctxt().outer_expn_data().edition
    }

    #[inline]

@@ -373,52 +382,42 @@ impl Span {
    /// Returns the source callee.
    ///
    /// Returns `None` if the supplied span has no expansion trace,
    /// else returns the `ExpnInfo` for the macro definition
    /// else returns the `ExpnData` for the macro definition
    /// corresponding to the source callsite.
    pub fn source_callee(self) -> Option<ExpnInfo> {
        fn source_callee(info: ExpnInfo) -> ExpnInfo {
            match info.call_site.ctxt().outer_expn_info() {
                Some(info) => source_callee(info),
                None => info,
            }
    pub fn source_callee(self) -> Option<ExpnData> {
        fn source_callee(expn_data: ExpnData) -> ExpnData {
            let next_expn_data = expn_data.call_site.ctxt().outer_expn_data();
            if !next_expn_data.is_root() { source_callee(next_expn_data) } else { expn_data }
        }
        self.ctxt().outer_expn_info().map(source_callee)
        let expn_data = self.ctxt().outer_expn_data();
        if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None }
    }

    /// Checks if a span is "internal" to a macro in which `#[unstable]`
    /// items can be used (that is, a macro marked with
    /// `#[allow_internal_unstable]`).
    pub fn allows_unstable(&self, feature: Symbol) -> bool {
        match self.ctxt().outer_expn_info() {
            Some(info) => info
                .allow_internal_unstable
                .map_or(false, |features| features.iter().any(|&f|
                    f == feature || f == sym::allow_internal_unstable_backcompat_hack
                )),
            None => false,
        }
        self.ctxt().outer_expn_data().allow_internal_unstable.map_or(false, |features| {
            features.iter().any(|&f| {
                f == feature || f == sym::allow_internal_unstable_backcompat_hack
            })
        })
    }

    /// Checks if this span arises from a compiler desugaring of kind `kind`.
    pub fn is_desugaring(&self, kind: DesugaringKind) -> bool {
        match self.ctxt().outer_expn_info() {
            Some(info) => match info.kind {
                ExpnKind::Desugaring(k) => k == kind,
                _ => false,
            },
            None => false,
        match self.ctxt().outer_expn_data().kind {
            ExpnKind::Desugaring(k) => k == kind,
            _ => false,
        }
    }

    /// Returns the compiler desugaring that created this span, or `None`
    /// if this span is not from a desugaring.
    pub fn desugaring_kind(&self) -> Option<DesugaringKind> {
        match self.ctxt().outer_expn_info() {
            Some(info) => match info.kind {
                ExpnKind::Desugaring(k) => Some(k),
                _ => None
            },
            None => None
        match self.ctxt().outer_expn_data().kind {
            ExpnKind::Desugaring(k) => Some(k),
            _ => None
        }
    }

@@ -426,19 +425,20 @@ impl Span {
    /// can be used without triggering the `unsafe_code` lint
    // (that is, a macro marked with `#[allow_internal_unsafe]`).
    pub fn allows_unsafe(&self) -> bool {
        match self.ctxt().outer_expn_info() {
            Some(info) => info.allow_internal_unsafe,
            None => false,
        }
        self.ctxt().outer_expn_data().allow_internal_unsafe
    }

    pub fn macro_backtrace(mut self) -> Vec<MacroBacktrace> {
        let mut prev_span = DUMMY_SP;
        let mut result = vec![];
        while let Some(info) = self.ctxt().outer_expn_info() {
        loop {
            let expn_data = self.ctxt().outer_expn_data();
            if expn_data.is_root() {
                break;
            }
            // Don't print recursive invocations.
            if !info.call_site.source_equal(&prev_span) {
                let (pre, post) = match info.kind {
            if !expn_data.call_site.source_equal(&prev_span) {
                let (pre, post) = match expn_data.kind {
                    ExpnKind::Root => break,
                    ExpnKind::Desugaring(..) => ("desugaring of ", ""),
                    ExpnKind::Macro(macro_kind, _) => match macro_kind {

@@ -448,14 +448,14 @@ impl Span {
                    }
                };
                result.push(MacroBacktrace {
                    call_site: info.call_site,
                    macro_decl_name: format!("{}{}{}", pre, info.kind.descr(), post),
                    def_site_span: info.def_site,
                    call_site: expn_data.call_site,
                    macro_decl_name: format!("{}{}{}", pre, expn_data.kind.descr(), post),
                    def_site_span: expn_data.def_site,
                });
            }

            prev_span = self;
            self = info.call_site;
            self = expn_data.call_site;
        }
        result
    }

@@ -468,9 +468,9 @@ impl Span {
        // Return the macro span on its own to avoid weird diagnostic output. It is preferable to
        // have an incomplete span than a completely nonsensical one.
        if span_data.ctxt != end_data.ctxt {
            if span_data.ctxt == SyntaxContext::empty() {
            if span_data.ctxt == SyntaxContext::root() {
                return end;
            } else if end_data.ctxt == SyntaxContext::empty() {
            } else if end_data.ctxt == SyntaxContext::root() {
                return self;
            }
            // Both spans fall within a macro.

@@ -479,7 +479,7 @@ impl Span {
        Span::new(
            cmp::min(span_data.lo, end_data.lo),
            cmp::max(span_data.hi, end_data.hi),
            if span_data.ctxt == SyntaxContext::empty() { end_data.ctxt } else { span_data.ctxt },
            if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt },
        )
    }

@@ -490,7 +490,7 @@ impl Span {
        Span::new(
            span.hi,
            end.lo,
            if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt },
            if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
        )
    }

@@ -501,7 +501,7 @@ impl Span {
        Span::new(
            span.lo,
            end.lo,
            if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt },
            if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
        )
    }

@@ -611,7 +611,7 @@ impl rustc_serialize::UseSpecializedDecodable for Span {
        d.read_struct("Span", 2, |d| {
            let lo = d.read_struct_field("lo", 0, Decodable::decode)?;
            let hi = d.read_struct_field("hi", 1, Decodable::decode)?;
            Ok(Span::new(lo, hi, NO_EXPANSION))
            Ok(Span::with_root_ctxt(lo, hi))
        })
    }
}

@@ -755,8 +755,6 @@ impl From<Vec<Span>> for MultiSpan {
    }
}

pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty();

/// Identifies an offset of a multi-byte character in a `SourceFile`.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub struct MultiByteChar {
@@ -14,7 +14,6 @@ use std::fmt;
use std::hash::{Hash, Hasher};
use std::str;

use crate::hygiene::SyntaxContext;
use crate::{Span, DUMMY_SP, GLOBALS};

#[cfg(test)]

@@ -745,25 +744,25 @@ impl Ident {
        Ident { name, span }
    }

    /// Constructs a new identifier with an empty syntax context.
    /// Constructs a new identifier with a dummy span.
    #[inline]
    pub const fn with_empty_ctxt(name: Symbol) -> Ident {
    pub const fn with_dummy_span(name: Symbol) -> Ident {
        Ident::new(name, DUMMY_SP)
    }

    #[inline]
    pub fn invalid() -> Ident {
        Ident::with_empty_ctxt(kw::Invalid)
        Ident::with_dummy_span(kw::Invalid)
    }

    /// Maps an interned string to an identifier with an empty syntax context.
    pub fn from_interned_str(string: InternedString) -> Ident {
        Ident::with_empty_ctxt(string.as_symbol())
        Ident::with_dummy_span(string.as_symbol())
    }

    /// Maps a string to an identifier with an empty span.
    pub fn from_str(string: &str) -> Ident {
        Ident::with_empty_ctxt(Symbol::intern(string))
        Ident::with_dummy_span(Symbol::intern(string))
    }

    /// Maps a string and a span to an identifier.

@@ -851,7 +850,7 @@ impl fmt::Display for Ident {

impl Encodable for Ident {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        if self.span.ctxt().modern() == SyntaxContext::empty() {
        if !self.span.modern().from_expansion() {
            s.emit_str(&self.as_str())
        } else { // FIXME(jseyfried): intercrate hygiene
            let mut string = "#".to_owned();
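The `Encodable for Ident` change above swaps an equality test against the old `SyntaxContext::empty()` sentinel for the new `Span::from_expansion()` helper. A tiny illustrative sketch of that rename and helper follows; these are stand-in types, not the real `syntax_pos` items.

```rust
#![allow(dead_code)]

#[derive(Clone, Copy, PartialEq, Debug)]
struct SyntaxContext(u32);

impl SyntaxContext {
    // Renamed from `empty()`: context 0 means "not produced by any expansion",
    // i.e. the root context, rather than an "empty" one.
    const fn root() -> SyntaxContext { SyntaxContext(0) }
}

#[derive(Clone, Copy, Debug)]
struct Span { ctxt: SyntaxContext }

impl Span {
    // Old call sites wrote `span.ctxt() == SyntaxContext::empty()`;
    // the helper names the intent instead of the sentinel value.
    fn from_expansion(self) -> bool {
        self.ctxt != SyntaxContext::root()
    }
}

fn main() {
    let plain = Span { ctxt: SyntaxContext::root() };
    let expanded = Span { ctxt: SyntaxContext(3) };
    assert!(!plain.from_expansion());
    assert!(expanded.from_expansion());
    println!("ok");
}
```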
@@ -4,17 +4,17 @@ error: cannot find macro `__build_diagnostic_array!` in this scope
LL | __build_diagnostic_array!(DIAGNOSTICS);
   | ^^^^^^^^^^^^^^^^^^^^^^^^

error: cannot find macro `__register_diagnostic!` in this scope
  --> $DIR/feature-gate-rustc-diagnostic-macros.rs:4:1
   |
LL | __register_diagnostic!(E0001);
   | ^^^^^^^^^^^^^^^^^^^^^

error: cannot find macro `__diagnostic_used!` in this scope
  --> $DIR/feature-gate-rustc-diagnostic-macros.rs:8:5
   |
LL | __diagnostic_used!(E0001);
   | ^^^^^^^^^^^^^^^^^

error: cannot find macro `__register_diagnostic!` in this scope
  --> $DIR/feature-gate-rustc-diagnostic-macros.rs:4:1
   |
LL | __register_diagnostic!(E0001);
   | ^^^^^^^^^^^^^^^^^^^^^

error: aborting due to 3 previous errors
@@ -1,3 +1,11 @@
error: cannot find macro `panic!` in this scope
  --> $DIR/no_implicit_prelude.rs:16:9
   |
LL | assert_eq!(0, 0);
   | ^^^^^^^^^^^^^^^^^
   |
   = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)

error[E0433]: failed to resolve: use of undeclared type or module `Vec`
  --> $DIR/no_implicit_prelude.rs:11:9
   |

@@ -7,14 +15,6 @@ LL | fn f() { ::bar::m!(); }
LL | Vec::new();
   | ^^^ use of undeclared type or module `Vec`

error: cannot find macro `panic!` in this scope
  --> $DIR/no_implicit_prelude.rs:16:9
   |
LL | assert_eq!(0, 0);
   | ^^^^^^^^^^^^^^^^^
   |
   = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)

error[E0599]: no method named `clone` found for type `()` in the current scope
  --> $DIR/no_implicit_prelude.rs:12:12
   |
@@ -21,25 +21,6 @@ LL | use inner1::*;
   | ^^^^^^^^^
   = help: consider adding an explicit import of `exported` to disambiguate

error[E0659]: `include` is ambiguous (macro-expanded name vs less macro-expanded name from outer scope during import/macro resolution)
  --> $DIR/local-modularized-tricky-fail-1.rs:46:1
   |
LL | include!();
   | ^^^^^^^ ambiguous name
   |
   = note: `include` could refer to a macro from prelude
note: `include` could also refer to the macro defined here
  --> $DIR/local-modularized-tricky-fail-1.rs:17:5
   |
LL | / macro_rules! include {
LL | | () => ()
LL | | }
   | |_____^
...
LL | define_include!();
   | ------------------ in this macro invocation
   = help: use `crate::include` to refer to this macro unambiguously

error[E0659]: `panic` is ambiguous (macro-expanded name vs less macro-expanded name from outer scope during import/macro resolution)
  --> $DIR/local-modularized-tricky-fail-1.rs:35:5
   |

@@ -59,6 +40,25 @@ LL | define_panic!();
   | ---------------- in this macro invocation
   = help: use `crate::panic` to refer to this macro unambiguously

error[E0659]: `include` is ambiguous (macro-expanded name vs less macro-expanded name from outer scope during import/macro resolution)
  --> $DIR/local-modularized-tricky-fail-1.rs:46:1
   |
LL | include!();
   | ^^^^^^^ ambiguous name
   |
   = note: `include` could refer to a macro from prelude
note: `include` could also refer to the macro defined here
  --> $DIR/local-modularized-tricky-fail-1.rs:17:5
   |
LL | / macro_rules! include {
LL | | () => ()
LL | | }
   | |_____^
...
LL | define_include!();
   | ------------------ in this macro invocation
   = help: use `crate::include` to refer to this macro unambiguously

error: aborting due to 3 previous errors

For more information about this error, try `rustc --explain E0659`.
@@ -13,20 +13,6 @@ LL | use foo::*;
   = help: consider adding an explicit import of `panic` to disambiguate
   = help: or use `self::panic` to refer to this macro unambiguously

error[E0659]: `panic` is ambiguous (macro-expanded name vs less macro-expanded name from outer scope during import/macro resolution)
  --> $DIR/shadow_builtin_macros.rs:20:14
   |
LL | fn f() { panic!(); }
   | ^^^^^ ambiguous name
   |
   = note: `panic` could refer to a macro from prelude
note: `panic` could also refer to the macro imported here
  --> $DIR/shadow_builtin_macros.rs:19:26
   |
LL | ::two_macros::m!(use foo::panic;);
   | ^^^^^^^^^^
   = help: use `self::panic` to refer to this macro unambiguously

error[E0659]: `panic` is ambiguous (macro-expanded name vs less macro-expanded name from outer scope during import/macro resolution)
  --> $DIR/shadow_builtin_macros.rs:33:5
   |

@@ -62,6 +48,20 @@ note: `n` could also refer to the macro imported here
LL | #[macro_use(n)]
   | ^

error[E0659]: `panic` is ambiguous (macro-expanded name vs less macro-expanded name from outer scope during import/macro resolution)
  --> $DIR/shadow_builtin_macros.rs:20:14
   |
LL | fn f() { panic!(); }
   | ^^^^^ ambiguous name
   |
   = note: `panic` could refer to a macro from prelude
note: `panic` could also refer to the macro imported here
  --> $DIR/shadow_builtin_macros.rs:19:26
   |
LL | ::two_macros::m!(use foo::panic;);
   | ^^^^^^^^^^
   = help: use `self::panic` to refer to this macro unambiguously

error: aborting due to 4 previous errors

For more information about this error, try `rustc --explain E0659`.
@@ -1,9 +1,3 @@
error: cannot find attribute macro `marco_use` in this scope
  --> $DIR/issue-49074.rs:3:3
   |
LL | #[marco_use] // typo
   | ^^^^^^^^^ help: a built-in attribute with a similar name exists: `macro_use`

error: cannot find macro `bar!` in this scope
  --> $DIR/issue-49074.rs:12:4
   |

@@ -12,5 +6,11 @@ LL | bar!();
   |
   = help: have you added the `#[macro_use]` on the module/import?

error: cannot find attribute macro `marco_use` in this scope
  --> $DIR/issue-49074.rs:3:3
   |
LL | #[marco_use] // typo
   | ^^^^^^^^^ help: a built-in attribute with a similar name exists: `macro_use`

error: aborting due to 2 previous errors
@@ -88,18 +88,6 @@ error: expected derive macro, found macro `crate::my_macro`
LL | #[derive(crate::my_macro)]
   | ^^^^^^^^^^^^^^^ not a derive macro

error: cannot find attribute macro `my_macro` in this scope
  --> $DIR/macro-namespace-reserved-2.rs:38:3
   |
LL | #[my_macro]
   | ^^^^^^^^

error: cannot find derive macro `my_macro` in this scope
  --> $DIR/macro-namespace-reserved-2.rs:48:10
   |
LL | #[derive(my_macro)]
   | ^^^^^^^^

error: cannot find macro `my_macro_attr!` in this scope
  --> $DIR/macro-namespace-reserved-2.rs:28:5
   |

@@ -112,5 +100,17 @@ error: cannot find macro `MyTrait!` in this scope
LL | MyTrait!();
   | ^^^^^^^

error: cannot find attribute macro `my_macro` in this scope
  --> $DIR/macro-namespace-reserved-2.rs:38:3
   |
LL | #[my_macro]
   | ^^^^^^^^

error: cannot find derive macro `my_macro` in this scope
  --> $DIR/macro-namespace-reserved-2.rs:48:10
   |
LL | #[derive(my_macro)]
   | ^^^^^^^^

error: aborting due to 19 previous errors
@@ -7,12 +7,6 @@ LL | #[rustc_attribute_should_be_reserved]
   = note: for more information, see https://github.com/rust-lang/rust/issues/29642
   = help: add `#![feature(rustc_attrs)]` to the crate attributes to enable

error: cannot find attribute macro `rustc_attribute_should_be_reserved` in this scope
  --> $DIR/reserved-attr-on-macro.rs:1:3
   |
LL | #[rustc_attribute_should_be_reserved]
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

error: cannot determine resolution for the macro `foo`
  --> $DIR/reserved-attr-on-macro.rs:10:5
   |

@@ -21,6 +15,12 @@ LL | foo!();
   |
   = note: import resolution is stuck, try simplifying macro imports

error: cannot find attribute macro `rustc_attribute_should_be_reserved` in this scope
  --> $DIR/reserved-attr-on-macro.rs:1:3
   |
LL | #[rustc_attribute_should_be_reserved]
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

error: aborting due to 3 previous errors

For more information about this error, try `rustc --explain E0658`.