Auto merge of #63627 - Centril:rollup-v8i7x5i, r=Centril

Rollup of 7 pull requests

Successful merges:

 - #62593 (Group all ABI tests.)
 - #63173 (Use libunwind from llvm-project submodule for musl targets)
 - #63535 (Continue refactoring resolve and hygiene)
 - #63539 (Suggest Rust 2018 on `<expr>.await` with no such field)
 - #63584 (libcore: more cleanups using `#![feature(associated_type_bounds)]`)
 - #63612 (Do not suggest `try_into` for base types inside of macro expansions)
 - #63615 (Fix typo in DoubleEndedIterator::nth_back doc)

Failed merges:

r? @ghost
bors · 2019-08-16 06:53:13 +00:00 · commit 5a6d801bf9
138 changed files with 1047 additions and 902 deletions

View file

@@ -202,10 +202,6 @@ pub fn check(build: &mut Build) {
                 panic!("couldn't find libc.a in musl dir: {}",
                        root.join("lib").display());
             }
-            if fs::metadata(root.join("lib/libunwind.a")).is_err() {
-                panic!("couldn't find libunwind.a in musl dir: {}",
-                       root.join("lib").display());
-            }
         }
         None => {
             panic!("when targeting MUSL either the rust.musl-root \

View file

@@ -54,29 +54,3 @@ if [ "$REPLACE_CC" = "1" ]; then
     ln -s $TARGET-g++ /usr/local/bin/$exec
   done
 fi
-export CC=$TARGET-gcc
-export CXX=$TARGET-g++
-LLVM=70
-# may have been downloaded in a previous run
-if [ ! -d libunwind-release_$LLVM ]; then
-  curl -L https://github.com/llvm-mirror/llvm/archive/release_$LLVM.tar.gz | tar xzf -
-  curl -L https://github.com/llvm-mirror/libunwind/archive/release_$LLVM.tar.gz | tar xzf -
-fi
-# fixme(mati865): Replace it with https://github.com/rust-lang/rust/pull/59089
-mkdir libunwind-build
-cd libunwind-build
-cmake ../libunwind-release_$LLVM \
-  -DLLVM_PATH=/build/llvm-release_$LLVM \
-  -DLIBUNWIND_ENABLE_SHARED=0 \
-  -DCMAKE_C_COMPILER=$CC \
-  -DCMAKE_CXX_COMPILER=$CXX \
-  -DCMAKE_C_FLAGS="$CFLAGS" \
-  -DCMAKE_CXX_FLAGS="$CXXFLAGS"
-hide_output make -j$(nproc)
-cp lib/libunwind.a $OUTPUT/$TARGET/lib
-cd - && rm -rf libunwind-build

View file

@@ -20,6 +20,8 @@ exit 1
 TAG=$1
 shift
+# Ancient binutils versions don't understand debug symbols produced by more recent tools.
+# Apparently applying `-fPIC` everywhere allows them to link successfully.
 export CFLAGS="-fPIC $CFLAGS"
 MUSL=musl-1.1.22
@@ -38,27 +40,3 @@ else
 fi
 hide_output make install
 hide_output make clean
-cd ..
-LLVM=70
-# may have been downloaded in a previous run
-if [ ! -d libunwind-release_$LLVM ]; then
-  curl -L https://github.com/llvm-mirror/llvm/archive/release_$LLVM.tar.gz | tar xzf -
-  curl -L https://github.com/llvm-mirror/libunwind/archive/release_$LLVM.tar.gz | tar xzf -
-fi
-mkdir libunwind-build
-cd libunwind-build
-cmake ../libunwind-release_$LLVM \
-  -DLLVM_PATH=/build/llvm-release_$LLVM \
-  -DLIBUNWIND_ENABLE_SHARED=0 \
-  -DCMAKE_C_COMPILER=$CC \
-  -DCMAKE_CXX_COMPILER=$CXX \
-  -DCMAKE_C_FLAGS="$CFLAGS" \
-  -DCMAKE_CXX_FLAGS="$CXXFLAGS"
-hide_output make -j$(nproc)
-cp lib/libunwind.a /musl-$TAG/lib
-cd ../ && rm -rf libunwind-build

View file

@@ -72,8 +72,7 @@ impl<I: Iterator, U: IntoIterator, F> Iterator for FlatMap<I, U, F>
 impl<I: DoubleEndedIterator, U, F> DoubleEndedIterator for FlatMap<I, U, F>
 where
     F: FnMut(I::Item) -> U,
-    U: IntoIterator,
-    U::IntoIter: DoubleEndedIterator,
+    U: IntoIterator<IntoIter: DoubleEndedIterator>,
 {
     #[inline]
     fn next_back(&mut self) -> Option<U::Item> { self.inner.next_back() }
@@ -107,10 +106,7 @@ impl<I, U, F> FusedIterator for FlatMap<I, U, F>
 /// [`Iterator`]: trait.Iterator.html
 #[must_use = "iterators are lazy and do nothing unless consumed"]
 #[stable(feature = "iterator_flatten", since = "1.29.0")]
-pub struct Flatten<I: Iterator>
-where
-    I::Item: IntoIterator,
-{
+pub struct Flatten<I: Iterator<Item: IntoIterator>> {
     inner: FlattenCompat<I, <I::Item as IntoIterator>::IntoIter>,
 }
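For context on the `#![feature(associated_type_bounds)]` cleanups from #63584: the new bound `U: IntoIterator<IntoIter: DoubleEndedIterator>` is shorthand for the separate `U::IntoIter: DoubleEndedIterator` where-clause it replaces. A minimal sketch (illustrative function names, nightly-only feature at the time) showing the two spellings are equivalent:

```rust
#![feature(associated_type_bounds)] // nightly feature used by these cleanups

// Shorthand form: the bound on the associated type is written inline.
fn last_inline<U>(it: U) -> Option<U::Item>
where
    U: IntoIterator<IntoIter: DoubleEndedIterator>,
{
    it.into_iter().next_back()
}

// Long form, as the `FlatMap`/`Flatten` bounds were written before this change.
fn last_long<U>(it: U) -> Option<U::Item>
where
    U: IntoIterator,
    U::IntoIter: DoubleEndedIterator,
{
    it.into_iter().next_back()
}

fn main() {
    assert_eq!(last_inline(vec![1, 2, 3]), Some(3));
    assert_eq!(last_long(vec![1, 2, 3]), Some(3));
}
```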

View file

@@ -69,7 +69,7 @@ pub trait DoubleEndedIterator: Iterator {
     /// Returns the `n`th element from the end of the iterator.
     ///
     /// This is essentially the reversed version of [`nth`]. Although like most indexing
-    /// operations, the count starts from zero, so `nth_back(0)` returns the first value fro
+    /// operations, the count starts from zero, so `nth_back(0)` returns the first value from
     /// the end, `nth_back(1)` the second, and so on.
     ///
     /// Note that all elements between the end and the returned element will be
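A small usage sketch of the behavior the corrected sentence describes: counting starts from zero at the back, and, like `nth`, the call consumes the elements it skips.

```rust
fn main() {
    let a = [1, 2, 3];
    let mut iter = a.iter();

    // `nth_back(0)` is the first value from the end, i.e. the last element.
    assert_eq!(iter.nth_back(0), Some(&3));

    // The remaining elements are `1` and `2`; skipping one from the back
    // (the `2`) yields the `1`, after which the iterator is exhausted.
    assert_eq!(iter.nth_back(1), Some(&1));
    assert_eq!(iter.next(), None);
}
```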

View file

@@ -67,7 +67,7 @@ use syntax::errors;
 use syntax::ext::base::SpecialDerives;
 use syntax::ext::hygiene::ExpnId;
 use syntax::print::pprust;
-use syntax::source_map::{respan, ExpnInfo, ExpnKind, DesugaringKind, Spanned};
+use syntax::source_map::{respan, ExpnData, ExpnKind, DesugaringKind, Spanned};
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
 use syntax::parse::token::{self, Token};
@@ -704,10 +704,9 @@ impl<'a> LoweringContext<'a> {
         span: Span,
         allow_internal_unstable: Option<Lrc<[Symbol]>>,
     ) -> Span {
-        span.fresh_expansion(ExpnId::root(), ExpnInfo {
-            def_site: span,
+        span.fresh_expansion(ExpnData {
             allow_internal_unstable,
-            ..ExpnInfo::default(ExpnKind::Desugaring(reason), span, self.sess.edition())
+            ..ExpnData::default(ExpnKind::Desugaring(reason), span, self.sess.edition())
         })
     }
@@ -1224,7 +1223,7 @@ impl<'a> LoweringContext<'a> {
         P(hir::Path {
             res,
             segments: hir_vec![hir::PathSegment::from_ident(
-                Ident::with_empty_ctxt(kw::SelfUpper)
+                Ident::with_dummy_span(kw::SelfUpper)
             )],
             span: t.span,
         }),
@@ -1558,7 +1557,7 @@ impl<'a> LoweringContext<'a> {
         let (name, kind) = match name {
             hir::LifetimeName::Underscore => (
-                hir::ParamName::Plain(Ident::with_empty_ctxt(kw::UnderscoreLifetime)),
+                hir::ParamName::Plain(Ident::with_dummy_span(kw::UnderscoreLifetime)),
                 hir::LifetimeParamKind::Elided,
             ),
             hir::LifetimeName::Param(param_name) => (
@@ -2002,7 +2001,7 @@ impl<'a> LoweringContext<'a> {
             bindings: hir_vec![
                 hir::TypeBinding {
                     hir_id: this.next_id(),
-                    ident: Ident::with_empty_ctxt(FN_OUTPUT_NAME),
+                    ident: Ident::with_dummy_span(FN_OUTPUT_NAME),
                     kind: hir::TypeBindingKind::Equality {
                         ty: output
                             .as_ref()
@@ -2394,7 +2393,7 @@ impl<'a> LoweringContext<'a> {
         let future_params = P(hir::GenericArgs {
             args: hir_vec![],
             bindings: hir_vec![hir::TypeBinding {
-                ident: Ident::with_empty_ctxt(FN_OUTPUT_NAME),
+                ident: Ident::with_dummy_span(FN_OUTPUT_NAME),
                 kind: hir::TypeBindingKind::Equality {
                     ty: output_ty,
                 },

View file

@@ -552,7 +552,7 @@ impl LoweringContext<'_> {
         // let mut pinned = <expr>;
         let expr = P(self.lower_expr(expr));
-        let pinned_ident = Ident::with_empty_ctxt(sym::pinned);
+        let pinned_ident = Ident::with_dummy_span(sym::pinned);
         let (pinned_pat, pinned_pat_hid) = self.pat_ident_binding_mode(
             span,
             pinned_ident,
@@ -593,7 +593,7 @@ impl LoweringContext<'_> {
         let loop_node_id = self.sess.next_node_id();
         let loop_hir_id = self.lower_node_id(loop_node_id);
         let ready_arm = {
-            let x_ident = Ident::with_empty_ctxt(sym::result);
+            let x_ident = Ident::with_dummy_span(sym::result);
             let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
             let x_expr = P(self.expr_ident(span, x_ident, x_pat_hid));
             let ready_pat = self.pat_std_enum(
@@ -1070,9 +1070,9 @@ impl LoweringContext<'_> {
         );
         head.span = desugared_span;
-        let iter = Ident::with_empty_ctxt(sym::iter);
-        let next_ident = Ident::with_empty_ctxt(sym::__next);
+        let iter = Ident::with_dummy_span(sym::iter);
+        let next_ident = Ident::with_dummy_span(sym::__next);
         let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
             desugared_span,
             next_ident,
@@ -1081,7 +1081,7 @@ impl LoweringContext<'_> {
         // `::std::option::Option::Some(val) => __next = val`
         let pat_arm = {
-            let val_ident = Ident::with_empty_ctxt(sym::val);
+            let val_ident = Ident::with_dummy_span(sym::val);
             let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
             let val_expr = P(self.expr_ident(pat.span, val_ident, val_pat_hid));
             let next_expr = P(self.expr_ident(pat.span, next_ident, next_pat_hid));
@@ -1247,7 +1247,7 @@ impl LoweringContext<'_> {
         // `Ok(val) => #[allow(unreachable_code)] val,`
         let ok_arm = {
-            let val_ident = Ident::with_empty_ctxt(sym::val);
+            let val_ident = Ident::with_dummy_span(sym::val);
             let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
             let val_expr = P(self.expr_ident_with_attrs(
                 span,
@@ -1263,7 +1263,7 @@ impl LoweringContext<'_> {
         // `Err(err) => #[allow(unreachable_code)]
         //              return Try::from_error(From::from(err)),`
         let err_arm = {
-            let err_ident = Ident::with_empty_ctxt(sym::err);
+            let err_ident = Ident::with_dummy_span(sym::err);
             let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident);
             let from_expr = {
                 let from_path = &[sym::convert, sym::From, sym::from];

View file

@@ -202,7 +202,7 @@ impl ParamName {
         match *self {
             ParamName::Plain(ident) => ident,
             ParamName::Fresh(_) |
-            ParamName::Error => Ident::with_empty_ctxt(kw::UnderscoreLifetime),
+            ParamName::Error => Ident::with_dummy_span(kw::UnderscoreLifetime),
         }
     }
@@ -237,8 +237,8 @@ impl LifetimeName {
     pub fn ident(&self) -> Ident {
         match *self {
             LifetimeName::Implicit | LifetimeName::Error => Ident::invalid(),
-            LifetimeName::Underscore => Ident::with_empty_ctxt(kw::UnderscoreLifetime),
-            LifetimeName::Static => Ident::with_empty_ctxt(kw::StaticLifetime),
+            LifetimeName::Underscore => Ident::with_dummy_span(kw::UnderscoreLifetime),
+            LifetimeName::Static => Ident::with_dummy_span(kw::StaticLifetime),
             LifetimeName::Param(param_name) => param_name.ident(),
         }
     }

View file

@@ -1457,7 +1457,7 @@ impl<'a> State<'a> {
     }
     pub fn print_name(&mut self, name: ast::Name) {
-        self.print_ident(ast::Ident::with_empty_ctxt(name))
+        self.print_ident(ast::Ident::with_dummy_span(name))
     }
     pub fn print_for_decl(&mut self, loc: &hir::Local, coll: &hir::Expr) {

View file

@@ -350,7 +350,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for Span {
         let line_col_len = col | line | len;
         std_hash::Hash::hash(&line_col_len, hasher);
-        if span.ctxt == SyntaxContext::empty() {
+        if span.ctxt == SyntaxContext::root() {
             TAG_NO_EXPANSION.hash_stable(hcx, hasher);
         } else {
             TAG_EXPANSION.hash_stable(hcx, hasher);
@@ -370,7 +370,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for Span {
             }
             let mut hasher = StableHasher::new();
-            expn_id.expn_info().hash_stable(hcx, &mut hasher);
+            expn_id.expn_data().hash_stable(hcx, &mut hasher);
             let sub_hash: Fingerprint = hasher.finish();
             let sub_hash = sub_hash.to_smaller_hash();
             cache.borrow_mut().insert(expn_id, sub_hash);

View file

@@ -397,9 +397,10 @@ impl_stable_hash_for!(enum ::syntax_pos::hygiene::Transparency {
     Opaque,
 });
-impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnInfo {
-    call_site,
+impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnData {
     kind,
+    parent -> _,
+    call_site,
     def_site,
     default_transparency,
     allow_internal_unstable,

View file

@@ -9,7 +9,6 @@ use errors::Applicability;
 use rustc_data_structures::fx::FxHashMap;
 use syntax::ast::{Ident, Item, ItemKind};
 use syntax::symbol::{sym, Symbol};
-use syntax_pos::ExpnInfo;
 declare_tool_lint! {
     pub rustc::DEFAULT_HASH_TYPES,
@@ -108,7 +107,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TyTyKind {
                     .help("try using `Ty` instead")
                     .emit();
             } else {
-                if ty.span.ctxt().outer_expn_info().is_some() {
+                if ty.span.from_expansion() {
                     return;
                 }
                 if let Some(t) = is_ty_or_ty_ctxt(cx, ty) {
@@ -228,30 +227,20 @@ impl EarlyLintPass for LintPassImpl {
         if let ItemKind::Impl(_, _, _, _, Some(lint_pass), _, _) = &item.node {
             if let Some(last) = lint_pass.path.segments.last() {
                 if last.ident.name == sym::LintPass {
-                    match &lint_pass.path.span.ctxt().outer_expn_info() {
-                        Some(info) if is_lint_pass_expansion(info) => {}
-                        _ => {
-                            cx.struct_span_lint(
-                                LINT_PASS_IMPL_WITHOUT_MACRO,
-                                lint_pass.path.span,
-                                "implementing `LintPass` by hand",
-                            )
-                            .help("try using `declare_lint_pass!` or `impl_lint_pass!` instead")
-                            .emit();
-                        }
+                    let expn_data = lint_pass.path.span.ctxt().outer_expn_data();
+                    let call_site = expn_data.call_site;
+                    if expn_data.kind.descr() != sym::impl_lint_pass &&
+                        call_site.ctxt().outer_expn_data().kind.descr() != sym::declare_lint_pass {
+                        cx.struct_span_lint(
+                            LINT_PASS_IMPL_WITHOUT_MACRO,
+                            lint_pass.path.span,
+                            "implementing `LintPass` by hand",
+                        )
+                        .help("try using `declare_lint_pass!` or `impl_lint_pass!` instead")
+                        .emit();
                     }
                 }
             }
         }
     }
 }
-fn is_lint_pass_expansion(expn_info: &ExpnInfo) -> bool {
-    if expn_info.kind.descr() == sym::impl_lint_pass {
-        true
-    } else if let Some(info) = expn_info.call_site.ctxt().outer_expn_info() {
-        info.kind.descr() == sym::declare_lint_pass
-    } else {
-        false
-    }
-}

View file

@@ -885,21 +885,16 @@ pub fn provide(providers: &mut Providers<'_>) {
 /// This is used to test whether a lint should not even begin to figure out whether it should
 /// be reported on the current node.
 pub fn in_external_macro(sess: &Session, span: Span) -> bool {
-    let info = match span.ctxt().outer_expn_info() {
-        Some(info) => info,
-        // no ExpnInfo means this span doesn't come from a macro
-        None => return false,
-    };
-    match info.kind {
+    let expn_data = span.ctxt().outer_expn_data();
+    match expn_data.kind {
         ExpnKind::Root | ExpnKind::Desugaring(DesugaringKind::ForLoop) => false,
         ExpnKind::Desugaring(_) => true, // well, it's "external"
         ExpnKind::Macro(MacroKind::Bang, _) => {
-            if info.def_site.is_dummy() {
+            if expn_data.def_site.is_dummy() {
                 // dummy span for the def_site means it's an external macro
                 return true;
             }
-            match sess.source_map().span_to_snippet(info.def_site) {
+            match sess.source_map().span_to_snippet(expn_data.def_site) {
                 Ok(code) => !code.starts_with("macro_rules"),
                 // no snippet = external macro or compiler-builtin expansion
                 Err(_) => true,
@@ -911,10 +906,8 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool {
 /// Returns whether `span` originates in a derive macro's expansion
 pub fn in_derive_expansion(span: Span) -> bool {
-    if let Some(info) = span.ctxt().outer_expn_info() {
-        if let ExpnKind::Macro(MacroKind::Derive, _) = info.kind {
-            return true;
-        }
+    if let ExpnKind::Macro(MacroKind::Derive, _) = span.ctxt().outer_expn_data().kind {
+        return true;
     }
     false
 }

View file

@@ -36,7 +36,7 @@ use errors::{Applicability, DiagnosticBuilder};
 use std::fmt;
 use syntax::ast;
 use syntax::symbol::sym;
-use syntax_pos::{DUMMY_SP, Span, ExpnInfo, ExpnKind};
+use syntax_pos::{DUMMY_SP, Span, ExpnKind};
 impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
     pub fn report_fulfillment_errors(&self,
@@ -61,9 +61,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
             // We want to ignore desugarings here: spans are equivalent even
             // if one is the result of a desugaring and the other is not.
             let mut span = error.obligation.cause.span;
-            if let Some(ExpnInfo { kind: ExpnKind::Desugaring(_), def_site, .. })
-                = span.ctxt().outer_expn_info() {
-                span = def_site;
+            let expn_data = span.ctxt().outer_expn_data();
+            if let ExpnKind::Desugaring(_) = expn_data.kind {
+                span = expn_data.call_site;
             }
             error_map.entry(span).or_default().push(

View file

@@ -1417,7 +1417,7 @@ fn confirm_callable_candidate<'cx, 'tcx>(
         projection_ty: ty::ProjectionTy::from_ref_and_name(
             tcx,
             trait_ref,
-            Ident::with_empty_ctxt(FN_OUTPUT_NAME),
+            Ident::with_dummy_span(FN_OUTPUT_NAME),
         ),
         ty: ret_type
     }

View file

@@ -23,16 +23,16 @@ use std::mem;
 use syntax::ast::NodeId;
 use syntax::source_map::{SourceMap, StableSourceFileId};
 use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile};
-use syntax_pos::hygiene::{ExpnId, SyntaxContext, ExpnInfo};
+use syntax_pos::hygiene::{ExpnId, SyntaxContext, ExpnData};
 const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;
 const TAG_CLEAR_CROSS_CRATE_CLEAR: u8 = 0;
 const TAG_CLEAR_CROSS_CRATE_SET: u8 = 1;
-const TAG_NO_EXPANSION_INFO: u8 = 0;
-const TAG_EXPANSION_INFO_SHORTHAND: u8 = 1;
-const TAG_EXPANSION_INFO_INLINE: u8 = 2;
+const TAG_NO_EXPN_DATA: u8 = 0;
+const TAG_EXPN_DATA_SHORTHAND: u8 = 1;
+const TAG_EXPN_DATA_INLINE: u8 = 2;
 const TAG_VALID_SPAN: u8 = 0;
 const TAG_INVALID_SPAN: u8 = 1;
@@ -58,7 +58,7 @@ pub struct OnDiskCache<'sess> {
     // These two fields caches that are populated lazily during decoding.
     file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
-    synthetic_expansion_infos: Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
+    synthetic_syntax_contexts: Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
     // A map from dep-node to the position of the cached query result in
     // `serialized_data`.
@@ -135,7 +135,7 @@ impl<'sess> OnDiskCache<'sess> {
             current_diagnostics: Default::default(),
             query_result_index: footer.query_result_index.into_iter().collect(),
             prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
-            synthetic_expansion_infos: Default::default(),
+            synthetic_syntax_contexts: Default::default(),
             alloc_decoding_state: AllocDecodingState::new(footer.interpret_alloc_index),
         }
     }
@@ -151,7 +151,7 @@ impl<'sess> OnDiskCache<'sess> {
             current_diagnostics: Default::default(),
             query_result_index: Default::default(),
             prev_diagnostics_index: Default::default(),
-            synthetic_expansion_infos: Default::default(),
+            synthetic_syntax_contexts: Default::default(),
             alloc_decoding_state: AllocDecodingState::new(Vec::new()),
         }
     }
@@ -185,7 +185,7 @@ impl<'sess> OnDiskCache<'sess> {
             encoder,
             type_shorthands: Default::default(),
             predicate_shorthands: Default::default(),
-            expn_info_shorthands: Default::default(),
+            expn_data_shorthands: Default::default(),
             interpret_allocs: Default::default(),
             interpret_allocs_inverse: Vec::new(),
             source_map: CachingSourceMapView::new(tcx.sess.source_map()),
@@ -383,7 +383,7 @@ impl<'sess> OnDiskCache<'sess> {
             cnum_map: self.cnum_map.get(),
             file_index_to_file: &self.file_index_to_file,
             file_index_to_stable_id: &self.file_index_to_stable_id,
-            synthetic_expansion_infos: &self.synthetic_expansion_infos,
+            synthetic_syntax_contexts: &self.synthetic_syntax_contexts,
             alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
         };
@@ -440,7 +440,7 @@ struct CacheDecoder<'a, 'tcx> {
     opaque: opaque::Decoder<'a>,
     source_map: &'a SourceMap,
     cnum_map: &'a IndexVec<CrateNum, Option<CrateNum>>,
-    synthetic_expansion_infos: &'a Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
+    synthetic_syntax_contexts: &'a Lock<FxHashMap<AbsoluteBytePos, SyntaxContext>>,
     file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
     file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, StableSourceFileId>,
     alloc_decoding_session: AllocDecodingSession<'a>,
@@ -586,37 +586,37 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
         let lo = file_lo.lines[line_lo - 1] + col_lo;
         let hi = lo + len;
-        let expn_info_tag = u8::decode(self)?;
+        let expn_data_tag = u8::decode(self)?;
-        // FIXME(mw): This method does not restore `InternalExpnData::parent` or
+        // FIXME(mw): This method does not restore `ExpnData::parent` or
         // `SyntaxContextData::prev_ctxt` or `SyntaxContextData::opaque`. These things
         // don't seem to be used after HIR lowering, so everything should be fine
         // as long as incremental compilation does not kick in before that.
-        let location = || Span::new(lo, hi, SyntaxContext::empty());
-        let recover_from_expn_info = |this: &Self, expn_info, pos| {
-            let span = location().fresh_expansion(ExpnId::root(), expn_info);
-            this.synthetic_expansion_infos.borrow_mut().insert(pos, span.ctxt());
+        let location = || Span::with_root_ctxt(lo, hi);
+        let recover_from_expn_data = |this: &Self, expn_data, pos| {
+            let span = location().fresh_expansion(expn_data);
+            this.synthetic_syntax_contexts.borrow_mut().insert(pos, span.ctxt());
             span
         };
-        Ok(match expn_info_tag {
-            TAG_NO_EXPANSION_INFO => {
+        Ok(match expn_data_tag {
+            TAG_NO_EXPN_DATA => {
                 location()
             }
-            TAG_EXPANSION_INFO_INLINE => {
-                let expn_info = Decodable::decode(self)?;
-                recover_from_expn_info(
-                    self, expn_info, AbsoluteBytePos::new(self.opaque.position())
+            TAG_EXPN_DATA_INLINE => {
+                let expn_data = Decodable::decode(self)?;
+                recover_from_expn_data(
+                    self, expn_data, AbsoluteBytePos::new(self.opaque.position())
                 )
             }
-            TAG_EXPANSION_INFO_SHORTHAND => {
+            TAG_EXPN_DATA_SHORTHAND => {
                 let pos = AbsoluteBytePos::decode(self)?;
-                let cached_ctxt = self.synthetic_expansion_infos.borrow().get(&pos).cloned();
+                let cached_ctxt = self.synthetic_syntax_contexts.borrow().get(&pos).cloned();
                 if let Some(ctxt) = cached_ctxt {
                     Span::new(lo, hi, ctxt)
                 } else {
-                    let expn_info =
-                        self.with_position(pos.to_usize(), |this| ExpnInfo::decode(this))?;
-                    recover_from_expn_info(self, expn_info, pos)
+                    let expn_data =
+                        self.with_position(pos.to_usize(), |this| ExpnData::decode(this))?;
+                    recover_from_expn_data(self, expn_data, pos)
                 }
             }
             _ => {
@@ -725,7 +725,7 @@ struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
     encoder: &'a mut E,
     type_shorthands: FxHashMap<Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
-    expn_info_shorthands: FxHashMap<ExpnId, AbsoluteBytePos>,
+    expn_data_shorthands: FxHashMap<ExpnId, AbsoluteBytePos>,
     interpret_allocs: FxHashMap<interpret::AllocId, usize>,
     interpret_allocs_inverse: Vec<interpret::AllocId>,
     source_map: CachingSourceMapView<'tcx>,
@@ -816,22 +816,18 @@ where
         col_lo.encode(self)?;
         len.encode(self)?;
-        if span_data.ctxt == SyntaxContext::empty() {
-            TAG_NO_EXPANSION_INFO.encode(self)
+        if span_data.ctxt == SyntaxContext::root() {
+            TAG_NO_EXPN_DATA.encode(self)
         } else {
-            let (expn_id, expn_info) = span_data.ctxt.outer_expn_with_info();
-            if let Some(expn_info) = expn_info {
-                if let Some(pos) = self.expn_info_shorthands.get(&expn_id).cloned() {
-                    TAG_EXPANSION_INFO_SHORTHAND.encode(self)?;
-                    pos.encode(self)
-                } else {
-                    TAG_EXPANSION_INFO_INLINE.encode(self)?;
-                    let pos = AbsoluteBytePos::new(self.position());
-                    self.expn_info_shorthands.insert(expn_id, pos);
-                    expn_info.encode(self)
-                }
+            let (expn_id, expn_data) = span_data.ctxt.outer_expn_with_data();
+            if let Some(pos) = self.expn_data_shorthands.get(&expn_id).cloned() {
+                TAG_EXPN_DATA_SHORTHAND.encode(self)?;
+                pos.encode(self)
             } else {
-                TAG_NO_EXPANSION_INFO.encode(self)
+                TAG_EXPN_DATA_INLINE.encode(self)?;
+                let pos = AbsoluteBytePos::new(self.position());
+                self.expn_data_shorthands.insert(expn_id, pos);
+                expn_data.encode(self)
             }
         }
     }

View file

@@ -1775,10 +1775,7 @@ impl SharedEmitterMain {
                     }
                 }
                 Ok(SharedEmitterMessage::InlineAsmError(cookie, msg)) => {
-                    match ExpnId::from_u32(cookie).expn_info() {
-                        Some(ei) => sess.span_err(ei.call_site, &msg),
-                        None => sess.err(&msg),
-                    }
+                    sess.span_err(ExpnId::from_u32(cookie).expn_data().call_site, &msg)
                 }
                 Ok(SharedEmitterMessage::AbortIfErrors) => {
                     sess.abort_if_errors();

View file

@@ -8,7 +8,7 @@ use crate::base;
 use crate::debuginfo::{self, VariableAccess, VariableKind, FunctionDebugContext};
 use crate::traits::*;
-use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span};
+use syntax_pos::{DUMMY_SP, BytePos, Span};
 use syntax::symbol::kw;
 use std::iter;
@@ -120,7 +120,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // In order to have a good line stepping behavior in debugger, we overwrite debug
         // locations of macro expansions with that of the outermost expansion site
         // (unless the crate is being compiled with `-Z debug-macros`).
-        if source_info.span.ctxt() == NO_EXPANSION ||
+        if !source_info.span.from_expansion() ||
            self.cx.sess().opts.debugging_opts.debug_macros {
             let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo());
             (scope, source_info.span)

View file

@@ -678,7 +678,7 @@ impl RustcDefaultCalls {
         let mut cfgs = sess.parse_sess.config.iter().filter_map(|&(name, ref value)| {
             let gated_cfg = GatedCfg::gate(&ast::MetaItem {
-                path: ast::Path::from_ident(ast::Ident::with_empty_ctxt(name)),
+                path: ast::Path::from_ident(ast::Ident::with_dummy_span(name)),
                 node: ast::MetaItemKind::Word,
                 span: DUMMY_SP,
             });

View file

@@ -43,8 +43,7 @@ use syntax_pos::{BytePos,
                  SourceFile,
                  FileName,
                  MultiSpan,
-                 Span,
-                 NO_EXPANSION};
+                 Span};
 /// Indicates the confidence in the correctness of a suggestion.
 ///
@@ -189,7 +188,7 @@ impl CodeSuggestion {
         // Find the bounding span.
         let lo = substitution.parts.iter().map(|part| part.span.lo()).min().unwrap();
         let hi = substitution.parts.iter().map(|part| part.span.hi()).min().unwrap();
-        let bounding_span = Span::new(lo, hi, NO_EXPANSION);
+        let bounding_span = Span::with_root_ctxt(lo, hi);
         let lines = cm.span_to_lines(bounding_span).unwrap();
         assert!(!lines.lines.is_empty());

View file

@@ -42,7 +42,7 @@ use syntax::source_map::Spanned;
 use syntax::edition::Edition;
 use syntax::feature_gate::{self, AttributeGate, AttributeType};
 use syntax::feature_gate::{Stability, deprecated_attributes};
-use syntax_pos::{BytePos, Span, SyntaxContext};
+use syntax_pos::{BytePos, Span};
 use syntax::symbol::{Symbol, kw, sym};
 use syntax::errors::{Applicability, DiagnosticBuilder};
 use syntax::print::pprust::expr_to_string;
@@ -78,7 +78,7 @@ impl EarlyLintPass for WhileTrue {
         if let ast::ExprKind::While(cond, ..) = &e.node {
             if let ast::ExprKind::Lit(ref lit) = pierce_parens(cond).node {
                 if let ast::LitKind::Bool(true) = lit.node {
-                    if lit.span.ctxt() == SyntaxContext::empty() {
+                    if !lit.span.from_expansion() {
                         let msg = "denote infinite loops with `loop { ... }`";
                         let condition_span = cx.sess.source_map().def_span(e.span);
                         cx.struct_span_lint(WHILE_TRUE, condition_span, msg)
@@ -167,7 +167,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns {
                 if fieldpat.is_shorthand {
                     continue;
                 }
-                if fieldpat.span.ctxt().outer_expn_info().is_some() {
+                if fieldpat.span.from_expansion() {
                     // Don't lint if this is a macro expansion: macro authors
                     // shouldn't have to worry about this kind of style issue
                     // (Issue #49588)
@@ -1012,7 +1012,7 @@ impl UnreachablePub {
         let mut applicability = Applicability::MachineApplicable;
         match vis.node {
             hir::VisibilityKind::Public if !cx.access_levels.is_reachable(id) => {
-                if span.ctxt().outer_expn_info().is_some() {
+                if span.from_expansion() {
                     applicability = Applicability::MaybeIncorrect;
                 }
                 let def_span = cx.tcx.sess.source_map().def_span(span);
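For reference, the `WhileTrue` hunk above only changes how the lint detects macro-generated code (`from_expansion()` instead of comparing the syntax context); hand-written code like the following sketch still gets the same warning:

```rust
fn main() {
    // warning: denote infinite loops with `loop { ... }`
    while true {
        break;
    }
}
```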

View file

@@ -517,9 +517,8 @@ impl EarlyLintPass for UnusedParens {
         // trigger in situations that macro authors shouldn't have to care about, e.g.,
         // when a parenthesized token tree matched in one macro expansion is matched as
         // an expression in another and used as a fn/method argument (Issue #47775)
-        if e.span.ctxt().outer_expn_info()
-            .map_or(false, |info| info.call_site.ctxt().outer_expn_info().is_some()) {
+        if e.span.ctxt().outer_expn_data().call_site.from_expansion() {
             return;
         }
         let msg = format!("{} argument", call_kind);
         for arg in args_to_check {

View file

@@ -35,7 +35,7 @@ use syntax::ext::proc_macro::BangProcMacro;
 use syntax::parse::source_file_to_stream;
 use syntax::parse::parser::emit_unclosed_delims;
 use syntax::symbol::{Symbol, sym};
-use syntax_pos::{Span, NO_EXPANSION, FileName};
+use syntax_pos::{Span, FileName};
 use rustc_data_structures::bit_set::BitSet;
 macro_rules! provide {
@@ -443,7 +443,7 @@ impl cstore::CStore {
         let source_name = FileName::Macros(macro_full_name);
         let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body);
-        let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION);
+        let local_span = Span::with_root_ctxt(source_file.start_pos, source_file.end_pos);
         let (body, mut errors) = source_file_to_stream(&sess.parse_sess, source_file, None);
         emit_unclosed_delims(&mut errors, &sess.diagnostic());

View file

@@ -32,7 +32,7 @@ use syntax::source_map;
 use syntax::symbol::{Symbol, sym};
 use syntax::ext::base::{MacroKind, SyntaxExtension};
 use syntax::ext::hygiene::ExpnId;
-use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, NO_EXPANSION};
+use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP};
 use log::debug;
 pub struct DecodeContext<'a, 'tcx> {
@@ -344,7 +344,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
         let hi = (hi + source_file.translated_source_file.start_pos)
                  - source_file.original_start_pos;
-        Ok(Span::new(lo, hi, NO_EXPANSION))
+        Ok(Span::with_root_ctxt(lo, hi))
     }
 }
@@ -569,7 +569,7 @@ impl<'a, 'tcx> CrateMetadata {
         ty::VariantDef::new(
             tcx,
-            Ident::with_empty_ctxt(self.item_name(index)),
+            Ident::with_dummy_span(self.item_name(index)),
             variant_did,
             ctor_did,
             data.discr,
@@ -577,7 +577,7 @@
                 let f = self.entry(index);
                 ty::FieldDef {
                     did: self.local_def_id(index),
-                    ident: Ident::with_empty_ctxt(self.item_name(index)),
+                    ident: Ident::with_dummy_span(self.item_name(index)),
                     vis: f.visibility.decode(self)
                 }
             }).collect(),
@@ -741,7 +741,7 @@ impl<'a, 'tcx> CrateMetadata {
                     DefKind::Macro(ext.macro_kind()),
                     self.local_def_id(DefIndex::from_proc_macro_index(id)),
                 );
-                let ident = Ident::with_empty_ctxt(name);
+                let ident = Ident::with_dummy_span(name);
                 callback(def::Export {
                     ident: ident,
                     res: res,
@@ -783,7 +783,7 @@ impl<'a, 'tcx> CrateMetadata {
             if let Some(kind) = self.def_kind(child_index) {
                 callback(def::Export {
                     res: Res::Def(kind, self.local_def_id(child_index)),
-                    ident: Ident::with_empty_ctxt(self.item_name(child_index)),
+                    ident: Ident::with_dummy_span(self.item_name(child_index)),
                     vis: self.get_visibility(child_index),
                     span: self.entry(child_index).span.decode((self, sess)),
                 });

View file

@@ -1,9 +1,11 @@
-//! Reduced graph building.
+//! After we obtain a fresh AST fragment from a macro, code in this module helps to integrate
+//! that fragment into the module structures that are already partially built.
 //!
-//! Here we build the "reduced graph": the graph of the module tree without
-//! any imports resolved.
+//! Items from the fragment are placed into modules,
+//! unexpanded macros in the fragment are visited and registered.
+//! Imports are also considered items and placed into modules here, but not resolved yet.
-use crate::macros::{InvocationData, LegacyBinding, LegacyScope};
+use crate::macros::{LegacyBinding, LegacyScope};
 use crate::resolve_imports::ImportDirective;
 use crate::resolve_imports::ImportDirectiveSubclass::{self, GlobImport, SingleImport};
 use crate::{Module, ModuleData, ModuleKind, NameBinding, NameBindingKind, Segment, ToNameBinding};
@@ -30,6 +32,7 @@ use syntax::attr;
 use syntax::ast::{self, Block, ForeignItem, ForeignItemKind, Item, ItemKind, NodeId};
 use syntax::ast::{MetaItemKind, StmtKind, TraitItem, TraitItemKind, Variant};
 use syntax::ext::base::{MacroKind, SyntaxExtension};
+use syntax::ext::expand::AstFragment;
 use syntax::ext::hygiene::ExpnId;
 use syntax::feature_gate::is_builtin_attr;
 use syntax::parse::token::{self, Token};
@@ -67,7 +70,7 @@ impl<'a> ToNameBinding<'a> for (Res, ty::Visibility, Span, ExpnId) {
     }
 }
-pub(crate) struct IsMacroExport;
+struct IsMacroExport;
 impl<'a> ToNameBinding<'a> for (Res, ty::Visibility, Span, ExpnId, IsMacroExport) {
     fn to_name_binding(self, arenas: &'a ResolverArenas<'a>) -> &'a NameBinding<'a> {
@@ -84,7 +87,7 @@ impl<'a> ToNameBinding<'a> for (Res, ty::Visibility, Span, ExpnId, IsMacroExport
 impl<'a> Resolver<'a> {
     /// Defines `name` in namespace `ns` of module `parent` to be `def` if it is not yet defined;
     /// otherwise, reports an error.
-    pub fn define<T>(&mut self, parent: Module<'a>, ident: Ident, ns: Namespace, def: T)
+    crate fn define<T>(&mut self, parent: Module<'a>, ident: Ident, ns: Namespace, def: T)
         where T: ToNameBinding<'a>,
     {
         let binding = def.to_name_binding(self.arenas);
@@ -93,7 +96,7 @@ impl<'a> Resolver<'a> {
         }
     }
-    pub fn get_module(&mut self, def_id: DefId) -> Module<'a> {
+    crate fn get_module(&mut self, def_id: DefId) -> Module<'a> {
         if def_id.krate == LOCAL_CRATE {
             return self.module_map[&def_id]
         }
@@ -119,7 +122,7 @@ impl<'a> Resolver<'a> {
         module
     }
-    pub fn macro_def_scope(&mut self, expn_id: ExpnId) -> Module<'a> {
+    crate fn macro_def_scope(&mut self, expn_id: ExpnId) -> Module<'a> {
         let def_id = match self.macro_defs.get(&expn_id) {
             Some(def_id) => *def_id,
             None => return self.graph_root,
@@ -141,7 +144,7 @@ impl<'a> Resolver<'a> {
         }
     }
-    crate fn get_macro_by_def_id(&mut self, def_id: DefId) -> Option<Lrc<SyntaxExtension>> {
+    fn get_macro_by_def_id(&mut self, def_id: DefId) -> Option<Lrc<SyntaxExtension>> {
         if let Some(ext) = self.macro_map.get(&def_id) {
             return Some(ext.clone());
         }
@@ -158,21 +161,29 @@ impl<'a> Resolver<'a> {
     /// Ensures that the reduced graph rooted at the given external module
     /// is built, building it if it is not.
-    pub fn populate_module_if_necessary(&mut self, module: Module<'a>) {
+    crate fn populate_module_if_necessary(&mut self, module: Module<'a>) {
         if module.populated.get() { return }
         let def_id = module.def_id().unwrap();
         for child in self.cstore.item_children_untracked(def_id, self.session) {
             let child = child.map_id(|_| panic!("unexpected id"));
-            BuildReducedGraphVisitor { parent_scope: self.dummy_parent_scope(), r: self }
-                .build_reduced_graph_for_external_crate_res(module, child);
+            BuildReducedGraphVisitor { parent_scope: ParentScope::module(module), r: self }
+                .build_reduced_graph_for_external_crate_res(child);
         }
         module.populated.set(true)
     }
+    crate fn build_reduced_graph(
+        &mut self, fragment: &AstFragment, parent_scope: ParentScope<'a>
+    ) -> LegacyScope<'a> {
+        let mut visitor = BuildReducedGraphVisitor { r: self, parent_scope };
+        fragment.visit_with(&mut visitor);
+        visitor.parent_scope.legacy
+    }
 }
-pub struct BuildReducedGraphVisitor<'a, 'b> {
-    pub r: &'b mut Resolver<'a>,
-    pub parent_scope: ParentScope<'a>,
+struct BuildReducedGraphVisitor<'a, 'b> {
+    r: &'b mut Resolver<'a>,
+    parent_scope: ParentScope<'a>,
 }
 impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
@@ -300,10 +311,9 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
         root_id: NodeId,
         vis: ty::Visibility,
     ) {
-        let parent_scope = &self.parent_scope;
-        let current_module = parent_scope.module;
+        let current_module = self.parent_scope.module;
         let directive = self.r.arenas.alloc_import_directive(ImportDirective {
-            parent_scope: parent_scope.clone(),
+            parent_scope: self.parent_scope,
             module_path,
             imported_module: Cell::new(None),
             subclass,
@@ -601,7 +611,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
         let directive = self.r.arenas.alloc_import_directive(ImportDirective {
             root_id: item.id,
             id: item.id,
-            parent_scope: self.parent_scope.clone(),
+            parent_scope: self.parent_scope,
             imported_module: Cell::new(Some(ModuleOrUniformRoot::Module(module))),
             subclass: ImportDirectiveSubclass::ExternCrate {
                 source: orig_name,
@@ -706,7 +716,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
         self.r.define(parent, ident, TypeNS, (module, vis, sp, expansion));
         for variant in &(*enum_definition).variants {
-            self.build_reduced_graph_for_variant(variant, module, vis, expansion);
+            self.build_reduced_graph_for_variant(variant, module, vis);
         }
     }
@@ -797,8 +807,8 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
     fn build_reduced_graph_for_variant(&mut self,
                                        variant: &Variant,
                                        parent: Module<'a>,
-                                       vis: ty::Visibility,
-                                       expn_id: ExpnId) {
+                                       vis: ty::Visibility) {
+        let expn_id = self.parent_scope.expansion;
         let ident = variant.ident;
         // Define a name in the type namespace.
@@ -861,11 +871,8 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
     }
     /// Builds the reduced graph for a single item in an external crate.
-    fn build_reduced_graph_for_external_crate_res(
-        &mut self,
-        parent: Module<'a>,
-        child: Export<ast::NodeId>,
-    ) {
+    fn build_reduced_graph_for_external_crate_res(&mut self, child: Export<ast::NodeId>) {
+        let parent = self.parent_scope.module;
         let Export { ident, res, vis, span } = child;
         // FIXME: We shouldn't create the gensym here, it should come from metadata,
         // but metadata cannot encode gensyms currently, so we create it here.
@@ -997,7 +1004,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
             |this: &Self, span| this.r.arenas.alloc_import_directive(ImportDirective {
                 root_id: item.id,
                 id: item.id,
-                parent_scope: this.parent_scope.clone(),
+                parent_scope: this.parent_scope,
                 imported_module: Cell::new(Some(ModuleOrUniformRoot::Module(module))),
                 subclass: ImportDirectiveSubclass::MacroUse,
                 use_span_with_attributes: item.span_with_attributes(),
@@ -1066,20 +1073,15 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
         false
     }
-    fn visit_invoc(&mut self, id: ast::NodeId) -> &'a InvocationData<'a> {
+    fn visit_invoc(&mut self, id: ast::NodeId) -> LegacyScope<'a> {
         let invoc_id = id.placeholder_to_expn_id();
         self.parent_scope.module.unresolved_invocations.borrow_mut().insert(invoc_id);
-        let invocation_data = self.r.arenas.alloc_invocation_data(InvocationData {
-            module: self.parent_scope.module,
-            parent_legacy_scope: self.parent_scope.legacy,
-            output_legacy_scope: Cell::new(None),
-        });
-        let old_invocation_data = self.r.invocations.insert(invoc_id, invocation_data);
-        assert!(old_invocation_data.is_none(), "invocation data is reset for an invocation");
-        invocation_data
+        let old_parent_scope = self.r.invocation_parent_scopes.insert(invoc_id, self.parent_scope);
+        assert!(old_parent_scope.is_none(), "invocation data is reset for an invocation");
+        LegacyScope::Invocation(invoc_id)
     }
     fn proc_macro_stub(item: &ast::Item) -> Option<(MacroKind, Ident, Span)> {
@@ -1180,7 +1182,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
                 return
             }
             ItemKind::Mac(..) => {
-                self.parent_scope.legacy = LegacyScope::Invocation(self.visit_invoc(item.id));
+                self.parent_scope.legacy = self.visit_invoc(item.id);
                 return
             }
             ItemKind::Mod(..) => self.contains_macro_use(&item.attrs),
@@ -1199,7 +1201,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
     fn visit_stmt(&mut self, stmt: &'b ast::Stmt) {
         if let ast::StmtKind::Mac(..) = stmt.node {
-            self.parent_scope.legacy = LegacyScope::Invocation(self.visit_invoc(stmt.id));
+            self.parent_scope.legacy = self.visit_invoc(stmt.id);
         } else {
             visit::walk_stmt(self, stmt);
         }
@@ -1267,9 +1269,7 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
     fn visit_attribute(&mut self, attr: &'b ast::Attribute) {
         if !attr.is_sugared_doc && is_builtin_attr(attr) {
-            self.parent_scope.module.builtin_attrs.borrow_mut().push((
-                attr.path.segments[0].ident, self.parent_scope.clone()
-            ));
+            self.r.builtin_attrs.push((attr.path.segments[0].ident, self.parent_scope));
         }
         visit::walk_attribute(self, attr);
     }

View file

@@ -376,9 +376,9 @@ impl<'a> Resolver<'a> {
             Scope::DeriveHelpers => {
                 let res = Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper);
                 if filter_fn(res) {
-                    for derive in &parent_scope.derives {
+                    for derive in parent_scope.derives {
                         let parent_scope =
-                            &ParentScope { derives: Vec::new(), ..*parent_scope };
+                            &ParentScope { derives: &[], ..*parent_scope };
                         if let Ok((Some(ext), _)) = this.resolve_macro_path(
                             derive, Some(MacroKind::Derive), parent_scope, false, false
                         ) {
@@ -455,7 +455,7 @@ impl<'a> Resolver<'a> {
                 let mut tmp_suggestions = Vec::new();
                 add_module_candidates(prelude, &mut tmp_suggestions, filter_fn);
                 suggestions.extend(tmp_suggestions.into_iter().filter(|s| {
-                    use_prelude || this.is_builtin_macro(s.res.opt_def_id())
+                    use_prelude || this.is_builtin_macro(s.res)
                 }));
             }
         }
@@ -595,7 +595,7 @@ impl<'a> Resolver<'a> {
         where FilterFn: Fn(Res) -> bool
     {
         let mut suggestions = self.lookup_import_candidates_from_module(
-            lookup_ident, namespace, self.graph_root, Ident::with_empty_ctxt(kw::Crate), &filter_fn
+            lookup_ident, namespace, self.graph_root, Ident::with_dummy_span(kw::Crate), &filter_fn
         );
         if lookup_ident.span.rust_2018() {

View file

@@ -1,3 +1,10 @@
+//! "Late resolution" is the pass that resolves most of names in a crate beside imports and macros.
+//! It runs when the crate is fully expanded and its module structure is fully built.
+//! So it just walks through the crate and resolves all the expressions, types, etc.
+//!
+//! If you wonder why there's no `early.rs`, that's because it's split into three files -
+//! `build_reduced_graph.rs`, `macros.rs` and `resolve_imports.rs`.
 use GenericParameters::*;
 use RibKind::*;
@@ -352,7 +359,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> {
             self.smart_resolve_path(ty.id, qself.as_ref(), path, PathSource::Type);
         }
         TyKind::ImplicitSelf => {
-            let self_ty = Ident::with_empty_ctxt(kw::SelfUpper);
+            let self_ty = Ident::with_dummy_span(kw::SelfUpper);
             let res = self.resolve_ident_in_lexical_scope(self_ty, TypeNS, Some(ty.id), ty.span)
                           .map_or(Res::Err, |d| d.res());
             self.r.record_partial_res(ty.id, PartialRes::new(res));
@@ -442,7 +449,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> {
                 GenericParamKind::Type { ref default, .. } => {
                     found_default |= default.is_some();
                     if found_default {
-                        Some((Ident::with_empty_ctxt(param.ident.name), Res::Err))
+                        Some((Ident::with_dummy_span(param.ident.name), Res::Err))
                     } else {
                         None
                     }
@@ -459,7 +466,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> {
                     false
                 }
             })
-            .map(|param| (Ident::with_empty_ctxt(param.ident.name), Res::Err)));
+            .map(|param| (Ident::with_dummy_span(param.ident.name), Res::Err)));
         for param in &generics.params {
             match param.kind {
@@ -476,7 +483,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> {
                     }
                     // Allow all following defaults to refer to this type parameter.
-                    default_ban_rib.bindings.remove(&Ident::with_empty_ctxt(param.ident.name));
+                    default_ban_rib.bindings.remove(&Ident::with_dummy_span(param.ident.name));
                 }
                 GenericParamKind::Const { ref ty } => {
                     self.ribs[TypeNS].push(const_ty_param_ban_rib);
@@ -501,8 +508,8 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
     fn new(resolver: &'b mut Resolver<'a>) -> LateResolutionVisitor<'a, 'b> {
         // During late resolution we only track the module component of the parent scope,
        // although it may be useful to track other components as well for diagnostics.
-        let parent_scope = resolver.dummy_parent_scope();
         let graph_root = resolver.graph_root;
+        let parent_scope = ParentScope::module(graph_root);
         LateResolutionVisitor {
             r: resolver,
             parent_scope,
@@ -574,7 +581,6 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
         self.ribs[ValueNS].push(Rib::new(ModuleRibKind(module)));
         self.ribs[TypeNS].push(Rib::new(ModuleRibKind(module)));
-        self.r.finalize_current_module_macro_resolutions(module);
         let ret = f(self);
         self.parent_scope.module = orig_module;
@@ -965,7 +971,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
         let mut self_type_rib = Rib::new(NormalRibKind);
         // Plain insert (no renaming, since types are not currently hygienic)
-        self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res);
+        self_type_rib.bindings.insert(Ident::with_dummy_span(kw::SelfUpper), self_res);
         self.ribs[TypeNS].push(self_type_rib);
         f(self);
         self.ribs[TypeNS].pop();
@@ -976,7 +982,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
     {
         let self_res = Res::SelfCtor(impl_id);
         let mut self_type_rib = Rib::new(NormalRibKind);
-        self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res);
+        self_type_rib.bindings.insert(Ident::with_dummy_span(kw::SelfUpper), self_res);
         self.ribs[ValueNS].push(self_type_rib);
         f(self);
         self.ribs[ValueNS].pop();
@@ -1227,7 +1233,6 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
             self.ribs[ValueNS].push(Rib::new(ModuleRibKind(anonymous_module)));
             self.ribs[TypeNS].push(Rib::new(ModuleRibKind(anonymous_module)));
             self.parent_scope.module = anonymous_module;
-            self.r.finalize_current_module_macro_resolutions(anonymous_module);
         } else {
             self.ribs[ValueNS].push(Rib::new(NormalRibKind));
         }
@@ -1476,7 +1481,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
             self.r.trait_map.insert(id, traits);
         }
-        let mut std_path = vec![Segment::from_ident(Ident::with_empty_ctxt(sym::std))];
+        let mut std_path = vec![Segment::from_ident(Ident::with_dummy_span(sym::std))];
         std_path.extend(path);
         if self.r.primitive_type_table.primitive_types.contains_key(&path[0].ident.name) {
let cl = CrateLint::No; let cl = CrateLint::No;
@ -1507,7 +1512,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
fn self_type_is_available(&mut self, span: Span) -> bool { fn self_type_is_available(&mut self, span: Span) -> bool {
let binding = self.resolve_ident_in_lexical_scope( let binding = self.resolve_ident_in_lexical_scope(
Ident::with_empty_ctxt(kw::SelfUpper), Ident::with_dummy_span(kw::SelfUpper),
TypeNS, TypeNS,
None, None,
span, span,
@ -1984,7 +1989,6 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> {
impl<'a> Resolver<'a> { impl<'a> Resolver<'a> {
pub(crate) fn late_resolve_crate(&mut self, krate: &Crate) { pub(crate) fn late_resolve_crate(&mut self, krate: &Crate) {
self.finalize_current_module_macro_resolutions(self.graph_root);
let mut late_resolution_visitor = LateResolutionVisitor::new(self); let mut late_resolution_visitor = LateResolutionVisitor::new(self);
visit::walk_crate(&mut late_resolution_visitor, krate); visit::walk_crate(&mut late_resolution_visitor, krate);
for (id, span) in late_resolution_visitor.unused_labels.iter() { for (id, span) in late_resolution_visitor.unused_labels.iter() {


@ -1,3 +1,12 @@
//! This crate is responsible for the part of name resolution that doesn't require the type checker.
//!
//! The module structure of the crate is built here.
//! Paths in macros, imports, expressions, types, and patterns are resolved here.
//! Label names are resolved here as well.
//!
//! Type-relative name resolution (methods, fields, associated items) happens in `librustc_typeck`.
//! Lifetime names are resolved in `librustc/middle/resolve_lifetime.rs`.
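// Illustrative note (not part of the original diff): in `v.push(x)` the names `v` and `x`
// are resolved by this crate, the method `push` is type-relative and resolved in
// `librustc_typeck`, and a lifetime such as `'a` in `&'a T` is handled by
// `librustc/middle/resolve_lifetime.rs`.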
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(crate_visibility_modifier)] #![feature(crate_visibility_modifier)]
@ -54,7 +63,7 @@ use diagnostics::{Suggestion, ImportSuggestion};
use diagnostics::{find_span_of_binding_until_next_binding, extend_span_to_previous_binding}; use diagnostics::{find_span_of_binding_until_next_binding, extend_span_to_previous_binding};
use late::{PathSource, Rib, RibKind::*}; use late::{PathSource, Rib, RibKind::*};
use resolve_imports::{ImportDirective, ImportDirectiveSubclass, NameResolution, ImportResolver}; use resolve_imports::{ImportDirective, ImportDirectiveSubclass, NameResolution, ImportResolver};
use macros::{InvocationData, LegacyBinding, LegacyScope}; use macros::{LegacyBinding, LegacyScope};
type Res = def::Res<NodeId>; type Res = def::Res<NodeId>;
@ -122,12 +131,25 @@ enum ScopeSet {
/// Serves as a starting point for the scope visitor. /// Serves as a starting point for the scope visitor.
/// This struct is currently used only for early resolution (imports and macros), /// This struct is currently used only for early resolution (imports and macros),
/// but not for late resolution yet. /// but not for late resolution yet.
#[derive(Clone, Debug)] #[derive(Clone, Copy, Debug)]
pub struct ParentScope<'a> { pub struct ParentScope<'a> {
module: Module<'a>, module: Module<'a>,
expansion: ExpnId, expansion: ExpnId,
legacy: LegacyScope<'a>, legacy: LegacyScope<'a>,
derives: Vec<ast::Path>, derives: &'a [ast::Path],
}
impl<'a> ParentScope<'a> {
/// Creates a parent scope whose module component is the passed module,
/// and whose other scope components are set to default empty values.
pub fn module(module: Module<'a>) -> ParentScope<'a> {
ParentScope {
module,
expansion: ExpnId::root(),
legacy: LegacyScope::Empty,
derives: &[],
}
}
} }
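// Editorial sketch (not part of the original diff): storing `derives` as `&'a [ast::Path]`
// lets `ParentScope` derive `Copy`, which is why later hunks replace `..parent_scope.clone()`
// with struct-update syntax over a dereferenced borrow, `..*parent_scope`. A minimal,
// self-contained illustration of that pattern follows; `Scope` below is a stand-in type,
// not the compiler's `ParentScope`.
#[derive(Clone, Copy, Debug)]
struct Scope<'a> {
    module: &'a str,
    derives: &'a [&'a str],
}

fn adjusted<'a>(parent: &Scope<'a>, module: &'a str) -> Scope<'a> {
    // No cloning needed because `Scope<'a>` is `Copy`.
    Scope { module, ..*parent }
}

fn main() {
    let root = Scope { module: "crate_root", derives: &[] };
    println!("{:?}", adjusted(&root, "inner_module"));
}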
#[derive(Eq)] #[derive(Eq)]
@ -274,7 +296,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
ItemKind::Use(..) => { ItemKind::Use(..) => {
// don't suggest placing a use before the prelude // don't suggest placing a use before the prelude
// import or other generated ones // import or other generated ones
if item.span.ctxt().outer_expn_info().is_none() { if !item.span.from_expansion() {
self.span = Some(item.span.shrink_to_lo()); self.span = Some(item.span.shrink_to_lo());
self.found_use = true; self.found_use = true;
return; return;
@ -284,7 +306,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
ItemKind::ExternCrate(_) => {} ItemKind::ExternCrate(_) => {}
// but place them before the first other item // but place them before the first other item
_ => if self.span.map_or(true, |span| item.span < span ) { _ => if self.span.map_or(true, |span| item.span < span ) {
if item.span.ctxt().outer_expn_info().is_none() { if !item.span.from_expansion() {
// don't insert between attributes and an item // don't insert between attributes and an item
if item.attrs.is_empty() { if item.attrs.is_empty() {
self.span = Some(item.span.shrink_to_lo()); self.span = Some(item.span.shrink_to_lo());
@ -418,11 +440,6 @@ pub struct ModuleData<'a> {
normal_ancestor_id: DefId, normal_ancestor_id: DefId,
resolutions: RefCell<FxHashMap<(Ident, Namespace), &'a RefCell<NameResolution<'a>>>>, resolutions: RefCell<FxHashMap<(Ident, Namespace), &'a RefCell<NameResolution<'a>>>>,
single_segment_macro_resolutions: RefCell<Vec<(Ident, MacroKind, ParentScope<'a>,
Option<&'a NameBinding<'a>>)>>,
multi_segment_macro_resolutions: RefCell<Vec<(Vec<Segment>, Span, MacroKind, ParentScope<'a>,
Option<Res>)>>,
builtin_attrs: RefCell<Vec<(Ident, ParentScope<'a>)>>,
// Macro invocations that can expand into items in this module. // Macro invocations that can expand into items in this module.
unresolved_invocations: RefCell<FxHashSet<ExpnId>>, unresolved_invocations: RefCell<FxHashSet<ExpnId>>,
@ -459,9 +476,6 @@ impl<'a> ModuleData<'a> {
kind, kind,
normal_ancestor_id, normal_ancestor_id,
resolutions: Default::default(), resolutions: Default::default(),
single_segment_macro_resolutions: RefCell::new(Vec::new()),
multi_segment_macro_resolutions: RefCell::new(Vec::new()),
builtin_attrs: RefCell::new(Vec::new()),
unresolved_invocations: Default::default(), unresolved_invocations: Default::default(),
no_implicit_prelude: false, no_implicit_prelude: false,
glob_importers: RefCell::new(Vec::new()), glob_importers: RefCell::new(Vec::new()),
@ -807,7 +821,7 @@ pub struct Resolver<'a> {
pub definitions: Definitions, pub definitions: Definitions,
graph_root: Module<'a>, pub graph_root: Module<'a>,
prelude: Option<Module<'a>>, prelude: Option<Module<'a>>,
pub extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'a>>, pub extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'a>>,
@ -896,15 +910,24 @@ pub struct Resolver<'a> {
local_macro_def_scopes: FxHashMap<NodeId, Module<'a>>, local_macro_def_scopes: FxHashMap<NodeId, Module<'a>>,
unused_macros: NodeMap<Span>, unused_macros: NodeMap<Span>,
proc_macro_stubs: NodeSet, proc_macro_stubs: NodeSet,
/// Traces collected during macro resolution and validated when resolution is complete.
single_segment_macro_resolutions: Vec<(Ident, MacroKind, ParentScope<'a>,
Option<&'a NameBinding<'a>>)>,
multi_segment_macro_resolutions: Vec<(Vec<Segment>, Span, MacroKind, ParentScope<'a>,
Option<Res>)>,
builtin_attrs: Vec<(Ident, ParentScope<'a>)>,
/// Some built-in derives mark items they are applied to so they are treated specially later. /// Some built-in derives mark items they are applied to so they are treated specially later.
/// Derive macros cannot modify the item themselves and have to store the markers in the global /// Derive macros cannot modify the item themselves and have to store the markers in the global
/// context, so they attach the markers to derive container IDs using this resolver table. /// context, so they attach the markers to derive container IDs using this resolver table.
/// FIXME: Find a way for `PartialEq` and `Eq` to emulate `#[structural_match]` /// FIXME: Find a way for `PartialEq` and `Eq` to emulate `#[structural_match]`
/// by marking the produced impls rather than the original items. /// by marking the produced impls rather than the original items.
special_derives: FxHashMap<ExpnId, SpecialDerives>, special_derives: FxHashMap<ExpnId, SpecialDerives>,
/// Parent scopes in which the macros were invoked.
/// Maps the `ExpnId` of an expansion to its containing module or block. /// FIXME: `derives` are missing in these parent scopes and need to be taken from elsewhere.
invocations: FxHashMap<ExpnId, &'a InvocationData<'a>>, invocation_parent_scopes: FxHashMap<ExpnId, ParentScope<'a>>,
/// Legacy scopes *produced* by expanding the macro invocations;
/// they include all the `macro_rules` items and other invocations generated by them.
output_legacy_scopes: FxHashMap<ExpnId, LegacyScope<'a>>,
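// Illustrative note (not part of the original diff): together these two maps replace the old
// per-invocation `InvocationData`; `invocation_parent_scopes[expn_id]` records the scope an
// invocation was found in, and `output_legacy_scopes[expn_id]` is filled in once that
// invocation has been expanded.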
/// Avoid duplicated errors for "name already defined". /// Avoid duplicated errors for "name already defined".
name_already_seen: FxHashMap<Name, Span>, name_already_seen: FxHashMap<Name, Span>,
@ -927,8 +950,8 @@ pub struct ResolverArenas<'a> {
name_bindings: arena::TypedArena<NameBinding<'a>>, name_bindings: arena::TypedArena<NameBinding<'a>>,
import_directives: arena::TypedArena<ImportDirective<'a>>, import_directives: arena::TypedArena<ImportDirective<'a>>,
name_resolutions: arena::TypedArena<RefCell<NameResolution<'a>>>, name_resolutions: arena::TypedArena<RefCell<NameResolution<'a>>>,
invocation_data: arena::TypedArena<InvocationData<'a>>,
legacy_bindings: arena::TypedArena<LegacyBinding<'a>>, legacy_bindings: arena::TypedArena<LegacyBinding<'a>>,
ast_paths: arena::TypedArena<ast::Path>,
} }
impl<'a> ResolverArenas<'a> { impl<'a> ResolverArenas<'a> {
@ -952,13 +975,12 @@ impl<'a> ResolverArenas<'a> {
fn alloc_name_resolution(&'a self) -> &'a RefCell<NameResolution<'a>> { fn alloc_name_resolution(&'a self) -> &'a RefCell<NameResolution<'a>> {
self.name_resolutions.alloc(Default::default()) self.name_resolutions.alloc(Default::default())
} }
fn alloc_invocation_data(&'a self, expansion_data: InvocationData<'a>)
-> &'a InvocationData<'a> {
self.invocation_data.alloc(expansion_data)
}
fn alloc_legacy_binding(&'a self, binding: LegacyBinding<'a>) -> &'a LegacyBinding<'a> { fn alloc_legacy_binding(&'a self, binding: LegacyBinding<'a>) -> &'a LegacyBinding<'a> {
self.legacy_bindings.alloc(binding) self.legacy_bindings.alloc(binding)
} }
fn alloc_ast_paths(&'a self, paths: &[ast::Path]) -> &'a [ast::Path] {
self.ast_paths.alloc_from_iter(paths.iter().cloned())
}
} }
impl<'a, 'b> ty::DefIdTree for &'a Resolver<'b> { impl<'a, 'b> ty::DefIdTree for &'a Resolver<'b> {
@ -985,11 +1007,11 @@ impl<'a> hir::lowering::Resolver for Resolver<'a> {
} else { } else {
kw::Crate kw::Crate
}; };
let segments = iter::once(Ident::with_empty_ctxt(root)) let segments = iter::once(Ident::with_dummy_span(root))
.chain( .chain(
crate_root.into_iter() crate_root.into_iter()
.chain(components.iter().cloned()) .chain(components.iter().cloned())
.map(Ident::with_empty_ctxt) .map(Ident::with_dummy_span)
).map(|i| self.new_ast_path_segment(i)).collect::<Vec<_>>(); ).map(|i| self.new_ast_path_segment(i)).collect::<Vec<_>>();
let path = ast::Path { let path = ast::Path {
@ -997,7 +1019,7 @@ impl<'a> hir::lowering::Resolver for Resolver<'a> {
segments, segments,
}; };
let parent_scope = &self.dummy_parent_scope(); let parent_scope = &ParentScope::module(self.graph_root);
let res = match self.resolve_ast_path(&path, ns, parent_scope) { let res = match self.resolve_ast_path(&path, ns, parent_scope) {
Ok(res) => res, Ok(res) => res,
Err((span, error)) => { Err((span, error)) => {
@ -1060,18 +1082,17 @@ impl<'a> Resolver<'a> {
.collect(); .collect();
if !attr::contains_name(&krate.attrs, sym::no_core) { if !attr::contains_name(&krate.attrs, sym::no_core) {
extern_prelude.insert(Ident::with_empty_ctxt(sym::core), Default::default()); extern_prelude.insert(Ident::with_dummy_span(sym::core), Default::default());
if !attr::contains_name(&krate.attrs, sym::no_std) { if !attr::contains_name(&krate.attrs, sym::no_std) {
extern_prelude.insert(Ident::with_empty_ctxt(sym::std), Default::default()); extern_prelude.insert(Ident::with_dummy_span(sym::std), Default::default());
if session.rust_2018() { if session.rust_2018() {
extern_prelude.insert(Ident::with_empty_ctxt(sym::meta), Default::default()); extern_prelude.insert(Ident::with_dummy_span(sym::meta), Default::default());
} }
} }
} }
let mut invocations = FxHashMap::default(); let mut invocation_parent_scopes = FxHashMap::default();
invocations.insert(ExpnId::root(), invocation_parent_scopes.insert(ExpnId::root(), ParentScope::module(graph_root));
arenas.alloc_invocation_data(InvocationData::root(graph_root)));
let mut macro_defs = FxHashMap::default(); let mut macro_defs = FxHashMap::default();
macro_defs.insert(ExpnId::root(), root_def_id); macro_defs.insert(ExpnId::root(), root_def_id);
@ -1143,7 +1164,8 @@ impl<'a> Resolver<'a> {
dummy_ext_bang: Lrc::new(SyntaxExtension::dummy_bang(session.edition())), dummy_ext_bang: Lrc::new(SyntaxExtension::dummy_bang(session.edition())),
dummy_ext_derive: Lrc::new(SyntaxExtension::dummy_derive(session.edition())), dummy_ext_derive: Lrc::new(SyntaxExtension::dummy_derive(session.edition())),
non_macro_attrs: [non_macro_attr(false), non_macro_attr(true)], non_macro_attrs: [non_macro_attr(false), non_macro_attr(true)],
invocations, invocation_parent_scopes,
output_legacy_scopes: Default::default(),
macro_defs, macro_defs,
local_macro_def_scopes: FxHashMap::default(), local_macro_def_scopes: FxHashMap::default(),
name_already_seen: FxHashMap::default(), name_already_seen: FxHashMap::default(),
@ -1151,6 +1173,9 @@ impl<'a> Resolver<'a> {
struct_constructors: Default::default(), struct_constructors: Default::default(),
unused_macros: Default::default(), unused_macros: Default::default(),
proc_macro_stubs: Default::default(), proc_macro_stubs: Default::default(),
single_segment_macro_resolutions: Default::default(),
multi_segment_macro_resolutions: Default::default(),
builtin_attrs: Default::default(),
special_derives: Default::default(), special_derives: Default::default(),
active_features: active_features:
features.declared_lib_features.iter().map(|(feat, ..)| *feat) features.declared_lib_features.iter().map(|(feat, ..)| *feat)
@ -1182,9 +1207,8 @@ impl<'a> Resolver<'a> {
f(self, MacroNS); f(self, MacroNS);
} }
fn is_builtin_macro(&mut self, def_id: Option<DefId>) -> bool { fn is_builtin_macro(&mut self, res: Res) -> bool {
def_id.and_then(|def_id| self.get_macro_by_def_id(def_id)) self.get_macro(res).map_or(false, |ext| ext.is_builtin)
.map_or(false, |ext| ext.is_builtin)
} }
fn macro_def(&self, mut ctxt: SyntaxContext) -> DefId { fn macro_def(&self, mut ctxt: SyntaxContext) -> DefId {
@ -1203,6 +1227,7 @@ impl<'a> Resolver<'a> {
/// Entry point to crate resolution. /// Entry point to crate resolution.
pub fn resolve_crate(&mut self, krate: &Crate) { pub fn resolve_crate(&mut self, krate: &Crate) {
ImportResolver { r: self }.finalize_imports(); ImportResolver { r: self }.finalize_imports();
self.finalize_macro_resolutions();
self.late_resolve_crate(krate); self.late_resolve_crate(krate);
@ -1319,13 +1344,15 @@ impl<'a> Resolver<'a> {
ScopeSet::AbsolutePath(ns) => (ns, true), ScopeSet::AbsolutePath(ns) => (ns, true),
ScopeSet::Macro(_) => (MacroNS, false), ScopeSet::Macro(_) => (MacroNS, false),
}; };
// Jump out of trait or enum modules, they do not act as scopes.
let module = parent_scope.module.nearest_item_scope();
let mut scope = match ns { let mut scope = match ns {
_ if is_absolute_path => Scope::CrateRoot, _ if is_absolute_path => Scope::CrateRoot,
TypeNS | ValueNS => Scope::Module(parent_scope.module), TypeNS | ValueNS => Scope::Module(module),
MacroNS => Scope::DeriveHelpers, MacroNS => Scope::DeriveHelpers,
}; };
let mut ident = ident.modern(); let mut ident = ident.modern();
let mut use_prelude = !parent_scope.module.no_implicit_prelude; let mut use_prelude = !module.no_implicit_prelude;
loop { loop {
let visit = match scope { let visit = match scope {
@ -1355,10 +1382,11 @@ impl<'a> Resolver<'a> {
LegacyScope::Binding(binding) => Scope::MacroRules( LegacyScope::Binding(binding) => Scope::MacroRules(
binding.parent_legacy_scope binding.parent_legacy_scope
), ),
LegacyScope::Invocation(invoc) => Scope::MacroRules( LegacyScope::Invocation(invoc_id) => Scope::MacroRules(
invoc.output_legacy_scope.get().unwrap_or(invoc.parent_legacy_scope) self.output_legacy_scopes.get(&invoc_id).cloned()
.unwrap_or(self.invocation_parent_scopes[&invoc_id].legacy)
), ),
LegacyScope::Empty => Scope::Module(parent_scope.module), LegacyScope::Empty => Scope::Module(module),
} }
Scope::CrateRoot => match ns { Scope::CrateRoot => match ns {
TypeNS => { TypeNS => {
@ -1430,7 +1458,7 @@ impl<'a> Resolver<'a> {
} }
let (general_span, modern_span) = if ident.name == kw::SelfUpper { let (general_span, modern_span) = if ident.name == kw::SelfUpper {
// FIXME(jseyfried) improve `Self` hygiene // FIXME(jseyfried) improve `Self` hygiene
let empty_span = ident.span.with_ctxt(SyntaxContext::empty()); let empty_span = ident.span.with_ctxt(SyntaxContext::root());
(empty_span, empty_span) (empty_span, empty_span)
} else if ns == TypeNS { } else if ns == TypeNS {
let modern_span = ident.span.modern(); let modern_span = ident.span.modern();
@ -1501,7 +1529,7 @@ impl<'a> Resolver<'a> {
self.hygienic_lexical_parent(module, &mut ident.span) self.hygienic_lexical_parent(module, &mut ident.span)
}; };
module = unwrap_or!(opt_module, break); module = unwrap_or!(opt_module, break);
let adjusted_parent_scope = &ParentScope { module, ..parent_scope.clone() }; let adjusted_parent_scope = &ParentScope { module, ..*parent_scope };
let result = self.resolve_ident_in_module_unadjusted( let result = self.resolve_ident_in_module_unadjusted(
ModuleOrUniformRoot::Module(module), ModuleOrUniformRoot::Module(module),
ident, ident,
@ -1637,7 +1665,7 @@ impl<'a> Resolver<'a> {
ModuleOrUniformRoot::Module(m) => { ModuleOrUniformRoot::Module(m) => {
if let Some(def) = ident.span.modernize_and_adjust(m.expansion) { if let Some(def) = ident.span.modernize_and_adjust(m.expansion) {
tmp_parent_scope = tmp_parent_scope =
ParentScope { module: self.macro_def_scope(def), ..parent_scope.clone() }; ParentScope { module: self.macro_def_scope(def), ..*parent_scope };
adjusted_parent_scope = &tmp_parent_scope; adjusted_parent_scope = &tmp_parent_scope;
} }
} }
@ -2624,7 +2652,7 @@ impl<'a> Resolver<'a> {
let path = if path_str.starts_with("::") { let path = if path_str.starts_with("::") {
ast::Path { ast::Path {
span, span,
segments: iter::once(Ident::with_empty_ctxt(kw::PathRoot)) segments: iter::once(Ident::with_dummy_span(kw::PathRoot))
.chain({ .chain({
path_str.split("::").skip(1).map(Ident::from_str) path_str.split("::").skip(1).map(Ident::from_str)
}) })
@ -2645,7 +2673,7 @@ impl<'a> Resolver<'a> {
let def_id = self.definitions.local_def_id(module_id); let def_id = self.definitions.local_def_id(module_id);
self.module_map.get(&def_id).copied().unwrap_or(self.graph_root) self.module_map.get(&def_id).copied().unwrap_or(self.graph_root)
}); });
let parent_scope = &ParentScope { module, ..self.dummy_parent_scope() }; let parent_scope = &ParentScope::module(module);
let res = self.resolve_ast_path(&path, ns, parent_scope).map_err(|_| ())?; let res = self.resolve_ast_path(&path, ns, parent_scope).map_err(|_| ())?;
Ok((path, res)) Ok((path, res))
} }
@ -2713,7 +2741,7 @@ fn module_to_string(module: Module<'_>) -> Option<String> {
fn collect_mod(names: &mut Vec<Ident>, module: Module<'_>) { fn collect_mod(names: &mut Vec<Ident>, module: Module<'_>) {
if let ModuleKind::Def(.., name) = module.kind { if let ModuleKind::Def(.., name) = module.kind {
if let Some(parent) = module.parent { if let Some(parent) = module.parent {
names.push(Ident::with_empty_ctxt(name)); names.push(Ident::with_dummy_span(name));
collect_mod(names, parent); collect_mod(names, parent);
} }
} else { } else {


@ -1,9 +1,11 @@
//! A bunch of methods and structures more or less related to resolving macros and
//! the interface provided by `Resolver` to the macro expander.
use crate::{AmbiguityError, AmbiguityKind, AmbiguityErrorMisc, Determinacy}; use crate::{AmbiguityError, AmbiguityKind, AmbiguityErrorMisc, Determinacy};
use crate::{CrateLint, Resolver, ResolutionError, Scope, ScopeSet, ParentScope, Weak}; use crate::{CrateLint, Resolver, ResolutionError, Scope, ScopeSet, ParentScope, Weak};
use crate::{Module, ModuleKind, NameBinding, PathResult, Segment, ToNameBinding}; use crate::{ModuleKind, NameBinding, PathResult, Segment, ToNameBinding};
use crate::{ModuleOrUniformRoot, KNOWN_TOOLS}; use crate::{ModuleOrUniformRoot, KNOWN_TOOLS};
use crate::Namespace::*; use crate::Namespace::*;
use crate::build_reduced_graph::BuildReducedGraphVisitor;
use crate::resolve_imports::ImportResolver; use crate::resolve_imports::ImportResolver;
use rustc::hir::def::{self, DefKind, NonMacroAttrKind}; use rustc::hir::def::{self, DefKind, NonMacroAttrKind};
use rustc::hir::map::DefCollector; use rustc::hir::map::DefCollector;
@ -15,43 +17,18 @@ use syntax::edition::Edition;
use syntax::ext::base::{self, Indeterminate, SpecialDerives}; use syntax::ext::base::{self, Indeterminate, SpecialDerives};
use syntax::ext::base::{MacroKind, SyntaxExtension}; use syntax::ext::base::{MacroKind, SyntaxExtension};
use syntax::ext::expand::{AstFragment, Invocation, InvocationKind}; use syntax::ext::expand::{AstFragment, Invocation, InvocationKind};
use syntax::ext::hygiene::{self, ExpnId, ExpnInfo, ExpnKind}; use syntax::ext::hygiene::{self, ExpnId, ExpnData, ExpnKind};
use syntax::ext::tt::macro_rules; use syntax::ext::tt::macro_rules;
use syntax::feature_gate::{emit_feature_err, is_builtin_attr_name}; use syntax::feature_gate::{emit_feature_err, is_builtin_attr_name};
use syntax::feature_gate::GateIssue; use syntax::feature_gate::GateIssue;
use syntax::symbol::{Symbol, kw, sym}; use syntax::symbol::{Symbol, kw, sym};
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
use std::cell::Cell;
use std::{mem, ptr}; use std::{mem, ptr};
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
type Res = def::Res<ast::NodeId>; type Res = def::Res<ast::NodeId>;
// FIXME: Merge this with `ParentScope`.
#[derive(Clone, Debug)]
pub struct InvocationData<'a> {
/// The module in which the macro was invoked.
crate module: Module<'a>,
/// The legacy scope in which the macro was invoked.
/// The invocation path is resolved in this scope.
crate parent_legacy_scope: LegacyScope<'a>,
/// The legacy scope *produced* by expanding this macro invocation,
/// includes all the macro_rules items, other invocations, etc generated by it.
/// `None` if the macro is not expanded yet.
crate output_legacy_scope: Cell<Option<LegacyScope<'a>>>,
}
impl<'a> InvocationData<'a> {
pub fn root(graph_root: Module<'a>) -> Self {
InvocationData {
module: graph_root,
parent_legacy_scope: LegacyScope::Empty,
output_legacy_scope: Cell::new(None),
}
}
}
/// Binding produced by a `macro_rules` item. /// Binding produced by a `macro_rules` item.
/// Not modularized, can shadow previous legacy bindings, etc. /// Not modularized, can shadow previous legacy bindings, etc.
#[derive(Debug)] #[derive(Debug)]
@ -75,7 +52,7 @@ pub enum LegacyScope<'a> {
Binding(&'a LegacyBinding<'a>), Binding(&'a LegacyBinding<'a>),
/// The scope introduced by a macro invocation that can potentially /// The scope introduced by a macro invocation that can potentially
/// create a `macro_rules!` macro definition. /// create a `macro_rules!` macro definition.
Invocation(&'a InvocationData<'a>), Invocation(ExpnId),
} }
// Macro namespace is separated into two sub-namespaces, one for bang macros and // Macro namespace is separated into two sub-namespaces, one for bang macros and
@ -120,17 +97,12 @@ impl<'a> base::Resolver for Resolver<'a> {
} }
fn get_module_scope(&mut self, id: ast::NodeId) -> ExpnId { fn get_module_scope(&mut self, id: ast::NodeId) -> ExpnId {
let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::default( let expn_id = ExpnId::fresh(Some(ExpnData::default(
ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, self.session.edition() ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, self.session.edition()
)); )));
let expn_id = span.ctxt().outer_expn();
let module = self.module_map[&self.definitions.local_def_id(id)]; let module = self.module_map[&self.definitions.local_def_id(id)];
self.invocation_parent_scopes.insert(expn_id, ParentScope::module(module));
self.definitions.set_invocation_parent(expn_id, module.def_id().unwrap().index); self.definitions.set_invocation_parent(expn_id, module.def_id().unwrap().index);
self.invocations.insert(expn_id, self.arenas.alloc_invocation_data(InvocationData {
module,
parent_legacy_scope: LegacyScope::Empty,
output_legacy_scope: Cell::new(None),
}));
expn_id expn_id
} }
@ -144,29 +116,24 @@ impl<'a> base::Resolver for Resolver<'a> {
}); });
} }
fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment, fn visit_ast_fragment_with_placeholders(
derives: &[ExpnId]) { &mut self, expansion: ExpnId, fragment: &AstFragment, derives: &[ExpnId]
fragment.visit_with(&mut DefCollector::new(&mut self.definitions, expn_id)); ) {
// Fill in some data for derives if the fragment is from a derive container.
let invocation = self.invocations[&expn_id]; // We are inside the `expansion` now, but other parent scope components are still the same.
invocation.module.unresolved_invocations.borrow_mut().remove(&expn_id); let parent_scope = ParentScope { expansion, ..self.invocation_parent_scopes[&expansion] };
invocation.module.unresolved_invocations.borrow_mut().extend(derives); let parent_def = self.definitions.invocation_parent(expansion);
let parent_def = self.definitions.invocation_parent(expn_id); self.invocation_parent_scopes.extend(derives.iter().map(|&derive| (derive, parent_scope)));
for &derive_invoc_id in derives { for &derive_invoc_id in derives {
self.definitions.set_invocation_parent(derive_invoc_id, parent_def); self.definitions.set_invocation_parent(derive_invoc_id, parent_def);
} }
self.invocations.extend(derives.iter().map(|&derive| (derive, invocation))); parent_scope.module.unresolved_invocations.borrow_mut().remove(&expansion);
let mut visitor = BuildReducedGraphVisitor { parent_scope.module.unresolved_invocations.borrow_mut().extend(derives);
r: self,
parent_scope: ParentScope { // Integrate the new AST fragment into all the definition and module structures.
module: invocation.module, fragment.visit_with(&mut DefCollector::new(&mut self.definitions, expansion));
expansion: expn_id, let output_legacy_scope = self.build_reduced_graph(fragment, parent_scope);
legacy: invocation.parent_legacy_scope, self.output_legacy_scopes.insert(expansion, output_legacy_scope);
derives: Vec::new(),
},
};
fragment.visit_with(&mut visitor);
invocation.output_legacy_scope.set(Some(visitor.parent_scope.legacy));
} }
fn register_builtin_macro(&mut self, ident: ast::Ident, ext: SyntaxExtension) { fn register_builtin_macro(&mut self, ident: ast::Ident, ext: SyntaxExtension) {
@ -182,13 +149,14 @@ impl<'a> base::Resolver for Resolver<'a> {
fn resolve_macro_invocation(&mut self, invoc: &Invocation, invoc_id: ExpnId, force: bool) fn resolve_macro_invocation(&mut self, invoc: &Invocation, invoc_id: ExpnId, force: bool)
-> Result<Option<Lrc<SyntaxExtension>>, Indeterminate> { -> Result<Option<Lrc<SyntaxExtension>>, Indeterminate> {
let (path, kind, derives_in_scope, after_derive) = match invoc.kind { let parent_scope = self.invocation_parent_scopes[&invoc_id];
let (path, kind, derives, after_derive) = match invoc.kind {
InvocationKind::Attr { ref attr, ref derives, after_derive, .. } => InvocationKind::Attr { ref attr, ref derives, after_derive, .. } =>
(&attr.path, MacroKind::Attr, derives.clone(), after_derive), (&attr.path, MacroKind::Attr, self.arenas.alloc_ast_paths(derives), after_derive),
InvocationKind::Bang { ref mac, .. } => InvocationKind::Bang { ref mac, .. } =>
(&mac.path, MacroKind::Bang, Vec::new(), false), (&mac.path, MacroKind::Bang, &[][..], false),
InvocationKind::Derive { ref path, .. } => InvocationKind::Derive { ref path, .. } =>
(path, MacroKind::Derive, Vec::new(), false), (path, MacroKind::Derive, &[][..], false),
InvocationKind::DeriveContainer { ref derives, .. } => { InvocationKind::DeriveContainer { ref derives, .. } => {
// Block expansion of derives in the container until we know whether one of them // Block expansion of derives in the container until we know whether one of them
// is a built-in `Copy`. Skip the resolution if there's only one derive - either // is a built-in `Copy`. Skip the resolution if there's only one derive - either
@ -196,10 +164,9 @@ impl<'a> base::Resolver for Resolver<'a> {
// will automatically know about itself. // will automatically know about itself.
let mut result = Ok(None); let mut result = Ok(None);
if derives.len() > 1 { if derives.len() > 1 {
let parent_scope = &self.invoc_parent_scope(invoc_id, Vec::new());
for path in derives { for path in derives {
match self.resolve_macro_path(path, Some(MacroKind::Derive), match self.resolve_macro_path(path, Some(MacroKind::Derive),
parent_scope, true, force) { &parent_scope, true, force) {
Ok((Some(ref ext), _)) if ext.is_derive_copy => { Ok((Some(ref ext), _)) if ext.is_derive_copy => {
self.add_derives(invoc.expansion_data.id, SpecialDerives::COPY); self.add_derives(invoc.expansion_data.id, SpecialDerives::COPY);
return Ok(None); return Ok(None);
@ -213,11 +180,14 @@ impl<'a> base::Resolver for Resolver<'a> {
} }
}; };
let parent_scope = &self.invoc_parent_scope(invoc_id, derives_in_scope); // Derives are not included when `invocations` are collected, so we have to add them here.
let parent_scope = &ParentScope { derives, ..parent_scope };
let (ext, res) = self.smart_resolve_macro_path(path, kind, parent_scope, force)?; let (ext, res) = self.smart_resolve_macro_path(path, kind, parent_scope, force)?;
let span = invoc.span(); let span = invoc.span();
invoc.expansion_data.id.set_expn_info(ext.expn_info(span, fast_print_path(path))); invoc.expansion_data.id.set_expn_data(
ext.expn_data(parent_scope.expansion, span, fast_print_path(path))
);
if let Res::Def(_, def_id) = res { if let Res::Def(_, def_id) = res {
if after_derive { if after_derive {
@ -251,20 +221,6 @@ impl<'a> base::Resolver for Resolver<'a> {
} }
impl<'a> Resolver<'a> { impl<'a> Resolver<'a> {
pub fn dummy_parent_scope(&self) -> ParentScope<'a> {
self.invoc_parent_scope(ExpnId::root(), Vec::new())
}
fn invoc_parent_scope(&self, invoc_id: ExpnId, derives: Vec<ast::Path>) -> ParentScope<'a> {
let invoc = self.invocations[&invoc_id];
ParentScope {
module: invoc.module.nearest_item_scope(),
expansion: invoc_id.parent(),
legacy: invoc.parent_legacy_scope,
derives,
}
}
/// Resolve macro path with error reporting and recovery. /// Resolve macro path with error reporting and recovery.
fn smart_resolve_macro_path( fn smart_resolve_macro_path(
&mut self, &mut self,
@ -346,8 +302,7 @@ impl<'a> Resolver<'a> {
// Possibly apply the macro helper hack // Possibly apply the macro helper hack
if kind == Some(MacroKind::Bang) && path.len() == 1 && if kind == Some(MacroKind::Bang) && path.len() == 1 &&
path[0].ident.span.ctxt().outer_expn_info() path[0].ident.span.ctxt().outer_expn_data().local_inner_macros {
.map_or(false, |info| info.local_inner_macros) {
let root = Ident::new(kw::DollarCrate, path[0].ident.span); let root = Ident::new(kw::DollarCrate, path[0].ident.span);
path.insert(0, Segment::from_ident(root)); path.insert(0, Segment::from_ident(root));
} }
@ -367,8 +322,8 @@ impl<'a> Resolver<'a> {
if trace { if trace {
let kind = kind.expect("macro kind must be specified if tracing is enabled"); let kind = kind.expect("macro kind must be specified if tracing is enabled");
parent_scope.module.multi_segment_macro_resolutions.borrow_mut() self.multi_segment_macro_resolutions
.push((path, path_span, kind, parent_scope.clone(), res.ok())); .push((path, path_span, kind, *parent_scope, res.ok()));
} }
self.prohibit_imported_non_macro_attrs(None, res.ok(), path_span); self.prohibit_imported_non_macro_attrs(None, res.ok(), path_span);
@ -384,8 +339,8 @@ impl<'a> Resolver<'a> {
if trace { if trace {
let kind = kind.expect("macro kind must be specified if tracing is enabled"); let kind = kind.expect("macro kind must be specified if tracing is enabled");
parent_scope.module.single_segment_macro_resolutions.borrow_mut() self.single_segment_macro_resolutions
.push((path[0].ident, kind, parent_scope.clone(), binding.ok())); .push((path[0].ident, kind, *parent_scope, binding.ok()));
} }
let res = binding.map(|binding| binding.res()); let res = binding.map(|binding| binding.res());
@ -454,8 +409,8 @@ impl<'a> Resolver<'a> {
let result = match scope { let result = match scope {
Scope::DeriveHelpers => { Scope::DeriveHelpers => {
let mut result = Err(Determinacy::Determined); let mut result = Err(Determinacy::Determined);
for derive in &parent_scope.derives { for derive in parent_scope.derives {
let parent_scope = &ParentScope { derives: Vec::new(), ..*parent_scope }; let parent_scope = &ParentScope { derives: &[], ..*parent_scope };
match this.resolve_macro_path(derive, Some(MacroKind::Derive), match this.resolve_macro_path(derive, Some(MacroKind::Derive),
parent_scope, true, force) { parent_scope, true, force) {
Ok((Some(ext), _)) => if ext.helper_attrs.contains(&ident.name) { Ok((Some(ext), _)) => if ext.helper_attrs.contains(&ident.name) {
@ -475,8 +430,9 @@ impl<'a> Resolver<'a> {
Scope::MacroRules(legacy_scope) => match legacy_scope { Scope::MacroRules(legacy_scope) => match legacy_scope {
LegacyScope::Binding(legacy_binding) if ident == legacy_binding.ident => LegacyScope::Binding(legacy_binding) if ident == legacy_binding.ident =>
Ok((legacy_binding.binding, Flags::MACRO_RULES)), Ok((legacy_binding.binding, Flags::MACRO_RULES)),
LegacyScope::Invocation(invoc) if invoc.output_legacy_scope.get().is_none() => LegacyScope::Invocation(invoc_id)
Err(Determinacy::Undetermined), if !this.output_legacy_scopes.contains_key(&invoc_id) =>
Err(Determinacy::Undetermined),
_ => Err(Determinacy::Determined), _ => Err(Determinacy::Determined),
} }
Scope::CrateRoot => { Scope::CrateRoot => {
@ -500,7 +456,7 @@ impl<'a> Resolver<'a> {
} }
} }
Scope::Module(module) => { Scope::Module(module) => {
let adjusted_parent_scope = &ParentScope { module, ..parent_scope.clone() }; let adjusted_parent_scope = &ParentScope { module, ..*parent_scope };
let binding = this.resolve_ident_in_module_unadjusted_ext( let binding = this.resolve_ident_in_module_unadjusted_ext(
ModuleOrUniformRoot::Module(module), ModuleOrUniformRoot::Module(module),
ident, ident,
@ -575,7 +531,7 @@ impl<'a> Resolver<'a> {
false, false,
path_span, path_span,
) { ) {
if use_prelude || this.is_builtin_macro(binding.res().opt_def_id()) { if use_prelude || this.is_builtin_macro(binding.res()) {
result = Ok((binding, Flags::PRELUDE | Flags::MISC_FROM_PRELUDE)); result = Ok((binding, Flags::PRELUDE | Flags::MISC_FROM_PRELUDE));
} }
} }
@ -694,7 +650,7 @@ impl<'a> Resolver<'a> {
} }
} }
pub fn finalize_current_module_macro_resolutions(&mut self, module: Module<'a>) { crate fn finalize_macro_resolutions(&mut self) {
let check_consistency = |this: &mut Self, path: &[Segment], span, kind: MacroKind, let check_consistency = |this: &mut Self, path: &[Segment], span, kind: MacroKind,
initial_res: Option<Res>, res: Res| { initial_res: Option<Res>, res: Res| {
if let Some(initial_res) = initial_res { if let Some(initial_res) = initial_res {
@ -730,8 +686,7 @@ impl<'a> Resolver<'a> {
} }
}; };
let macro_resolutions = let macro_resolutions = mem::take(&mut self.multi_segment_macro_resolutions);
mem::take(&mut *module.multi_segment_macro_resolutions.borrow_mut());
for (mut path, path_span, kind, parent_scope, initial_res) in macro_resolutions { for (mut path, path_span, kind, parent_scope, initial_res) in macro_resolutions {
// FIXME: Path resolution will ICE if segment IDs present. // FIXME: Path resolution will ICE if segment IDs present.
for seg in &mut path { seg.id = None; } for seg in &mut path { seg.id = None; }
@ -758,8 +713,7 @@ impl<'a> Resolver<'a> {
} }
} }
let macro_resolutions = let macro_resolutions = mem::take(&mut self.single_segment_macro_resolutions);
mem::take(&mut *module.single_segment_macro_resolutions.borrow_mut());
for (ident, kind, parent_scope, initial_binding) in macro_resolutions { for (ident, kind, parent_scope, initial_binding) in macro_resolutions {
match self.early_resolve_ident_in_lexical_scope(ident, ScopeSet::Macro(kind), match self.early_resolve_ident_in_lexical_scope(ident, ScopeSet::Macro(kind),
&parent_scope, true, true, ident.span) { &parent_scope, true, true, ident.span) {
@ -784,7 +738,7 @@ impl<'a> Resolver<'a> {
} }
} }
let builtin_attrs = mem::take(&mut *module.builtin_attrs.borrow_mut()); let builtin_attrs = mem::take(&mut self.builtin_attrs);
for (ident, parent_scope) in builtin_attrs { for (ident, parent_scope) in builtin_attrs {
let _ = self.early_resolve_ident_in_lexical_scope( let _ = self.early_resolve_ident_in_lexical_scope(
ident, ScopeSet::Macro(MacroKind::Attr), &parent_scope, true, true, ident.span ident, ScopeSet::Macro(MacroKind::Attr), &parent_scope, true, true, ident.span
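// Editorial sketch (not part of the original diff): the `mem::take` calls above move each
// buffered list of pending resolutions out of `self` so it can be iterated while `self` is
// still borrowed mutably inside the loop. A minimal, self-contained illustration of that
// pattern with hypothetical names (not the compiler's types):
use std::mem;

struct Checker {
    pending: Vec<String>,
}

impl Checker {
    fn finalize(&mut self) {
        // Replace the buffer with an empty Vec and take ownership of the old contents,
        // so the loop body is free to call other `&mut self` methods.
        let pending = mem::take(&mut self.pending);
        for item in pending {
            self.check(&item);
        }
    }

    fn check(&mut self, item: &str) {
        println!("checking {}", item);
    }
}

fn main() {
    let mut c = Checker { pending: vec!["a".into(), "b".into()] };
    c.finalize();
}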


@ -1,3 +1,5 @@
//! A bunch of methods and structures more or less related to resolving imports.
use ImportDirectiveSubclass::*; use ImportDirectiveSubclass::*;
use crate::{AmbiguityError, AmbiguityKind, AmbiguityErrorMisc}; use crate::{AmbiguityError, AmbiguityKind, AmbiguityErrorMisc};
@ -394,7 +396,7 @@ impl<'a> Resolver<'a> {
match ident.span.glob_adjust(module.expansion, glob_import.span) { match ident.span.glob_adjust(module.expansion, glob_import.span) {
Some(Some(def)) => { Some(Some(def)) => {
tmp_parent_scope = tmp_parent_scope =
ParentScope { module: self.macro_def_scope(def), ..parent_scope.clone() }; ParentScope { module: self.macro_def_scope(def), ..*parent_scope };
adjusted_parent_scope = &tmp_parent_scope; adjusted_parent_scope = &tmp_parent_scope;
} }
Some(None) => {} Some(None) => {}
@ -848,7 +850,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
directive.vis.set(orig_vis); directive.vis.set(orig_vis);
let module = match path_res { let module = match path_res {
PathResult::Module(module) => { PathResult::Module(module) => {
// Consistency checks, analogous to `finalize_current_module_macro_resolutions`. // Consistency checks, analogous to `finalize_macro_resolutions`.
if let Some(initial_module) = directive.imported_module.get() { if let Some(initial_module) = directive.imported_module.get() {
if !ModuleOrUniformRoot::same_def(module, initial_module) && no_ambiguity { if !ModuleOrUniformRoot::same_def(module, initial_module) && no_ambiguity {
span_bug!(directive.span, "inconsistent resolution for an import"); span_bug!(directive.span, "inconsistent resolution for an import");
@ -973,7 +975,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
match binding { match binding {
Ok(binding) => { Ok(binding) => {
// Consistency checks, analogous to `finalize_current_module_macro_resolutions`. // Consistency checks, analogous to `finalize_macro_resolutions`.
let initial_res = source_bindings[ns].get().map(|initial_binding| { let initial_res = source_bindings[ns].get().map(|initial_binding| {
all_ns_err = false; all_ns_err = false;
if let Some(target_binding) = target_bindings[ns].get() { if let Some(target_binding) = target_bindings[ns].get() {


@ -1156,7 +1156,7 @@ fn escape(s: String) -> String {
// Helper function to determine if a span came from a // Helper function to determine if a span came from a
// macro expansion or syntax extension. // macro expansion or syntax extension.
fn generated_code(span: Span) -> bool { fn generated_code(span: Span) -> bool {
span.ctxt() != NO_EXPANSION || span.is_dummy() span.from_expansion() || span.is_dummy()
} }
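// Illustrative note (not part of the original diff): `Span::from_expansion()` is the helper
// this commit uses in place of inspecting the syntax context or `outer_expn_info()` directly;
// it returns true when the span was produced by a macro expansion or syntax extension.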
// DefId::index is a newtype and so the JSON serialisation is ugly. Therefore // DefId::index is a newtype and so the JSON serialisation is ugly. Therefore


@ -347,9 +347,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
sp, sp,
); );
// Check the `expn_info()` to see if this is a macro; if so, it's hard to // If the span is from a macro, then it's hard to extract the text
// extract the text and make a good suggestion, so don't bother. // and make a good suggestion, so don't bother.
let is_macro = sp.ctxt().outer_expn_info().is_some(); let is_macro = sp.from_expansion();
match (&expr.node, &expected.sty, &checked_ty.sty) { match (&expr.node, &expected.sty, &checked_ty.sty) {
(_, &ty::Ref(_, exp, _), &ty::Ref(_, check, _)) => match (&exp.sty, &check.sty) { (_, &ty::Ref(_, exp, _), &ty::Ref(_, check, _)) => match (&exp.sty, &check.sty) {
@ -554,6 +554,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// FIXME(estebank): modify once we decide to suggest `as` casts // FIXME(estebank): modify once we decide to suggest `as` casts
return false; return false;
} }
if !self.tcx.sess.source_map().span_to_filename(expr.span).is_real() {
// Ignore if span is from within a macro.
return false;
}
// If casting this expression to a given numeric type would be appropriate in case of a type // If casting this expression to a given numeric type would be appropriate in case of a type
// mismatch. // mismatch.


@ -24,6 +24,7 @@ use syntax::source_map::Span;
use syntax::util::lev_distance::find_best_match_for_name; use syntax::util::lev_distance::find_best_match_for_name;
use rustc::hir; use rustc::hir;
use rustc::hir::{ExprKind, QPath}; use rustc::hir::{ExprKind, QPath};
use rustc::hir::def_id::DefId;
use rustc::hir::def::{CtorKind, Res, DefKind}; use rustc::hir::def::{CtorKind, Res, DefKind};
use rustc::hir::ptr::P; use rustc::hir::ptr::P;
use rustc::infer; use rustc::infer;
@ -1336,114 +1337,180 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
autoderef.unambiguous_final_ty(self); autoderef.unambiguous_final_ty(self);
if let Some((did, field_ty)) = private_candidate { if let Some((did, field_ty)) = private_candidate {
let struct_path = self.tcx().def_path_str(did); self.ban_private_field_access(expr, expr_t, field, did);
let mut err = struct_span_err!(self.tcx().sess, expr.span, E0616, return field_ty;
"field `{}` of struct `{}` is private",
field, struct_path);
// Also check if an accessible method exists, which is often what is meant.
if self.method_exists(field, expr_t, expr.hir_id, false)
&& !self.expr_in_place(expr.hir_id)
{
self.suggest_method_call(
&mut err,
&format!("a method `{}` also exists, call it with parentheses", field),
field,
expr_t,
expr.hir_id,
);
}
err.emit();
field_ty
} else if field.name == kw::Invalid {
self.tcx().types.err
} else if self.method_exists(field, expr_t, expr.hir_id, true) {
let mut err = type_error_struct!(self.tcx().sess, field.span, expr_t, E0615,
"attempted to take value of method `{}` on type `{}`",
field, expr_t);
if !self.expr_in_place(expr.hir_id) {
self.suggest_method_call(
&mut err,
"use parentheses to call the method",
field,
expr_t,
expr.hir_id
);
} else {
err.help("methods are immutable and cannot be assigned to");
}
err.emit();
self.tcx().types.err
} else {
if !expr_t.is_primitive_ty() {
let mut err = self.no_such_field_err(field.span, field, expr_t);
match expr_t.sty {
ty::Adt(def, _) if !def.is_enum() => {
if let Some(suggested_field_name) =
Self::suggest_field_name(def.non_enum_variant(),
&field.as_str(), vec![]) {
err.span_suggestion(
field.span,
"a field with a similar name exists",
suggested_field_name.to_string(),
Applicability::MaybeIncorrect,
);
} else {
err.span_label(field.span, "unknown field");
let struct_variant_def = def.non_enum_variant();
let field_names = self.available_field_names(struct_variant_def);
if !field_names.is_empty() {
err.note(&format!("available fields are: {}",
self.name_series_display(field_names)));
}
};
}
ty::Array(_, len) => {
if let (Some(len), Ok(user_index)) = (
len.try_eval_usize(self.tcx, self.param_env),
field.as_str().parse::<u64>()
) {
let base = self.tcx.sess.source_map()
.span_to_snippet(base.span)
.unwrap_or_else(|_|
self.tcx.hir().hir_to_pretty_string(base.hir_id));
let help = "instead of using tuple indexing, use array indexing";
let suggestion = format!("{}[{}]", base, field);
let applicability = if len < user_index {
Applicability::MachineApplicable
} else {
Applicability::MaybeIncorrect
};
err.span_suggestion(
expr.span, help, suggestion, applicability
);
}
}
ty::RawPtr(..) => {
let base = self.tcx.sess.source_map()
.span_to_snippet(base.span)
.unwrap_or_else(|_| self.tcx.hir().hir_to_pretty_string(base.hir_id));
let msg = format!("`{}` is a raw pointer; try dereferencing it", base);
let suggestion = format!("(*{}).{}", base, field);
err.span_suggestion(
expr.span,
&msg,
suggestion,
Applicability::MaybeIncorrect,
);
}
_ => {}
}
err
} else {
type_error_struct!(self.tcx().sess, field.span, expr_t, E0610,
"`{}` is a primitive type and therefore doesn't have fields",
expr_t)
}.emit();
self.tcx().types.err
} }
if field.name == kw::Invalid {
} else if self.method_exists(field, expr_t, expr.hir_id, true) {
self.ban_take_value_of_method(expr, expr_t, field);
} else if !expr_t.is_primitive_ty() {
let mut err = self.no_such_field_err(field.span, field, expr_t);
match expr_t.sty {
ty::Adt(def, _) if !def.is_enum() => {
self.suggest_fields_on_recordish(&mut err, def, field);
}
ty::Array(_, len) => {
self.maybe_suggest_array_indexing(&mut err, expr, base, field, len);
}
ty::RawPtr(..) => {
self.suggest_first_deref_field(&mut err, expr, base, field);
}
_ => {}
}
if field.name == kw::Await {
// We know by construction that `<expr>.await` is either on Rust 2015
// or results in `ExprKind::Await`. Suggest switching the edition to 2018.
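// Illustrative (hypothetical user code, not from this diff): on edition 2015 `await` is not
// a keyword, so `fut.await` parses as a field access; when no such field exists it reaches
// this branch and the notes below point the user at Rust 2018.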
err.note("to `.await` a `Future`, switch to Rust 2018");
err.help("set `edition = \"2018\"` in `Cargo.toml`");
err.note("for more on editions, read https://doc.rust-lang.org/edition-guide");
}
err.emit();
} else {
type_error_struct!(
self.tcx().sess,
field.span,
expr_t,
E0610,
"`{}` is a primitive type and therefore doesn't have fields",
expr_t
)
.emit();
}
self.tcx().types.err
}
fn ban_private_field_access(
&self,
expr: &hir::Expr,
expr_t: Ty<'tcx>,
field: ast::Ident,
base_did: DefId,
) {
let struct_path = self.tcx().def_path_str(base_did);
let mut err = struct_span_err!(
self.tcx().sess,
expr.span,
E0616,
"field `{}` of struct `{}` is private",
field,
struct_path
);
// Also check if an accessible method exists, which is often what is meant.
if self.method_exists(field, expr_t, expr.hir_id, false)
&& !self.expr_in_place(expr.hir_id)
{
self.suggest_method_call(
&mut err,
&format!("a method `{}` also exists, call it with parentheses", field),
field,
expr_t,
expr.hir_id,
);
}
err.emit();
}
fn ban_take_value_of_method(&self, expr: &hir::Expr, expr_t: Ty<'tcx>, field: ast::Ident) {
let mut err = type_error_struct!(
self.tcx().sess,
field.span,
expr_t,
E0615,
"attempted to take value of method `{}` on type `{}`",
field,
expr_t
);
if !self.expr_in_place(expr.hir_id) {
self.suggest_method_call(
&mut err,
"use parentheses to call the method",
field,
expr_t,
expr.hir_id
);
} else {
err.help("methods are immutable and cannot be assigned to");
}
err.emit();
}
fn suggest_fields_on_recordish(
&self,
err: &mut DiagnosticBuilder<'_>,
def: &'tcx ty::AdtDef,
field: ast::Ident,
) {
if let Some(suggested_field_name) =
Self::suggest_field_name(def.non_enum_variant(), &field.as_str(), vec![])
{
err.span_suggestion(
field.span,
"a field with a similar name exists",
suggested_field_name.to_string(),
Applicability::MaybeIncorrect,
);
} else {
err.span_label(field.span, "unknown field");
let struct_variant_def = def.non_enum_variant();
let field_names = self.available_field_names(struct_variant_def);
if !field_names.is_empty() {
err.note(&format!("available fields are: {}",
self.name_series_display(field_names)));
}
}
}
fn maybe_suggest_array_indexing(
&self,
err: &mut DiagnosticBuilder<'_>,
expr: &hir::Expr,
base: &hir::Expr,
field: ast::Ident,
len: &ty::Const<'tcx>,
) {
if let (Some(len), Ok(user_index)) = (
len.try_eval_usize(self.tcx, self.param_env),
field.as_str().parse::<u64>()
) {
let base = self.tcx.sess.source_map()
.span_to_snippet(base.span)
.unwrap_or_else(|_| self.tcx.hir().hir_to_pretty_string(base.hir_id));
let help = "instead of using tuple indexing, use array indexing";
let suggestion = format!("{}[{}]", base, field);
let applicability = if len < user_index {
Applicability::MachineApplicable
} else {
Applicability::MaybeIncorrect
};
err.span_suggestion(expr.span, help, suggestion, applicability);
}
}
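// Illustrative (not from this diff): for `let arr = [1, 2, 3]; arr.0;` this helper adds the
// suggestion to write `arr[0]` to the unknown-field error.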
fn suggest_first_deref_field(
&self,
err: &mut DiagnosticBuilder<'_>,
expr: &hir::Expr,
base: &hir::Expr,
field: ast::Ident,
) {
let base = self.tcx.sess.source_map()
.span_to_snippet(base.span)
.unwrap_or_else(|_| self.tcx.hir().hir_to_pretty_string(base.hir_id));
let msg = format!("`{}` is a raw pointer; try dereferencing it", base);
let suggestion = format!("(*{}).{}", base, field);
err.span_suggestion(
expr.span,
&msg,
suggestion,
Applicability::MaybeIncorrect,
);
} }
fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS<'_>) fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS<'_>)


@ -985,7 +985,7 @@ impl hir::intravisit::Visitor<'tcx> for UsePlacementFinder<'tcx> {
hir::ItemKind::Use(..) => { hir::ItemKind::Use(..) => {
// Don't suggest placing a `use` before the prelude // Don't suggest placing a `use` before the prelude
// import or other generated ones. // import or other generated ones.
if item.span.ctxt().outer_expn_info().is_none() { if !item.span.from_expansion() {
self.span = Some(item.span.shrink_to_lo()); self.span = Some(item.span.shrink_to_lo());
self.found_use = true; self.found_use = true;
return; return;
@ -995,7 +995,7 @@ impl hir::intravisit::Visitor<'tcx> for UsePlacementFinder<'tcx> {
hir::ItemKind::ExternCrate(_) => {} hir::ItemKind::ExternCrate(_) => {}
// ...but do place them before the first other item. // ...but do place them before the first other item.
_ => if self.span.map_or(true, |span| item.span < span ) { _ => if self.span.map_or(true, |span| item.span < span ) {
if item.span.ctxt().outer_expn_info().is_none() { if !item.span.from_expansion() {
// Don't insert between attributes and an item. // Don't insert between attributes and an item.
if item.attrs.is_empty() { if item.attrs.is_empty() {
self.span = Some(item.span.shrink_to_lo()); self.span = Some(item.span.shrink_to_lo());


@ -2943,7 +2943,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
(PlaceOp::Index, false) => (self.tcx.lang_items().index_trait(), sym::index), (PlaceOp::Index, false) => (self.tcx.lang_items().index_trait(), sym::index),
(PlaceOp::Index, true) => (self.tcx.lang_items().index_mut_trait(), sym::index_mut), (PlaceOp::Index, true) => (self.tcx.lang_items().index_mut_trait(), sym::index_mut),
}; };
(tr, ast::Ident::with_empty_ctxt(name)) (tr, ast::Ident::with_dummy_span(name))
} }
fn try_overloaded_place_op(&self, fn try_overloaded_place_op(&self,


@ -930,7 +930,7 @@ impl Attributes {
if attr.check_name(sym::enable) { if attr.check_name(sym::enable) {
if let Some(feat) = attr.value_str() { if let Some(feat) = attr.value_str() {
let meta = attr::mk_name_value_item_str( let meta = attr::mk_name_value_item_str(
Ident::with_empty_ctxt(sym::target_feature), feat, DUMMY_SP Ident::with_dummy_span(sym::target_feature), feat, DUMMY_SP
); );
if let Ok(feat_cfg) = Cfg::parse(&meta) { if let Ok(feat_cfg) = Cfg::parse(&meta) {
cfg &= feat_cfg; cfg &= feat_cfg;


@ -4,6 +4,7 @@ use rustc::hir::def_id::DefId;
use rustc::hir; use rustc::hir;
use rustc::lint as lint; use rustc::lint as lint;
use rustc::ty; use rustc::ty;
use rustc_resolve::ParentScope;
use syntax; use syntax;
use syntax::ast::{self, Ident}; use syntax::ast::{self, Ident};
use syntax::ext::base::SyntaxExtensionKind; use syntax::ext::base::SyntaxExtensionKind;
@ -431,7 +432,7 @@ fn macro_resolve(cx: &DocContext<'_>, path_str: &str) -> Option<Res> {
let path = ast::Path::from_ident(Ident::from_str(path_str)); let path = ast::Path::from_ident(Ident::from_str(path_str));
cx.enter_resolver(|resolver| { cx.enter_resolver(|resolver| {
if let Ok((Some(ext), res)) = resolver.resolve_macro_path( if let Ok((Some(ext), res)) = resolver.resolve_macro_path(
&path, None, &resolver.dummy_parent_scope(), false, false &path, None, &ParentScope::module(resolver.graph_root), false, false
) { ) {
if let SyntaxExtensionKind::LegacyBang { .. } = ext.kind { if let SyntaxExtensionKind::LegacyBang { .. } = ext.kind {
return Some(res.map_id(|_| panic!("unexpected id"))); return Some(res.map_id(|_| panic!("unexpected id")));


@ -327,7 +327,7 @@ impl Attribute {
if self.is_sugared_doc { if self.is_sugared_doc {
let comment = self.value_str().unwrap(); let comment = self.value_str().unwrap();
let meta = mk_name_value_item_str( let meta = mk_name_value_item_str(
Ident::with_empty_ctxt(sym::doc), Ident::with_dummy_span(sym::doc),
Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())), Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())),
DUMMY_SP, DUMMY_SP,
); );
@ -412,7 +412,7 @@ pub fn mk_sugared_doc_attr(text: Symbol, span: Span) -> Attribute {
Attribute { Attribute {
id: mk_attr_id(), id: mk_attr_id(),
style, style,
path: Path::from_ident(Ident::with_empty_ctxt(sym::doc).with_span_pos(span)), path: Path::from_ident(Ident::with_dummy_span(sym::doc).with_span_pos(span)),
tokens: MetaItemKind::NameValue(lit).tokens(span), tokens: MetaItemKind::NameValue(lit).tokens(span),
is_sugared_doc: true, is_sugared_doc: true,
span, span,

View file

@ -172,7 +172,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
(descriptions.len(), ecx.expr_vec(span, descriptions)) (descriptions.len(), ecx.expr_vec(span, descriptions))
}); });
let static_ = ecx.lifetime(span, Ident::with_empty_ctxt(kw::StaticLifetime)); let static_ = ecx.lifetime(span, Ident::with_dummy_span(kw::StaticLifetime));
let ty_str = ecx.ty_rptr( let ty_str = ecx.ty_rptr(
span, span,
ecx.ty_ident(span, ecx.ident_of("str")), ecx.ty_ident(span, ecx.ident_of("str")),

View file

@ -15,7 +15,7 @@ use crate::tokenstream::{self, TokenStream, TokenTree};
use errors::{DiagnosticBuilder, DiagnosticId}; use errors::{DiagnosticBuilder, DiagnosticId};
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use syntax_pos::{FileName, Span, MultiSpan, DUMMY_SP}; use syntax_pos::{FileName, Span, MultiSpan, DUMMY_SP};
use syntax_pos::hygiene::{ExpnInfo, ExpnKind}; use syntax_pos::hygiene::{ExpnData, ExpnKind};
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::{self, Lrc}; use rustc_data_structures::sync::{self, Lrc};
@ -640,10 +640,11 @@ impl SyntaxExtension {
SyntaxExtension::default(SyntaxExtensionKind::NonMacroAttr { mark_used }, edition) SyntaxExtension::default(SyntaxExtensionKind::NonMacroAttr { mark_used }, edition)
} }
pub fn expn_info(&self, call_site: Span, descr: Symbol) -> ExpnInfo { pub fn expn_data(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnData {
ExpnInfo { ExpnData {
call_site,
kind: ExpnKind::Macro(self.macro_kind(), descr), kind: ExpnKind::Macro(self.macro_kind(), descr),
parent,
call_site,
def_site: self.span, def_site: self.span,
default_transparency: self.default_transparency, default_transparency: self.default_transparency,
allow_internal_unstable: self.allow_internal_unstable.clone(), allow_internal_unstable: self.allow_internal_unstable.clone(),
@ -707,7 +708,7 @@ pub struct ExpansionData {
/// One of these is made during expansion and incrementally updated as we go; /// One of these is made during expansion and incrementally updated as we go;
/// when a macro expansion occurs, the resulting nodes have the `backtrace() /// when a macro expansion occurs, the resulting nodes have the `backtrace()
/// -> expn_info` of their expansion context stored into their span. /// -> expn_data` of their expansion context stored into their span.
pub struct ExtCtxt<'a> { pub struct ExtCtxt<'a> {
pub parse_sess: &'a parse::ParseSess, pub parse_sess: &'a parse::ParseSess,
pub ecfg: expand::ExpansionConfig<'a>, pub ecfg: expand::ExpansionConfig<'a>,
@ -756,13 +757,10 @@ impl<'a> ExtCtxt<'a> {
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config } pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config }
pub fn call_site(&self) -> Span { pub fn call_site(&self) -> Span {
match self.current_expansion.id.expn_info() { self.current_expansion.id.expn_data().call_site
Some(expn_info) => expn_info.call_site,
None => DUMMY_SP,
}
} }
pub fn backtrace(&self) -> SyntaxContext { pub fn backtrace(&self) -> SyntaxContext {
SyntaxContext::empty().apply_mark(self.current_expansion.id) SyntaxContext::root().apply_mark(self.current_expansion.id)
} }
/// Returns span for the macro which originally caused the current expansion to happen. /// Returns span for the macro which originally caused the current expansion to happen.
@ -772,17 +770,13 @@ impl<'a> ExtCtxt<'a> {
let mut ctxt = self.backtrace(); let mut ctxt = self.backtrace();
let mut last_macro = None; let mut last_macro = None;
loop { loop {
if ctxt.outer_expn_info().map_or(None, |info| { let expn_data = ctxt.outer_expn_data();
if info.kind.descr() == sym::include { // Stop going up the backtrace once include! is encountered
// Stop going up the backtrace once include! is encountered if expn_data.is_root() || expn_data.kind.descr() == sym::include {
return None; break;
}
ctxt = info.call_site.ctxt();
last_macro = Some(info.call_site);
Some(())
}).is_none() {
break
} }
ctxt = expn_data.call_site.ctxt();
last_macro = Some(expn_data.call_site);
} }
last_macro last_macro
} }
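The rewritten `expansion_cause` above walks the hygiene backtrace through `outer_expn_data()`, remembering each call site until it reaches the root expansion or an `include!`. A minimal standalone sketch of that loop, using plain index-based stand-ins rather than rustc's `SyntaxContext`/`ExpnData` types (all names here are illustrative, not the real API):

    // Simplified stand-ins; an index plays the role of a SyntaxContext.
    #[derive(Clone, Copy)]
    struct Ctxt(usize); // Ctxt(0) is the root context (code written directly by the user)

    struct Expn {
        call_site_ctxt: Ctxt,    // context of the code that invoked this expansion
        call_site: &'static str, // stand-in for the call-site Span
        is_include: bool,        // models the `include!` special case
    }

    // `expns[n]` produced context `Ctxt(n)`; entry 0 is the root "expansion".
    fn expansion_cause(expns: &[Expn], mut ctxt: Ctxt) -> Option<&'static str> {
        let mut last_macro = None;
        loop {
            let expn = &expns[ctxt.0];
            // Stop at the root, or once include! is encountered.
            if ctxt.0 == 0 || expn.is_include {
                break;
            }
            ctxt = expn.call_site_ctxt;
            last_macro = Some(expn.call_site);
        }
        last_macro
    }

    fn main() {
        let expns = [
            Expn { call_site_ctxt: Ctxt(0), call_site: "<root>", is_include: false },
            Expn { call_site_ctxt: Ctxt(0), call_site: "outer!() at main.rs:3", is_include: false },
            Expn { call_site_ctxt: Ctxt(1), call_site: "inner!() inside outer!'s body", is_include: false },
        ];
        // Walking up from the innermost context reports the outermost user-written macro call.
        assert_eq!(expansion_cause(&expns, Ctxt(2)), Some("outer!() at main.rs:3"));
    }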
@ -872,7 +866,7 @@ impl<'a> ExtCtxt<'a> {
pub fn std_path(&self, components: &[Symbol]) -> Vec<ast::Ident> { pub fn std_path(&self, components: &[Symbol]) -> Vec<ast::Ident> {
let def_site = DUMMY_SP.apply_mark(self.current_expansion.id); let def_site = DUMMY_SP.apply_mark(self.current_expansion.id);
iter::once(Ident::new(kw::DollarCrate, def_site)) iter::once(Ident::new(kw::DollarCrate, def_site))
.chain(components.iter().map(|&s| Ident::with_empty_ctxt(s))) .chain(components.iter().map(|&s| Ident::with_dummy_span(s)))
.collect() .collect()
} }
pub fn name_of(&self, st: &str) -> ast::Name { pub fn name_of(&self, st: &str) -> ast::Name {

View file

@ -340,7 +340,7 @@ impl<'a> ExtCtxt<'a> {
self.expr_path(self.path_ident(span, id)) self.expr_path(self.path_ident(span, id))
} }
pub fn expr_self(&self, span: Span) -> P<ast::Expr> { pub fn expr_self(&self, span: Span) -> P<ast::Expr> {
self.expr_ident(span, Ident::with_empty_ctxt(kw::SelfLower)) self.expr_ident(span, Ident::with_dummy_span(kw::SelfLower))
} }
pub fn expr_binary(&self, sp: Span, op: ast::BinOpKind, pub fn expr_binary(&self, sp: Span, op: ast::BinOpKind,

View file

@ -5,7 +5,7 @@ use crate::source_map::respan;
use crate::config::StripUnconfigured; use crate::config::StripUnconfigured;
use crate::ext::base::*; use crate::ext::base::*;
use crate::ext::proc_macro::collect_derives; use crate::ext::proc_macro::collect_derives;
use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnInfo, ExpnKind}; use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind};
use crate::ext::tt::macro_rules::annotate_err_with_kind; use crate::ext::tt::macro_rules::annotate_err_with_kind;
use crate::ext::placeholders::{placeholder, PlaceholderExpander}; use crate::ext::placeholders::{placeholder, PlaceholderExpander};
use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err}; use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
@ -353,7 +353,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
derives.reserve(traits.len()); derives.reserve(traits.len());
invocations.reserve(traits.len()); invocations.reserve(traits.len());
for path in traits { for path in traits {
let expn_id = ExpnId::fresh(self.cx.current_expansion.id, None); let expn_id = ExpnId::fresh(None);
derives.push(expn_id); derives.push(expn_id);
invocations.push(Invocation { invocations.push(Invocation {
kind: InvocationKind::Derive { path, item: item.clone() }, kind: InvocationKind::Derive { path, item: item.clone() },
@ -475,11 +475,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
} }
if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit { if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit {
let info = self.cx.current_expansion.id.expn_info().unwrap(); let expn_data = self.cx.current_expansion.id.expn_data();
let suggested_limit = self.cx.ecfg.recursion_limit * 2; let suggested_limit = self.cx.ecfg.recursion_limit * 2;
let mut err = self.cx.struct_span_err(info.call_site, let mut err = self.cx.struct_span_err(expn_data.call_site,
&format!("recursion limit reached while expanding the macro `{}`", &format!("recursion limit reached while expanding the macro `{}`",
info.kind.descr())); expn_data.kind.descr()));
err.help(&format!( err.help(&format!(
"consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate",
suggested_limit)); suggested_limit));
@ -759,7 +759,7 @@ impl<'a> Parser<'a> {
let msg = format!("macro expansion ignores token `{}` and any following", let msg = format!("macro expansion ignores token `{}` and any following",
self.this_token_to_string()); self.this_token_to_string());
// Avoid emitting backtrace info twice. // Avoid emitting backtrace info twice.
let def_site_span = self.token.span.with_ctxt(SyntaxContext::empty()); let def_site_span = self.token.span.with_ctxt(SyntaxContext::root());
let mut err = self.diagnostic().struct_span_err(def_site_span, &msg); let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
err.span_label(span, "caused by the macro expansion here"); err.span_label(span, "caused by the macro expansion here");
let msg = format!( let msg = format!(
@ -796,17 +796,20 @@ struct InvocationCollector<'a, 'b> {
impl<'a, 'b> InvocationCollector<'a, 'b> { impl<'a, 'b> InvocationCollector<'a, 'b> {
fn collect(&mut self, fragment_kind: AstFragmentKind, kind: InvocationKind) -> AstFragment { fn collect(&mut self, fragment_kind: AstFragmentKind, kind: InvocationKind) -> AstFragment {
// Expansion info for all the collected invocations is set upon their resolution, // Expansion data for all the collected invocations is set upon their resolution,
// with exception of the derive container case which is not resolved and can get // with exception of the derive container case which is not resolved and can get
// its expansion info immediately. // its expansion data immediately.
let expn_info = match &kind { let expn_data = match &kind {
InvocationKind::DeriveContainer { item, .. } => Some(ExpnInfo::default( InvocationKind::DeriveContainer { item, .. } => Some(ExpnData {
ExpnKind::Macro(MacroKind::Attr, sym::derive), parent: self.cx.current_expansion.id,
item.span(), self.cx.parse_sess.edition, ..ExpnData::default(
)), ExpnKind::Macro(MacroKind::Attr, sym::derive),
item.span(), self.cx.parse_sess.edition,
)
}),
_ => None, _ => None,
}; };
let expn_id = ExpnId::fresh(self.cx.current_expansion.id, expn_info); let expn_id = ExpnId::fresh(expn_data);
self.invocations.push(Invocation { self.invocations.push(Invocation {
kind, kind,
fragment_kind, fragment_kind,
@ -1249,21 +1252,21 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
let include_info = vec![ let include_info = vec![
ast::NestedMetaItem::MetaItem( ast::NestedMetaItem::MetaItem(
attr::mk_name_value_item_str( attr::mk_name_value_item_str(
Ident::with_empty_ctxt(sym::file), Ident::with_dummy_span(sym::file),
file, file,
DUMMY_SP, DUMMY_SP,
), ),
), ),
ast::NestedMetaItem::MetaItem( ast::NestedMetaItem::MetaItem(
attr::mk_name_value_item_str( attr::mk_name_value_item_str(
Ident::with_empty_ctxt(sym::contents), Ident::with_dummy_span(sym::contents),
src_interned, src_interned,
DUMMY_SP, DUMMY_SP,
), ),
), ),
]; ];
let include_ident = Ident::with_empty_ctxt(sym::include); let include_ident = Ident::with_dummy_span(sym::include);
let item = attr::mk_list_item(include_ident, include_info); let item = attr::mk_list_item(include_ident, include_info);
items.push(ast::NestedMetaItem::MetaItem(item)); items.push(ast::NestedMetaItem::MetaItem(item));
} }
@ -1325,7 +1328,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
} }
} }
let meta = attr::mk_list_item(Ident::with_empty_ctxt(sym::doc), items); let meta = attr::mk_list_item(Ident::with_dummy_span(sym::doc), items);
*at = attr::Attribute { *at = attr::Attribute {
span: at.span, span: at.span,
id: at.id, id: at.id,
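As the comment in `collect` above notes, most invocations only get their `ExpnData` once they are resolved, so `ExpnId::fresh` now takes an `Option<ExpnData>` and the data is attached later via `set_expn_data`. A small self-contained model of that allocate-now/fill-later pattern (types and names are illustrative, not the real `syntax_pos::hygiene` API):

    struct ExpnData {
        parent: usize,
        descr: &'static str,
    }

    #[derive(Default)]
    struct Expansions {
        // Each slot may stay empty between ExpnId creation and resolution of its invocation.
        data: Vec<Option<ExpnData>>,
    }

    impl Expansions {
        fn fresh(&mut self, data: Option<ExpnData>) -> usize {
            self.data.push(data);
            self.data.len() - 1
        }

        fn set(&mut self, id: usize, data: ExpnData) {
            let slot = &mut self.data[id];
            assert!(slot.is_none(), "expansion data is reset for an expansion ID");
            *slot = Some(data);
        }

        fn get(&self, id: usize) -> &ExpnData {
            self.data[id].as_ref().expect("no expansion data for an expansion ID")
        }
    }

    fn main() {
        let mut expns = Expansions::default();
        let root = expns.fresh(Some(ExpnData { parent: 0, descr: "root" }));
        // A derive container knows its data immediately...
        let container = expns.fresh(Some(ExpnData { parent: root, descr: "derive container" }));
        // ...while an ordinary invocation is allocated first and resolved later.
        let mac = expns.fresh(None);
        expns.set(mac, ExpnData { parent: container, descr: "bar!" });
        assert_eq!(expns.get(root).descr, "root");
        assert_eq!(expns.get(mac).parent, container);
    }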

View file

@ -362,10 +362,10 @@ pub(crate) struct Rustc<'a> {
impl<'a> Rustc<'a> { impl<'a> Rustc<'a> {
pub fn new(cx: &'a ExtCtxt<'_>) -> Self { pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
// No way to determine def location for a proc macro right now, so use call location. // No way to determine def location for a proc macro right now, so use call location.
let location = cx.current_expansion.id.expn_info().unwrap().call_site; let location = cx.current_expansion.id.expn_data().call_site;
let to_span = |transparency| { let to_span = |transparency| {
location.with_ctxt( location.with_ctxt(
SyntaxContext::empty() SyntaxContext::root()
.apply_mark_with_transparency(cx.current_expansion.id, transparency), .apply_mark_with_transparency(cx.current_expansion.id, transparency),
) )
}; };
@ -677,7 +677,7 @@ impl server::Span for Rustc<'_> {
self.sess.source_map().lookup_char_pos(span.lo()).file self.sess.source_map().lookup_char_pos(span.lo()).file
} }
fn parent(&mut self, span: Self::Span) -> Option<Self::Span> { fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
span.ctxt().outer_expn_info().map(|i| i.call_site) span.parent()
} }
fn source(&mut self, span: Self::Span) -> Self::Span { fn source(&mut self, span: Self::Span) -> Self::Span {
span.source_callsite() span.source_callsite()

View file

@ -4,7 +4,7 @@ use crate::symbol::{sym, Symbol};
use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char}; use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
use errors::{FatalError, DiagnosticBuilder}; use errors::{FatalError, DiagnosticBuilder};
use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION}; use syntax_pos::{BytePos, Pos, Span};
use rustc_lexer::Base; use rustc_lexer::Base;
use rustc_lexer::unescape; use rustc_lexer::unescape;
@ -84,7 +84,7 @@ impl<'a> StringReader<'a> {
fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
self.override_span.unwrap_or_else(|| Span::new(lo, hi, NO_EXPANSION)) self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
} }
/// Returns the next token, including trivia like whitespace or comments. /// Returns the next token, including trivia like whitespace or comments.

View file

@ -1,41 +1,17 @@
use super::*; use super::*;
use crate::ast::CrateConfig;
use crate::symbol::Symbol; use crate::symbol::Symbol;
use crate::source_map::{SourceMap, FilePathMapping}; use crate::source_map::{SourceMap, FilePathMapping};
use crate::feature_gate::UnstableFeatures;
use crate::parse::token; use crate::parse::token;
use crate::diagnostics::plugin::ErrorMap;
use crate::with_default_globals; use crate::with_default_globals;
use std::io; use std::io;
use std::path::PathBuf; use std::path::PathBuf;
use syntax_pos::{BytePos, Span, NO_EXPANSION, edition::Edition}; use errors::{Handler, emitter::EmitterWriter};
use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use syntax_pos::{BytePos, Span};
use rustc_data_structures::sync::{Lock, Once};
fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess { fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), let emitter = EmitterWriter::new(Box::new(io::sink()), Some(sm.clone()), false, false, false);
Some(sm.clone()), ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
false,
false,
false);
ParseSess {
span_diagnostic: errors::Handler::with_emitter(true, None, Box::new(emitter)),
unstable_features: UnstableFeatures::from_environment(),
config: CrateConfig::default(),
included_mod_stack: Lock::new(Vec::new()),
source_map: sm,
missing_fragment_specifiers: Lock::new(FxHashSet::default()),
raw_identifier_spans: Lock::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
buffered_lints: Lock::new(vec![]),
edition: Edition::from_session(),
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
param_attr_spans: Lock::new(Vec::new()),
let_chains_spans: Lock::new(Vec::new()),
async_closure_spans: Lock::new(Vec::new()),
injected_crate_name: Once::new(),
}
} }
// open a string reader for the given string // open a string reader for the given string
@ -61,7 +37,7 @@ fn t1() {
let tok1 = string_reader.next_token(); let tok1 = string_reader.next_token();
let tok2 = Token::new( let tok2 = Token::new(
mk_ident("fn"), mk_ident("fn"),
Span::new(BytePos(21), BytePos(23), NO_EXPANSION), Span::with_root_ctxt(BytePos(21), BytePos(23)),
); );
assert_eq!(tok1.kind, tok2.kind); assert_eq!(tok1.kind, tok2.kind);
assert_eq!(tok1.span, tok2.span); assert_eq!(tok1.span, tok2.span);
@ -71,7 +47,7 @@ fn t1() {
assert_eq!(string_reader.pos.clone(), BytePos(28)); assert_eq!(string_reader.pos.clone(), BytePos(28));
let tok4 = Token::new( let tok4 = Token::new(
mk_ident("main"), mk_ident("main"),
Span::new(BytePos(24), BytePos(28), NO_EXPANSION), Span::with_root_ctxt(BytePos(24), BytePos(28)),
); );
assert_eq!(tok3.kind, tok4.kind); assert_eq!(tok3.kind, tok4.kind);
assert_eq!(tok3.span, tok4.span); assert_eq!(tok3.span, tok4.span);

View file

@ -3,7 +3,7 @@
use super::StringReader; use super::StringReader;
use errors::{Applicability, DiagnosticBuilder}; use errors::{Applicability, DiagnosticBuilder};
use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION, symbol::kw}; use syntax_pos::{BytePos, Pos, Span, symbol::kw};
use crate::parse::token; use crate::parse::token;
#[rustfmt::skip] // for line breaks #[rustfmt::skip] // for line breaks
@ -343,7 +343,7 @@ crate fn check_for_substitution<'a>(
None => return None, None => return None,
}; };
let span = Span::new(pos, pos + Pos::from_usize(ch.len_utf8()), NO_EXPANSION); let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8()));
let (ascii_name, token) = match ASCII_ARRAY.iter().find(|&&(c, _, _)| c == ascii_char) { let (ascii_name, token) = match ASCII_ARRAY.iter().find(|&&(c, _, _)| c == ascii_char) {
Some((_ascii_char, ascii_name, token)) => (ascii_name, token), Some((_ascii_char, ascii_name, token)) => (ascii_name, token),
@ -362,10 +362,9 @@ crate fn check_for_substitution<'a>(
ascii_char, ascii_name ascii_char, ascii_name
); );
err.span_suggestion( err.span_suggestion(
Span::new( Span::with_root_ctxt(
pos, pos,
pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()), pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
NO_EXPANSION,
), ),
&msg, &msg,
format!("\"{}\"", s), format!("\"{}\"", s),

View file

@ -16,6 +16,7 @@ use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic,
use rustc_data_structures::sync::{Lrc, Lock, Once}; use rustc_data_structures::sync::{Lrc, Lock, Once};
use syntax_pos::{Span, SourceFile, FileName, MultiSpan}; use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
use syntax_pos::edition::Edition; use syntax_pos::edition::Edition;
use syntax_pos::hygiene::ExpnId;
use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use std::borrow::Cow; use std::borrow::Cow;
@ -86,7 +87,7 @@ impl ParseSess {
included_mod_stack: Lock::new(vec![]), included_mod_stack: Lock::new(vec![]),
source_map, source_map,
buffered_lints: Lock::new(vec![]), buffered_lints: Lock::new(vec![]),
edition: Edition::from_session(), edition: ExpnId::root().expn_data().edition,
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()), ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
param_attr_spans: Lock::new(Vec::new()), param_attr_spans: Lock::new(Vec::new()),
let_chains_spans: Lock::new(Vec::new()), let_chains_spans: Lock::new(Vec::new()),

View file

@ -13,7 +13,6 @@ mod generics;
use crate::ast::{self, AttrStyle, Attribute, Arg, BindingMode, StrStyle, SelfKind}; use crate::ast::{self, AttrStyle, Attribute, Arg, BindingMode, StrStyle, SelfKind};
use crate::ast::{FnDecl, Ident, IsAsync, MacDelimiter, Mutability, TyKind}; use crate::ast::{FnDecl, Ident, IsAsync, MacDelimiter, Mutability, TyKind};
use crate::ast::{Visibility, VisibilityKind, Unsafety, CrateSugar}; use crate::ast::{Visibility, VisibilityKind, Unsafety, CrateSugar};
use crate::ext::hygiene::SyntaxContext;
use crate::source_map::{self, respan}; use crate::source_map::{self, respan};
use crate::parse::{SeqSep, literal, token}; use crate::parse::{SeqSep, literal, token};
use crate::parse::lexer::UnmatchedBrace; use crate::parse::lexer::UnmatchedBrace;
@ -1101,7 +1100,7 @@ impl<'a> Parser<'a> {
crate fn process_potential_macro_variable(&mut self) { crate fn process_potential_macro_variable(&mut self) {
self.token = match self.token.kind { self.token = match self.token.kind {
token::Dollar if self.token.span.ctxt() != SyntaxContext::empty() && token::Dollar if self.token.span.from_expansion() &&
self.look_ahead(1, |t| t.is_ident()) => { self.look_ahead(1, |t| t.is_ident()) => {
self.bump(); self.bump();
let name = match self.token.kind { let name = match self.token.kind {

View file

@ -60,7 +60,7 @@ impl<'a> Parser<'a> {
// Record that we fetched the mod from an external file // Record that we fetched the mod from an external file
if warn { if warn {
let attr = attr::mk_attr_outer( let attr = attr::mk_attr_outer(
attr::mk_word_item(Ident::with_empty_ctxt(sym::warn_directory_ownership))); attr::mk_word_item(Ident::with_dummy_span(sym::warn_directory_ownership)));
attr::mark_known(&attr); attr::mark_known(&attr);
attrs.push(attr); attrs.push(attr);
} }

View file

@ -12,7 +12,7 @@ use crate::symbol::{kw, sym};
use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse}; use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
use crate::tokenstream::{DelimSpan, TokenTree, TokenStream}; use crate::tokenstream::{DelimSpan, TokenTree, TokenStream};
use crate::with_default_globals; use crate::with_default_globals;
use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION}; use syntax_pos::{Span, BytePos, Pos};
use std::path::PathBuf; use std::path::PathBuf;
@ -27,7 +27,7 @@ fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
// produce a syntax_pos::span // produce a syntax_pos::span
fn sp(a: u32, b: u32) -> Span { fn sp(a: u32, b: u32) -> Span {
Span::new(BytePos(a), BytePos(b), NO_EXPANSION) Span::with_root_ctxt(BytePos(a), BytePos(b))
} }
/// Parse a string, return an expr /// Parse a string, return an expr

View file

@ -123,13 +123,13 @@ pub fn print_crate<'a>(cm: &'a SourceMap,
// of the feature gate, so we fake them up here. // of the feature gate, so we fake them up here.
// #![feature(prelude_import)] // #![feature(prelude_import)]
let pi_nested = attr::mk_nested_word_item(ast::Ident::with_empty_ctxt(sym::prelude_import)); let pi_nested = attr::mk_nested_word_item(ast::Ident::with_dummy_span(sym::prelude_import));
let list = attr::mk_list_item(ast::Ident::with_empty_ctxt(sym::feature), vec![pi_nested]); let list = attr::mk_list_item(ast::Ident::with_dummy_span(sym::feature), vec![pi_nested]);
let fake_attr = attr::mk_attr_inner(list); let fake_attr = attr::mk_attr_inner(list);
s.print_attribute(&fake_attr); s.print_attribute(&fake_attr);
// #![no_std] // #![no_std]
let no_std_meta = attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::no_std)); let no_std_meta = attr::mk_word_item(ast::Ident::with_dummy_span(sym::no_std));
let fake_attr = attr::mk_attr_inner(no_std_meta); let fake_attr = attr::mk_attr_inner(no_std_meta);
s.print_attribute(&fake_attr); s.print_attribute(&fake_attr);
} }

View file

@ -8,7 +8,7 @@
//! information, source code snippets, etc. //! information, source code snippets, etc.
pub use syntax_pos::*; pub use syntax_pos::*;
pub use syntax_pos::hygiene::{ExpnKind, ExpnInfo}; pub use syntax_pos::hygiene::{ExpnKind, ExpnData};
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::StableHasher; use rustc_data_structures::stable_hasher::StableHasher;
@ -29,14 +29,15 @@ mod tests;
/// Returns the span itself if it doesn't come from a macro expansion, /// Returns the span itself if it doesn't come from a macro expansion,
/// otherwise return the call site span up to the `enclosing_sp` by /// otherwise return the call site span up to the `enclosing_sp` by
/// following the `expn_info` chain. /// following the `expn_data` chain.
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
let call_site1 = sp.ctxt().outer_expn_info().map(|ei| ei.call_site); let expn_data1 = sp.ctxt().outer_expn_data();
let call_site2 = enclosing_sp.ctxt().outer_expn_info().map(|ei| ei.call_site); let expn_data2 = enclosing_sp.ctxt().outer_expn_data();
match (call_site1, call_site2) { if expn_data1.is_root() ||
(None, _) => sp, !expn_data2.is_root() && expn_data1.call_site == expn_data2.call_site {
(Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, sp
(Some(call_site1), _) => original_sp(call_site1, enclosing_sp), } else {
original_sp(expn_data1.call_site, enclosing_sp)
} }
} }
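The new `original_sp` above no longer matches on `Option`s: it returns the span once its expansion is the root, or once its call site coincides with that of `enclosing_sp`, and otherwise recurses on the call site. A standalone sketch of the same recursion over a toy span/expansion table (simplified types, not the compiler's):

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Span {
        lo: u32,
        hi: u32,
        ctxt: usize, // index into the expansion table; 0 means "no expansion"
    }

    #[derive(Clone, Copy)]
    struct ExpnData {
        call_site: Span,
        is_root: bool,
    }

    fn original_sp(expns: &[ExpnData], sp: Span, enclosing_sp: Span) -> Span {
        let expn_data1 = expns[sp.ctxt];
        let expn_data2 = expns[enclosing_sp.ctxt];
        if expn_data1.is_root
            || (!expn_data2.is_root && expn_data1.call_site == expn_data2.call_site)
        {
            // `sp` is not macro-generated, or both spans stem from the same call site.
            sp
        } else {
            // Otherwise climb toward the call site of `sp`'s expansion.
            original_sp(expns, expn_data1.call_site, enclosing_sp)
        }
    }

    fn main() {
        let root = Span { lo: 0, hi: 0, ctxt: 0 };
        let call = Span { lo: 10, hi: 20, ctxt: 0 }; // a macro call written by the user
        let expns = [
            ExpnData { call_site: root, is_root: true },  // ctxt 0: root, i.e. no expansion
            ExpnData { call_site: call, is_root: false }, // ctxt 1: the expansion of that call
        ];
        let generated = Span { lo: 100, hi: 105, ctxt: 1 }; // a span inside the expansion
        // Climbing the chain maps the generated span back to the user-written call.
        assert_eq!(original_sp(&expns, generated, root), call);
    }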

View file

@ -91,7 +91,7 @@ fn t6() {
fn t7() { fn t7() {
// Test span_to_lines for a span ending at the end of source_file // Test span_to_lines for a span ending at the end of source_file
let sm = init_source_map(); let sm = init_source_map();
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let span = Span::with_root_ctxt(BytePos(12), BytePos(23));
let file_lines = sm.span_to_lines(span).unwrap(); let file_lines = sm.span_to_lines(span).unwrap();
assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into()); assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into());
@ -107,7 +107,7 @@ fn span_from_selection(input: &str, selection: &str) -> Span {
assert_eq!(input.len(), selection.len()); assert_eq!(input.len(), selection.len());
let left_index = selection.find('~').unwrap() as u32; let left_index = selection.find('~').unwrap() as u32;
let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index);
Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION) Span::with_root_ctxt(BytePos(left_index), BytePos(right_index + 1))
} }
/// Tests span_to_snippet and span_to_lines for a span converting 3 /// Tests span_to_snippet and span_to_lines for a span converting 3
@ -137,7 +137,7 @@ fn span_to_snippet_and_lines_spanning_multiple_lines() {
fn t8() { fn t8() {
// Test span_to_snippet for a span ending at the end of source_file // Test span_to_snippet for a span ending at the end of source_file
let sm = init_source_map(); let sm = init_source_map();
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let span = Span::with_root_ctxt(BytePos(12), BytePos(23));
let snippet = sm.span_to_snippet(span); let snippet = sm.span_to_snippet(span);
assert_eq!(snippet, Ok("second line".to_string())); assert_eq!(snippet, Ok("second line".to_string()));
@ -147,7 +147,7 @@ fn t8() {
fn t9() { fn t9() {
// Test span_to_str for a span ending at the end of source_file // Test span_to_str for a span ending at the end of source_file
let sm = init_source_map(); let sm = init_source_map();
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); let span = Span::with_root_ctxt(BytePos(12), BytePos(23));
let sstr = sm.span_to_string(span); let sstr = sm.span_to_string(span);
assert_eq!(sstr, "blork.rs:2:1: 2:12"); assert_eq!(sstr, "blork.rs:2:1: 2:12");
@ -198,10 +198,9 @@ impl SourceMapExtension for SourceMap {
let lo = hi + offset; let lo = hi + offset;
hi = lo + substring.len(); hi = lo + substring.len();
if i == n { if i == n {
let span = Span::new( let span = Span::with_root_ctxt(
BytePos(lo as u32 + file.start_pos.0), BytePos(lo as u32 + file.start_pos.0),
BytePos(hi as u32 + file.start_pos.0), BytePos(hi as u32 + file.start_pos.0),
NO_EXPANSION,
); );
assert_eq!(&self.span_to_snippet(span).unwrap()[..], assert_eq!(&self.span_to_snippet(span).unwrap()[..],
substring); substring);

View file

@ -9,7 +9,7 @@ use crate::with_default_globals;
use errors::emitter::EmitterWriter; use errors::emitter::EmitterWriter;
use errors::Handler; use errors::Handler;
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan}; use syntax_pos::{BytePos, Span, MultiSpan};
use std::io; use std::io;
use std::io::prelude::*; use std::io::prelude::*;
@ -169,7 +169,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {
let start = make_pos(file_text, start); let start = make_pos(file_text, start);
let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends
assert!(start <= end); assert!(start <= end);
Span::new(BytePos(start as u32), BytePos(end as u32), NO_EXPANSION) Span::with_root_ctxt(BytePos(start as u32), BytePos(end as u32))
} }
fn make_pos(file_text: &str, pos: &Position) -> usize { fn make_pos(file_text: &str, pos: &Position) -> usize {

View file

@ -3,14 +3,14 @@ use super::*;
use crate::ast::Name; use crate::ast::Name;
use crate::with_default_globals; use crate::with_default_globals;
use crate::tests::string_to_stream; use crate::tests::string_to_stream;
use syntax_pos::{Span, BytePos, NO_EXPANSION}; use syntax_pos::{Span, BytePos};
fn string_to_ts(string: &str) -> TokenStream { fn string_to_ts(string: &str) -> TokenStream {
string_to_stream(string.to_owned()) string_to_stream(string.to_owned())
} }
fn sp(a: u32, b: u32) -> Span { fn sp(a: u32, b: u32) -> Span {
Span::new(BytePos(a), BytePos(b), NO_EXPANSION) Span::with_root_ctxt(BytePos(a), BytePos(b))
} }
#[test] #[test]

View file

@ -35,7 +35,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt<'_>,
match annitem.node { match annitem.node {
ItemKind::Struct(_, Generics { ref params, .. }) | ItemKind::Struct(_, Generics { ref params, .. }) |
ItemKind::Enum(_, Generics { ref params, .. }) => { ItemKind::Enum(_, Generics { ref params, .. }) => {
let container_id = cx.current_expansion.id.parent(); let container_id = cx.current_expansion.id.expn_data().parent;
if cx.resolver.has_derives(container_id, SpecialDerives::COPY) && if cx.resolver.has_derives(container_id, SpecialDerives::COPY) &&
!params.iter().any(|param| match param.kind { !params.iter().any(|param| match param.kind {
ast::GenericParamKind::Type { .. } => true, ast::GenericParamKind::Type { .. } => true,
@ -129,7 +129,7 @@ fn cs_clone_shallow(name: &str,
if is_union { if is_union {
// let _: AssertParamIsCopy<Self>; // let _: AssertParamIsCopy<Self>;
let self_ty = let self_ty =
cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_empty_ctxt(kw::SelfUpper))); cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_dummy_span(kw::SelfUpper)));
assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy"); assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy");
} else { } else {
match *substr.fields { match *substr.fields {

View file

@ -13,7 +13,7 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt<'_>,
mitem: &MetaItem, mitem: &MetaItem,
item: &Annotatable, item: &Annotatable,
push: &mut dyn FnMut(Annotatable)) { push: &mut dyn FnMut(Annotatable)) {
cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::EQ); cx.resolver.add_derives(cx.current_expansion.id.expn_data().parent, SpecialDerives::EQ);
let inline = cx.meta_word(span, sym::inline); let inline = cx.meta_word(span, sym::inline);
let hidden = cx.meta_list_item_word(span, sym::hidden); let hidden = cx.meta_list_item_word(span, sym::hidden);

View file

@ -13,7 +13,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt<'_>,
mitem: &MetaItem, mitem: &MetaItem,
item: &Annotatable, item: &Annotatable,
push: &mut dyn FnMut(Annotatable)) { push: &mut dyn FnMut(Annotatable)) {
cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::PARTIAL_EQ); cx.resolver.add_derives(cx.current_expansion.id.expn_data().parent, SpecialDerives::PARTIAL_EQ);
// structures are equal if all fields are equal, and non equal, if // structures are equal if all fields are equal, and non equal, if
// any fields are not equal or if the enum variants are different // any fields are not equal or if the enum variants are different

View file

@ -82,7 +82,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
let expr = cx.expr_method_call(span, let expr = cx.expr_method_call(span,
builder_expr.clone(), builder_expr.clone(),
Ident::with_empty_ctxt(sym::field), Ident::with_dummy_span(sym::field),
vec![field]); vec![field]);
// Use `let _ = expr;` to avoid triggering the // Use `let _ = expr;` to avoid triggering the
@ -106,7 +106,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
let field = cx.expr_addr_of(field.span, field); let field = cx.expr_addr_of(field.span, field);
let expr = cx.expr_method_call(span, let expr = cx.expr_method_call(span,
builder_expr.clone(), builder_expr.clone(),
Ident::with_empty_ctxt(sym::field), Ident::with_dummy_span(sym::field),
vec![name, field]); vec![name, field]);
stmts.push(stmt_let_undescore(cx, span, expr)); stmts.push(stmt_let_undescore(cx, span, expr));
} }

View file

@ -425,7 +425,7 @@ impl<'a> TraitDef<'a> {
return; return;
} }
}; };
let container_id = cx.current_expansion.id.parent(); let container_id = cx.current_expansion.id.expn_data().parent;
let is_always_copy = let is_always_copy =
cx.resolver.has_derives(container_id, SpecialDerives::COPY) && cx.resolver.has_derives(container_id, SpecialDerives::COPY) &&
has_no_type_params; has_no_type_params;
@ -928,7 +928,7 @@ impl<'a> MethodDef<'a> {
let args = { let args = {
let self_args = explicit_self.map(|explicit_self| { let self_args = explicit_self.map(|explicit_self| {
let ident = Ident::with_empty_ctxt(kw::SelfLower).with_span_pos(trait_.span); let ident = Ident::with_dummy_span(kw::SelfLower).with_span_pos(trait_.span);
ast::Arg::from_self(ThinVec::default(), explicit_self, ident) ast::Arg::from_self(ThinVec::default(), explicit_self, ident)
}); });
let nonself_args = arg_types.into_iter() let nonself_args = arg_types.into_iter()

View file

@ -23,13 +23,13 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
let sp = sp.apply_mark(cx.current_expansion.id); let sp = sp.apply_mark(cx.current_expansion.id);
let e = match env::var(&*var.as_str()) { let e = match env::var(&*var.as_str()) {
Err(..) => { Err(..) => {
let lt = cx.lifetime(sp, Ident::with_empty_ctxt(kw::StaticLifetime)); let lt = cx.lifetime(sp, Ident::with_dummy_span(kw::StaticLifetime));
cx.expr_path(cx.path_all(sp, cx.expr_path(cx.path_all(sp,
true, true,
cx.std_path(&[sym::option, sym::Option, sym::None]), cx.std_path(&[sym::option, sym::Option, sym::None]),
vec![GenericArg::Type(cx.ty_rptr(sp, vec![GenericArg::Type(cx.ty_rptr(sp,
cx.ty_ident(sp, cx.ty_ident(sp,
Ident::with_empty_ctxt(sym::str)), Ident::with_dummy_span(sym::str)),
Some(lt), Some(lt),
ast::Mutability::Immutable))], ast::Mutability::Immutable))],
vec![])) vec![]))

View file

@ -29,7 +29,7 @@ pub fn expand(
}; };
// Generate a bunch of new items using the AllocFnFactory // Generate a bunch of new items using the AllocFnFactory
let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id)); let span = item.span.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id));
let f = AllocFnFactory { let f = AllocFnFactory {
span, span,
kind: AllocatorKind::Global, kind: AllocatorKind::Global,
@ -44,7 +44,7 @@ pub fn expand(
let const_ty = ecx.ty(span, TyKind::Tup(Vec::new())); let const_ty = ecx.ty(span, TyKind::Tup(Vec::new()));
let const_body = ecx.expr_block(ecx.block(span, stmts)); let const_body = ecx.expr_block(ecx.block(span, stmts));
let const_item = let const_item =
ecx.item_const(span, Ident::with_empty_ctxt(kw::Underscore), const_ty, const_body); ecx.item_const(span, Ident::with_dummy_span(kw::Underscore), const_ty, const_body);
// Return the original item and the new methods. // Return the original item and the new methods.
vec![Annotatable::Item(item), Annotatable::Item(const_item)] vec![Annotatable::Item(item), Annotatable::Item(const_item)]
@ -120,7 +120,7 @@ impl AllocFnFactory<'_, '_> {
) -> P<Expr> { ) -> P<Expr> {
match *ty { match *ty {
AllocatorTy::Layout => { AllocatorTy::Layout => {
let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize)); let usize = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::usize));
let ty_usize = self.cx.ty_path(usize); let ty_usize = self.cx.ty_path(usize);
let size = ident(); let size = ident();
let align = ident(); let align = ident();
@ -178,12 +178,12 @@ impl AllocFnFactory<'_, '_> {
} }
fn usize(&self) -> P<Ty> { fn usize(&self) -> P<Ty> {
let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize)); let usize = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::usize));
self.cx.ty_path(usize) self.cx.ty_path(usize)
} }
fn ptr_u8(&self) -> P<Ty> { fn ptr_u8(&self) -> P<Ty> {
let u8 = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::u8)); let u8 = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::u8));
let ty_u8 = self.cx.ty_path(u8); let ty_u8 = self.cx.ty_path(u8);
self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable) self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable)
} }

View file

@ -42,7 +42,7 @@ pub mod test_harness;
pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, edition: Edition) { pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, edition: Edition) {
let mut register = |name, kind| resolver.register_builtin_macro( let mut register = |name, kind| resolver.register_builtin_macro(
Ident::with_empty_ctxt(name), SyntaxExtension { Ident::with_dummy_span(name), SyntaxExtension {
is_builtin: true, ..SyntaxExtension::default(kind, edition) is_builtin: true, ..SyntaxExtension::default(kind, edition)
}, },
); );

View file

@ -11,7 +11,7 @@ use syntax::source_map::respan;
use syntax::symbol::sym; use syntax::symbol::sym;
use syntax::tokenstream::*; use syntax::tokenstream::*;
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::hygiene::{ExpnId, ExpnInfo, ExpnKind, MacroKind}; use syntax_pos::hygiene::{ExpnData, ExpnKind, MacroKind};
use std::mem; use std::mem;
@ -43,12 +43,12 @@ pub fn inject(
) { ) {
if !named_exts.is_empty() { if !named_exts.is_empty() {
let mut extra_items = Vec::new(); let mut extra_items = Vec::new();
let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable(
ExpnKind::Macro(MacroKind::Attr, sym::plugin), DUMMY_SP, edition, ExpnKind::Macro(MacroKind::Attr, sym::plugin), DUMMY_SP, edition,
[sym::rustc_attrs][..].into(), [sym::rustc_attrs][..].into(),
)); ));
for (name, ext) in named_exts { for (name, ext) in named_exts {
resolver.register_builtin_macro(Ident::with_empty_ctxt(name), ext); resolver.register_builtin_macro(Ident::with_dummy_span(name), ext);
extra_items.push(plugin_macro_def(name, span)); extra_items.push(plugin_macro_def(name, span));
} }
// The `macro_rules` items must be inserted before any other items. // The `macro_rules` items must be inserted before any other items.

View file

@ -3,10 +3,9 @@ use std::mem;
use smallvec::smallvec; use smallvec::smallvec;
use syntax::ast::{self, Ident}; use syntax::ast::{self, Ident};
use syntax::attr; use syntax::attr;
use syntax::source_map::{ExpnInfo, ExpnKind, respan}; use syntax::source_map::{ExpnData, ExpnKind, respan};
use syntax::ext::base::{ExtCtxt, MacroKind}; use syntax::ext::base::{ExtCtxt, MacroKind};
use syntax::ext::expand::{AstFragment, ExpansionConfig}; use syntax::ext::expand::{AstFragment, ExpansionConfig};
use syntax::ext::hygiene::ExpnId;
use syntax::ext::proc_macro::is_proc_macro_attr; use syntax::ext::proc_macro::is_proc_macro_attr;
use syntax::parse::ParseSess; use syntax::parse::ParseSess;
use syntax::ptr::P; use syntax::ptr::P;
@ -328,7 +327,7 @@ fn mk_decls(
custom_attrs: &[ProcMacroDef], custom_attrs: &[ProcMacroDef],
custom_macros: &[ProcMacroDef], custom_macros: &[ProcMacroDef],
) -> P<ast::Item> { ) -> P<ast::Item> {
let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable(
ExpnKind::Macro(MacroKind::Attr, sym::proc_macro), DUMMY_SP, cx.parse_sess.edition, ExpnKind::Macro(MacroKind::Attr, sym::proc_macro), DUMMY_SP, cx.parse_sess.edition,
[sym::rustc_attrs, sym::proc_macro_internals][..].into(), [sym::rustc_attrs, sym::proc_macro_internals][..].into(),
)); ));
@ -337,7 +336,7 @@ fn mk_decls(
let doc = cx.meta_list(span, sym::doc, vec![hidden]); let doc = cx.meta_list(span, sym::doc, vec![hidden]);
let doc_hidden = cx.attribute(doc); let doc_hidden = cx.attribute(doc);
let proc_macro = Ident::with_empty_ctxt(sym::proc_macro); let proc_macro = Ident::with_dummy_span(sym::proc_macro);
let krate = cx.item(span, let krate = cx.item(span,
proc_macro, proc_macro,
Vec::new(), Vec::new(),
@ -349,7 +348,7 @@ fn mk_decls(
let custom_derive = Ident::from_str("custom_derive"); let custom_derive = Ident::from_str("custom_derive");
let attr = Ident::from_str("attr"); let attr = Ident::from_str("attr");
let bang = Ident::from_str("bang"); let bang = Ident::from_str("bang");
let crate_kw = Ident::with_empty_ctxt(kw::Crate); let crate_kw = Ident::with_dummy_span(kw::Crate);
let decls = { let decls = {
let local_path = |sp: Span, name| { let local_path = |sp: Span, name| {

View file

@ -1,8 +1,8 @@
use syntax::{ast, attr}; use syntax::{ast, attr};
use syntax::edition::Edition; use syntax::edition::Edition;
use syntax::ext::hygiene::{ExpnId, MacroKind}; use syntax::ext::hygiene::MacroKind;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned, respan}; use syntax::source_map::{ExpnData, ExpnKind, dummy_spanned, respan};
use syntax::symbol::{Ident, Symbol, kw, sym}; use syntax::symbol::{Ident, Symbol, kw, sym};
use syntax_pos::DUMMY_SP; use syntax_pos::DUMMY_SP;
@ -32,7 +32,7 @@ pub fn inject(
// HACK(eddyb) gensym the injected crates on the Rust 2018 edition, // HACK(eddyb) gensym the injected crates on the Rust 2018 edition,
// so they don't accidentally interfere with the new import paths. // so they don't accidentally interfere with the new import paths.
let orig_name_sym = Symbol::intern(orig_name_str); let orig_name_sym = Symbol::intern(orig_name_str);
let orig_name_ident = Ident::with_empty_ctxt(orig_name_sym); let orig_name_ident = Ident::with_dummy_span(orig_name_sym);
let (rename, orig_name) = if rust_2018 { let (rename, orig_name) = if rust_2018 {
(orig_name_ident.gensym(), Some(orig_name_sym)) (orig_name_ident.gensym(), Some(orig_name_sym))
} else { } else {
@ -40,7 +40,7 @@ pub fn inject(
}; };
krate.module.items.insert(0, P(ast::Item { krate.module.items.insert(0, P(ast::Item {
attrs: vec![attr::mk_attr_outer( attrs: vec![attr::mk_attr_outer(
attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::macro_use)) attr::mk_word_item(ast::Ident::with_dummy_span(sym::macro_use))
)], )],
vis: dummy_spanned(ast::VisibilityKind::Inherited), vis: dummy_spanned(ast::VisibilityKind::Inherited),
node: ast::ItemKind::ExternCrate(alt_std_name.or(orig_name)), node: ast::ItemKind::ExternCrate(alt_std_name.or(orig_name)),
@ -55,7 +55,7 @@ pub fn inject(
// the prelude. // the prelude.
let name = names[0]; let name = names[0];
let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable(
ExpnKind::Macro(MacroKind::Attr, sym::std_inject), DUMMY_SP, edition, ExpnKind::Macro(MacroKind::Attr, sym::std_inject), DUMMY_SP, edition,
[sym::prelude_import][..].into(), [sym::prelude_import][..].into(),
)); ));
@ -66,7 +66,7 @@ pub fn inject(
vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited), vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
node: ast::ItemKind::Use(P(ast::UseTree { node: ast::ItemKind::Use(P(ast::UseTree {
prefix: ast::Path { prefix: ast::Path {
segments: iter::once(ast::Ident::with_empty_ctxt(kw::PathRoot)) segments: iter::once(ast::Ident::with_dummy_span(kw::PathRoot))
.chain( .chain(
[name, "prelude", "v1"].iter().cloned() [name, "prelude", "v1"].iter().cloned()
.map(ast::Ident::from_str) .map(ast::Ident::from_str)

View file

@ -29,7 +29,7 @@ pub fn expand_test_case(
if !ecx.ecfg.should_test { return vec![]; } if !ecx.ecfg.should_test { return vec![]; }
let sp = attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id)); let sp = attr_sp.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id));
let mut item = anno_item.expect_item(); let mut item = anno_item.expect_item();
item = item.map(|mut item| { item = item.map(|mut item| {
item.vis = respan(item.vis.span, ast::VisibilityKind::Public); item.vis = respan(item.vis.span, ast::VisibilityKind::Public);
@ -93,7 +93,7 @@ pub fn expand_test_or_bench(
return vec![Annotatable::Item(item)]; return vec![Annotatable::Item(item)];
} }
let ctxt = SyntaxContext::empty().apply_mark(cx.current_expansion.id); let ctxt = SyntaxContext::root().apply_mark(cx.current_expansion.id);
let (sp, attr_sp) = (item.span.with_ctxt(ctxt), attr_sp.with_ctxt(ctxt)); let (sp, attr_sp) = (item.span.with_ctxt(ctxt), attr_sp.with_ctxt(ctxt));
// Gensym "test" so we can extern crate without conflicting with any local names // Gensym "test" so we can extern crate without conflicting with any local names

View file

@ -5,14 +5,13 @@ use smallvec::{smallvec, SmallVec};
use syntax::ast::{self, Ident}; use syntax::ast::{self, Ident};
use syntax::attr; use syntax::attr;
use syntax::entry::{self, EntryPointType}; use syntax::entry::{self, EntryPointType};
use syntax::ext::base::{ExtCtxt, Resolver}; use syntax::ext::base::{ExtCtxt, MacroKind, Resolver};
use syntax::ext::expand::{AstFragment, ExpansionConfig}; use syntax::ext::expand::{AstFragment, ExpansionConfig};
use syntax::ext::hygiene::{ExpnId, MacroKind};
use syntax::feature_gate::Features; use syntax::feature_gate::Features;
use syntax::mut_visit::{*, ExpectOne}; use syntax::mut_visit::{*, ExpectOne};
use syntax::parse::ParseSess; use syntax::parse::ParseSess;
use syntax::ptr::P; use syntax::ptr::P;
use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned}; use syntax::source_map::{ExpnData, ExpnKind, dummy_spanned};
use syntax::symbol::{kw, sym, Symbol}; use syntax::symbol::{kw, sym, Symbol};
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
@ -150,7 +149,7 @@ impl MutVisitor for EntryPointCleaner {
EntryPointType::MainAttr | EntryPointType::MainAttr |
EntryPointType::Start => EntryPointType::Start =>
item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| { item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| {
let allow_ident = Ident::with_empty_ctxt(sym::allow); let allow_ident = Ident::with_dummy_span(sym::allow);
let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code")); let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code"));
let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]); let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]);
let allow_dead_code = attr::mk_attr_outer(allow_dead_code_item); let allow_dead_code = attr::mk_attr_outer(allow_dead_code_item);
@ -191,7 +190,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>,
tests: Vec<Ident>, tests: Vec<Ident>,
tested_submods: Vec<(Ident, Ident)>) tested_submods: Vec<(Ident, Ident)>)
-> (P<ast::Item>, Ident) { -> (P<ast::Item>, Ident) {
let super_ = Ident::with_empty_ctxt(kw::Super); let super_ = Ident::with_dummy_span(kw::Super);
let items = tests.into_iter().map(|r| { let items = tests.into_iter().map(|r| {
cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public),
@ -269,12 +268,12 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
// #![main] // #![main]
// test::test_main_static(&[..tests]); // test::test_main_static(&[..tests]);
// } // }
let sp = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( let sp = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable(
ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, cx.ext_cx.parse_sess.edition, ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, cx.ext_cx.parse_sess.edition,
[sym::main, sym::test, sym::rustc_attrs][..].into(), [sym::main, sym::test, sym::rustc_attrs][..].into(),
)); ));
let ecx = &cx.ext_cx; let ecx = &cx.ext_cx;
let test_id = Ident::with_empty_ctxt(sym::test); let test_id = Ident::with_dummy_span(sym::test);
// test::test_main_static(...) // test::test_main_static(...)
let mut test_runner = cx.test_runner.clone().unwrap_or( let mut test_runner = cx.test_runner.clone().unwrap_or(

View file

@ -1,7 +1,6 @@
use crate::symbol::{Symbol, sym}; use crate::symbol::{Symbol, sym};
use std::fmt; use std::fmt;
use std::str::FromStr; use std::str::FromStr;
use crate::GLOBALS;
/// The edition of the compiler (RFC 2052) /// The edition of the compiler (RFC 2052)
#[derive(Clone, Copy, Hash, PartialEq, PartialOrd, Debug, RustcEncodable, RustcDecodable, Eq)] #[derive(Clone, Copy, Hash, PartialEq, PartialOrd, Debug, RustcEncodable, RustcDecodable, Eq)]
@ -39,10 +38,6 @@ impl fmt::Display for Edition {
} }
impl Edition { impl Edition {
pub fn from_session() -> Edition {
GLOBALS.with(|globals| globals.edition)
}
pub fn lint_name(&self) -> &'static str { pub fn lint_name(&self) -> &'static str {
match *self { match *self {
Edition::Edition2015 => "rust_2015_compatibility", Edition::Edition2015 => "rust_2015_compatibility",

View file

@ -13,8 +13,8 @@
// //
// This explains why `HygieneData`, `SyntaxContext` and `ExpnId` have interfaces // This explains why `HygieneData`, `SyntaxContext` and `ExpnId` have interfaces
// with a certain amount of redundancy in them. For example, // with a certain amount of redundancy in them. For example,
// `SyntaxContext::outer_expn_info` combines `SyntaxContext::outer` and // `SyntaxContext::outer_expn_data` combines `SyntaxContext::outer` and
// `ExpnId::expn_info` so that two `HygieneData` accesses can be performed within // `ExpnId::expn_data` so that two `HygieneData` accesses can be performed within
// a single `HygieneData::with` call. // a single `HygieneData::with` call.
// //
// It also explains why many functions appear in `HygieneData` and again in // It also explains why many functions appear in `HygieneData` and again in
@ -56,16 +56,6 @@ struct SyntaxContextData {
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct ExpnId(u32); pub struct ExpnId(u32);
// FIXME: Find a way to merge this with `ExpnInfo`.
#[derive(Debug)]
struct InternalExpnData {
parent: ExpnId,
/// Each expansion should have an associated expansion info, but sometimes there's a delay
/// between creation of an expansion ID and obtaining its info (e.g. macros are collected
/// first and then resolved later), so we use an `Option` here.
expn_info: Option<ExpnInfo>,
}
/// A property of a macro expansion that determines how identifiers /// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved. /// produced by that expansion are resolved.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug, RustcEncodable, RustcDecodable)] #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug, RustcEncodable, RustcDecodable)]
@ -86,8 +76,8 @@ pub enum Transparency {
} }
impl ExpnId { impl ExpnId {
pub fn fresh(parent: ExpnId, expn_info: Option<ExpnInfo>) -> Self { pub fn fresh(expn_data: Option<ExpnData>) -> Self {
HygieneData::with(|data| data.fresh_expn(parent, expn_info)) HygieneData::with(|data| data.fresh_expn(expn_data))
} }
/// The ID of the theoretical expansion that generates freshly parsed, unexpanded AST. /// The ID of the theoretical expansion that generates freshly parsed, unexpanded AST.
@ -107,21 +97,16 @@ impl ExpnId {
} }
#[inline] #[inline]
pub fn parent(self) -> ExpnId { pub fn expn_data(self) -> ExpnData {
HygieneData::with(|data| data.parent_expn(self)) HygieneData::with(|data| data.expn_data(self).clone())
} }
#[inline] #[inline]
pub fn expn_info(self) -> Option<ExpnInfo> { pub fn set_expn_data(self, expn_data: ExpnData) {
HygieneData::with(|data| data.expn_info(self).cloned())
}
#[inline]
pub fn set_expn_info(self, info: ExpnInfo) {
HygieneData::with(|data| { HygieneData::with(|data| {
let old_info = &mut data.expn_data[self.0 as usize].expn_info; let old_expn_data = &mut data.expn_data[self.0 as usize];
assert!(old_info.is_none(), "expansion info is reset for an expansion ID"); assert!(old_expn_data.is_none(), "expansion data is reset for an expansion ID");
*old_info = Some(info); *old_expn_data = Some(expn_data);
}) })
} }
@ -139,12 +124,9 @@ impl ExpnId {
#[inline] #[inline]
pub fn looks_like_proc_macro_derive(self) -> bool { pub fn looks_like_proc_macro_derive(self) -> bool {
HygieneData::with(|data| { HygieneData::with(|data| {
if data.default_transparency(self) == Transparency::Opaque { let expn_data = data.expn_data(self);
if let Some(expn_info) = data.expn_info(self) { if let ExpnKind::Macro(MacroKind::Derive, _) = expn_data.kind {
if let ExpnKind::Macro(MacroKind::Derive, _) = expn_info.kind { return expn_data.default_transparency == Transparency::Opaque;
return true;
}
}
} }
false false
}) })
@ -153,7 +135,10 @@ impl ExpnId {
#[derive(Debug)] #[derive(Debug)]
crate struct HygieneData { crate struct HygieneData {
expn_data: Vec<InternalExpnData>, /// Each expansion should have an associated expansion data, but sometimes there's a delay
/// between creation of an expansion ID and obtaining its data (e.g. macros are collected
/// first and then resolved later), so we use an `Option` here.
expn_data: Vec<Option<ExpnData>>,
syntax_context_data: Vec<SyntaxContextData>, syntax_context_data: Vec<SyntaxContextData>,
syntax_context_map: FxHashMap<(SyntaxContext, ExpnId, Transparency), SyntaxContext>, syntax_context_map: FxHashMap<(SyntaxContext, ExpnId, Transparency), SyntaxContext>,
} }
@ -161,10 +146,7 @@ crate struct HygieneData {
impl HygieneData { impl HygieneData {
crate fn new(edition: Edition) -> Self { crate fn new(edition: Edition) -> Self {
HygieneData { HygieneData {
expn_data: vec![InternalExpnData { expn_data: vec![Some(ExpnData::default(ExpnKind::Root, DUMMY_SP, edition))],
parent: ExpnId::root(),
expn_info: Some(ExpnInfo::default(ExpnKind::Root, DUMMY_SP, edition)),
}],
syntax_context_data: vec![SyntaxContextData { syntax_context_data: vec![SyntaxContextData {
outer_expn: ExpnId::root(), outer_expn: ExpnId::root(),
outer_transparency: Transparency::Opaque, outer_transparency: Transparency::Opaque,
@ -181,25 +163,14 @@ impl HygieneData {
GLOBALS.with(|globals| f(&mut *globals.hygiene_data.borrow_mut())) GLOBALS.with(|globals| f(&mut *globals.hygiene_data.borrow_mut()))
} }
fn fresh_expn(&mut self, parent: ExpnId, expn_info: Option<ExpnInfo>) -> ExpnId { fn fresh_expn(&mut self, expn_data: Option<ExpnData>) -> ExpnId {
self.expn_data.push(InternalExpnData { parent, expn_info }); self.expn_data.push(expn_data);
ExpnId(self.expn_data.len() as u32 - 1) ExpnId(self.expn_data.len() as u32 - 1)
} }
fn parent_expn(&self, expn_id: ExpnId) -> ExpnId { fn expn_data(&self, expn_id: ExpnId) -> &ExpnData {
self.expn_data[expn_id.0 as usize].parent self.expn_data[expn_id.0 as usize].as_ref()
} .expect("no expansion data for an expansion ID")
fn expn_info(&self, expn_id: ExpnId) -> Option<&ExpnInfo> {
if expn_id != ExpnId::root() {
Some(self.expn_data[expn_id.0 as usize].expn_info.as_ref()
.expect("no expansion info for an expansion ID"))
} else {
// FIXME: Some code relies on `expn_info().is_none()` meaning "no expansion".
// Introduce a method for checking for "no expansion" instead and always return
// `ExpnInfo` from this function instead of the `Option`.
None
}
} }
fn is_descendant_of(&self, mut expn_id: ExpnId, ancestor: ExpnId) -> bool { fn is_descendant_of(&self, mut expn_id: ExpnId, ancestor: ExpnId) -> bool {
@ -207,17 +178,11 @@ impl HygieneData {
if expn_id == ExpnId::root() { if expn_id == ExpnId::root() {
return false; return false;
} }
expn_id = self.parent_expn(expn_id); expn_id = self.expn_data(expn_id).parent;
} }
true true
} }
fn default_transparency(&self, expn_id: ExpnId) -> Transparency {
self.expn_info(expn_id).map_or(
Transparency::SemiTransparent, |einfo| einfo.default_transparency
)
}
fn modern(&self, ctxt: SyntaxContext) -> SyntaxContext { fn modern(&self, ctxt: SyntaxContext) -> SyntaxContext {
self.syntax_context_data[ctxt.0 as usize].opaque self.syntax_context_data[ctxt.0 as usize].opaque
} }
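With `InternalExpnData` gone, `HygieneData` keeps a single `Vec<Option<ExpnData>>` indexed by `ExpnId`, and the parent link lives inside `ExpnData`, so `is_descendant_of` walks `expn_data(id).parent` instead of a separate parent table. A compact standalone model of that layout (simplified types, not the actual compiler structures):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct ExpnId(u32);

    impl ExpnId {
        const ROOT: ExpnId = ExpnId(0);
    }

    #[derive(Clone)]
    struct ExpnData {
        parent: ExpnId,
        descr: &'static str,
    }

    struct HygieneData {
        // A slot may stay None between ExpnId creation and resolution of the invocation.
        expn_data: Vec<Option<ExpnData>>,
    }

    impl HygieneData {
        fn new() -> Self {
            HygieneData {
                expn_data: vec![Some(ExpnData { parent: ExpnId::ROOT, descr: "root" })],
            }
        }

        fn fresh_expn(&mut self, data: Option<ExpnData>) -> ExpnId {
            self.expn_data.push(data);
            ExpnId(self.expn_data.len() as u32 - 1)
        }

        fn expn_data(&self, id: ExpnId) -> &ExpnData {
            self.expn_data[id.0 as usize]
                .as_ref()
                .expect("no expansion data for an expansion ID")
        }

        // The parent chain now comes straight out of ExpnData.
        fn is_descendant_of(&self, mut id: ExpnId, ancestor: ExpnId) -> bool {
            while id != ancestor {
                if id == ExpnId::ROOT {
                    return false;
                }
                id = self.expn_data(id).parent;
            }
            true
        }
    }

    fn main() {
        let mut data = HygieneData::new();
        let outer = data.fresh_expn(Some(ExpnData { parent: ExpnId::ROOT, descr: "outer!" }));
        let inner = data.fresh_expn(Some(ExpnData { parent: outer, descr: "inner!" }));
        assert_eq!(data.expn_data(inner).descr, "inner!");
        assert!(data.is_descendant_of(inner, outer));
        assert!(!data.is_descendant_of(outer, inner));
    }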
@ -246,7 +211,7 @@ impl HygieneData {
fn marks(&self, mut ctxt: SyntaxContext) -> Vec<(ExpnId, Transparency)> { fn marks(&self, mut ctxt: SyntaxContext) -> Vec<(ExpnId, Transparency)> {
let mut marks = Vec::new(); let mut marks = Vec::new();
while ctxt != SyntaxContext::empty() { while ctxt != SyntaxContext::root() {
marks.push((self.outer_expn(ctxt), self.outer_transparency(ctxt))); marks.push((self.outer_expn(ctxt), self.outer_transparency(ctxt)));
ctxt = self.parent_ctxt(ctxt); ctxt = self.parent_ctxt(ctxt);
} }
@ -255,12 +220,8 @@ impl HygieneData {
} }
fn walk_chain(&self, mut span: Span, to: SyntaxContext) -> Span { fn walk_chain(&self, mut span: Span, to: SyntaxContext) -> Span {
while span.ctxt() != crate::NO_EXPANSION && span.ctxt() != to { while span.from_expansion() && span.ctxt() != to {
if let Some(info) = self.expn_info(self.outer_expn(span.ctxt())) { span = self.expn_data(self.outer_expn(span.ctxt())).call_site;
span = info.call_site;
} else {
break;
}
} }
span span
} }
@ -275,7 +236,9 @@ impl HygieneData {
fn apply_mark(&mut self, ctxt: SyntaxContext, expn_id: ExpnId) -> SyntaxContext { fn apply_mark(&mut self, ctxt: SyntaxContext, expn_id: ExpnId) -> SyntaxContext {
assert_ne!(expn_id, ExpnId::root()); assert_ne!(expn_id, ExpnId::root());
self.apply_mark_with_transparency(ctxt, expn_id, self.default_transparency(expn_id)) self.apply_mark_with_transparency(
ctxt, expn_id, self.expn_data(expn_id).default_transparency
)
} }
fn apply_mark_with_transparency(&mut self, ctxt: SyntaxContext, expn_id: ExpnId, fn apply_mark_with_transparency(&mut self, ctxt: SyntaxContext, expn_id: ExpnId,
@ -285,15 +248,14 @@ impl HygieneData {
return self.apply_mark_internal(ctxt, expn_id, transparency); return self.apply_mark_internal(ctxt, expn_id, transparency);
} }
let call_site_ctxt = let call_site_ctxt = self.expn_data(expn_id).call_site.ctxt();
self.expn_info(expn_id).map_or(SyntaxContext::empty(), |info| info.call_site.ctxt());
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
self.modern(call_site_ctxt) self.modern(call_site_ctxt)
} else { } else {
self.modern_and_legacy(call_site_ctxt) self.modern_and_legacy(call_site_ctxt)
}; };
if call_site_ctxt == SyntaxContext::empty() { if call_site_ctxt == SyntaxContext::root() {
return self.apply_mark_internal(ctxt, expn_id, transparency); return self.apply_mark_internal(ctxt, expn_id, transparency);
} }
@ -400,7 +362,7 @@ pub fn update_dollar_crate_names(mut get_name: impl FnMut(SyntaxContext) -> Symb
impl SyntaxContext { impl SyntaxContext {
#[inline] #[inline]
pub const fn empty() -> Self { pub const fn root() -> Self {
SyntaxContext(0) SyntaxContext(0)
} }
@ -578,20 +540,20 @@ impl SyntaxContext {
HygieneData::with(|data| data.outer_expn(self)) HygieneData::with(|data| data.outer_expn(self))
} }
/// `ctxt.outer_expn_info()` is equivalent to but faster than /// `ctxt.outer_expn_data()` is equivalent to but faster than
/// `ctxt.outer_expn().expn_info()`. /// `ctxt.outer_expn().expn_data()`.
#[inline] #[inline]
pub fn outer_expn_info(self) -> Option<ExpnInfo> { pub fn outer_expn_data(self) -> ExpnData {
HygieneData::with(|data| data.expn_info(data.outer_expn(self)).cloned()) HygieneData::with(|data| data.expn_data(data.outer_expn(self)).clone())
} }
/// `ctxt.outer_expn_with_info()` is equivalent to but faster than /// `ctxt.outer_expn_with_data()` is equivalent to but faster than
/// `{ let outer = ctxt.outer_expn(); (outer, outer.expn_info()) }`. /// `{ let outer = ctxt.outer_expn(); (outer, outer.expn_data()) }`.
#[inline] #[inline]
pub fn outer_expn_with_info(self) -> (ExpnId, Option<ExpnInfo>) { pub fn outer_expn_with_data(self) -> (ExpnId, ExpnData) {
HygieneData::with(|data| { HygieneData::with(|data| {
let outer = data.outer_expn(self); let outer = data.outer_expn(self);
(outer, data.expn_info(outer).cloned()) (outer, data.expn_data(outer).clone())
}) })
} }
@ -612,10 +574,10 @@ impl Span {
/// other compiler-generated code to set per-span properties like allowed unstable features. /// other compiler-generated code to set per-span properties like allowed unstable features.
/// The returned span belongs to the created expansion and has the new properties, /// The returned span belongs to the created expansion and has the new properties,
/// but its location is inherited from the current span. /// but its location is inherited from the current span.
pub fn fresh_expansion(self, parent: ExpnId, expn_info: ExpnInfo) -> Span { pub fn fresh_expansion(self, expn_data: ExpnData) -> Span {
HygieneData::with(|data| { HygieneData::with(|data| {
let expn_id = data.fresh_expn(parent, Some(expn_info)); let expn_id = data.fresh_expn(Some(expn_data));
self.with_ctxt(data.apply_mark(SyntaxContext::empty(), expn_id)) self.with_ctxt(data.apply_mark(SyntaxContext::root(), expn_id))
}) })
} }
} }
@ -623,8 +585,12 @@ impl Span {
/// A subset of properties from both macro definition and macro call available through global data. /// A subset of properties from both macro definition and macro call available through global data.
/// Avoid using this if you have access to the original definition or call structures. /// Avoid using this if you have access to the original definition or call structures.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)] #[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct ExpnInfo { pub struct ExpnData {
// --- The part unique to each expansion. // --- The part unique to each expansion.
/// The kind of this expansion - macro or compiler desugaring.
pub kind: ExpnKind,
/// The expansion that produced this expansion.
pub parent: ExpnId,
/// The location of the actual macro invocation or syntax sugar, e.g. /// The location of the actual macro invocation or syntax sugar, e.g.

/// `let x = foo!();` or `if let Some(y) = x {}` /// `let x = foo!();` or `if let Some(y) = x {}`
/// ///
@ -632,18 +598,18 @@ pub struct ExpnInfo {
/// `foo!()` invoked `bar!()` internally, and there was an /// `foo!()` invoked `bar!()` internally, and there was an
/// expression inside `bar!`; the call_site of the expression in /// expression inside `bar!`; the call_site of the expression in
/// the expansion would point to the `bar!` invocation; that /// the expansion would point to the `bar!` invocation; that
/// call_site span would have its own ExpnInfo, with the call_site /// call_site span would have its own ExpnData, with the call_site
/// pointing to the `foo!` invocation. /// pointing to the `foo!` invocation.
pub call_site: Span, pub call_site: Span,
/// The kind of this expansion - macro or compiler desugaring.
pub kind: ExpnKind,
// --- The part specific to the macro/desugaring definition. // --- The part specific to the macro/desugaring definition.
// --- FIXME: Share it between expansions with the same definition. // --- It may be reasonable to share this part between expansions with the same definition,
// --- but such sharing is known to bring some minor inconveniences without also bringing
// --- noticeable perf improvements (PR #62898).
/// The span of the macro definition (possibly dummy). /// The span of the macro definition (possibly dummy).
/// This span serves only an informational purpose and is not used for resolution. /// This span serves only an informational purpose and is not used for resolution.
pub def_site: Span, pub def_site: Span,
/// Transparency used by `apply_mark` for the expansion with this expansion info by default. /// Transparency used by `apply_mark` for the expansion with this expansion data by default.
pub default_transparency: Transparency, pub default_transparency: Transparency,
/// List of #[unstable]/feature-gated features that the macro is allowed to use /// List of #[unstable]/feature-gated features that the macro is allowed to use
/// internally without forcing the whole crate to opt-in /// internally without forcing the whole crate to opt-in
@ -659,12 +625,13 @@ pub struct ExpnInfo {
pub edition: Edition, pub edition: Edition,
} }
impl ExpnInfo { impl ExpnData {
/// Constructs an expansion info with default properties. /// Constructs expansion data with default properties.
pub fn default(kind: ExpnKind, call_site: Span, edition: Edition) -> ExpnInfo { pub fn default(kind: ExpnKind, call_site: Span, edition: Edition) -> ExpnData {
ExpnInfo { ExpnData {
call_site,
kind, kind,
parent: ExpnId::root(),
call_site,
def_site: DUMMY_SP, def_site: DUMMY_SP,
default_transparency: Transparency::SemiTransparent, default_transparency: Transparency::SemiTransparent,
allow_internal_unstable: None, allow_internal_unstable: None,
@ -675,12 +642,17 @@ impl ExpnInfo {
} }
pub fn allow_unstable(kind: ExpnKind, call_site: Span, edition: Edition, pub fn allow_unstable(kind: ExpnKind, call_site: Span, edition: Edition,
allow_internal_unstable: Lrc<[Symbol]>) -> ExpnInfo { allow_internal_unstable: Lrc<[Symbol]>) -> ExpnData {
ExpnInfo { ExpnData {
allow_internal_unstable: Some(allow_internal_unstable), allow_internal_unstable: Some(allow_internal_unstable),
..ExpnInfo::default(kind, call_site, edition) ..ExpnData::default(kind, call_site, edition)
} }
} }
#[inline]
pub fn is_root(&self) -> bool {
if let ExpnKind::Root = self.kind { true } else { false }
}
} }
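
The constructors above rely on Rust's struct-update syntax: `allow_unstable` overrides one field and takes everything else from `default`. A standalone sketch of that shape, with plain integers, strings and Rc standing in for Edition, Symbol and Lrc (not the rustc types):

    use std::rc::Rc;

    #[derive(Clone, Debug)]
    enum ExpnKind { Root, Macro(String) }

    type Span = (u32, u32);          // stand-in for the real Span
    const DUMMY_SP: Span = (0, 0);

    #[derive(Clone, Debug)]
    struct ExpnData {
        kind: ExpnKind,
        call_site: Span,
        def_site: Span,
        allow_internal_unstable: Option<Rc<[String]>>,
        edition: u16,                // stand-in for Edition
    }

    impl ExpnData {
        fn default(kind: ExpnKind, call_site: Span, edition: u16) -> ExpnData {
            ExpnData {
                kind,
                call_site,
                def_site: DUMMY_SP,
                allow_internal_unstable: None,
                edition,
            }
        }

        fn allow_unstable(kind: ExpnKind, call_site: Span, edition: u16,
                          allow_internal_unstable: Rc<[String]>) -> ExpnData {
            ExpnData {
                allow_internal_unstable: Some(allow_internal_unstable),
                // Everything else comes from the default constructor.
                ..ExpnData::default(kind, call_site, edition)
            }
        }

        fn is_root(&self) -> bool {
            if let ExpnKind::Root = self.kind { true } else { false }
        }
    }

    fn main() {
        let root = ExpnData::default(ExpnKind::Root, DUMMY_SP, 2018);
        assert!(root.is_root());

        let features: Rc<[String]> =
            Rc::from(vec!["core_intrinsics".to_string()].into_boxed_slice());
        let mac = ExpnData::allow_unstable(ExpnKind::Macro("vec".into()), (10, 20), 2018, features);
        assert!(!mac.is_root());
        assert!(mac.allow_internal_unstable.is_some());
        assert_eq!((mac.call_site, mac.edition), ((10, 20), 2018));
        assert_eq!(mac.def_site, DUMMY_SP);
    }
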
/// Expansion kind. /// Expansion kind.
@ -767,6 +739,18 @@ impl DesugaringKind {
} }
} }
impl Encodable for ExpnId {
fn encode<E: Encoder>(&self, _: &mut E) -> Result<(), E::Error> {
Ok(()) // FIXME(jseyfried) intercrate hygiene
}
}
impl Decodable for ExpnId {
fn decode<D: Decoder>(_: &mut D) -> Result<Self, D::Error> {
Ok(ExpnId::root()) // FIXME(jseyfried) intercrate hygiene
}
}
impl Encodable for SyntaxContext { impl Encodable for SyntaxContext {
fn encode<E: Encoder>(&self, _: &mut E) -> Result<(), E::Error> { fn encode<E: Encoder>(&self, _: &mut E) -> Result<(), E::Error> {
Ok(()) // FIXME(jseyfried) intercrate hygiene Ok(()) // FIXME(jseyfried) intercrate hygiene
@ -774,7 +758,7 @@ impl Encodable for SyntaxContext {
} }
impl Decodable for SyntaxContext { impl Decodable for SyntaxContext {
fn decode<D: Decoder>(_: &mut D) -> Result<SyntaxContext, D::Error> { fn decode<D: Decoder>(_: &mut D) -> Result<Self, D::Error> {
Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene Ok(SyntaxContext::root()) // FIXME(jseyfried) intercrate hygiene
} }
} }

View file

@ -21,7 +21,7 @@ use rustc_serialize::{Encodable, Decodable, Encoder, Decoder};
pub mod edition; pub mod edition;
use edition::Edition; use edition::Edition;
pub mod hygiene; pub mod hygiene;
pub use hygiene::{ExpnId, SyntaxContext, ExpnInfo, ExpnKind, MacroKind, DesugaringKind}; pub use hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind, MacroKind, DesugaringKind};
mod span_encoding; mod span_encoding;
pub use span_encoding::{Span, DUMMY_SP}; pub use span_encoding::{Span, DUMMY_SP};
@ -49,7 +49,6 @@ pub struct Globals {
symbol_interner: Lock<symbol::Interner>, symbol_interner: Lock<symbol::Interner>,
span_interner: Lock<span_encoding::SpanInterner>, span_interner: Lock<span_encoding::SpanInterner>,
hygiene_data: Lock<hygiene::HygieneData>, hygiene_data: Lock<hygiene::HygieneData>,
edition: Edition,
} }
impl Globals { impl Globals {
@ -58,7 +57,6 @@ impl Globals {
symbol_interner: Lock::new(symbol::Interner::fresh()), symbol_interner: Lock::new(symbol::Interner::fresh()),
span_interner: Lock::new(span_encoding::SpanInterner::default()), span_interner: Lock::new(span_encoding::SpanInterner::default()),
hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), hygiene_data: Lock::new(hygiene::HygieneData::new(edition)),
edition,
} }
} }
} }
@ -288,6 +286,17 @@ impl Span {
span.lo.0 == 0 && span.hi.0 == 0 span.lo.0 == 0 && span.hi.0 == 0
} }
/// Returns `true` if this span comes from a macro or desugaring.
#[inline]
pub fn from_expansion(self) -> bool {
self.ctxt() != SyntaxContext::root()
}
#[inline]
pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span {
Span::new(lo, hi, SyntaxContext::root())
}
/// Returns a new span representing an empty span at the beginning of this span /// Returns a new span representing an empty span at the beginning of this span
#[inline] #[inline]
pub fn shrink_to_lo(self) -> Span { pub fn shrink_to_lo(self) -> Span {
@ -344,20 +353,20 @@ impl Span {
/// Returns the source span -- this is either the supplied span, or the span for /// Returns the source span -- this is either the supplied span, or the span for
/// the macro callsite that expanded to it. /// the macro callsite that expanded to it.
pub fn source_callsite(self) -> Span { pub fn source_callsite(self) -> Span {
self.ctxt().outer_expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self) let expn_data = self.ctxt().outer_expn_data();
if !expn_data.is_root() { expn_data.call_site.source_callsite() } else { self }
} }
/// The `Span` for the tokens in the previous macro expansion from which `self` was generated, /// The `Span` for the tokens in the previous macro expansion from which `self` was generated,
/// if any. /// if any.
pub fn parent(self) -> Option<Span> { pub fn parent(self) -> Option<Span> {
self.ctxt().outer_expn_info().map(|i| i.call_site) let expn_data = self.ctxt().outer_expn_data();
if !expn_data.is_root() { Some(expn_data.call_site) } else { None }
} }
/// Edition of the crate from which this span came. /// Edition of the crate from which this span came.
pub fn edition(self) -> edition::Edition { pub fn edition(self) -> edition::Edition {
self.ctxt().outer_expn_info().map_or_else(|| { self.ctxt().outer_expn_data().edition
Edition::from_session()
}, |einfo| einfo.edition)
} }
#[inline] #[inline]
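
The recurring pattern in this file is now "fetch ExpnData unconditionally, then check `is_root()`" instead of matching on Option<ExpnInfo>. A standalone sketch of `source_callsite` under that model (toy types; the hygiene table is passed around explicitly instead of living in the thread-local GLOBALS):

    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    struct SyntaxContext(u32);

    impl SyntaxContext {
        fn root() -> SyntaxContext { SyntaxContext(0) }
    }

    #[derive(Copy, Clone, Debug)]
    struct Span { lo: u32, hi: u32, ctxt: SyntaxContext }

    #[derive(Clone, Debug)]
    enum ExpnKind { Root, Macro(&'static str) }

    #[derive(Clone, Debug)]
    struct ExpnData { kind: ExpnKind, call_site: Span }

    impl ExpnData {
        fn is_root(&self) -> bool {
            if let ExpnKind::Root = self.kind { true } else { false }
        }
    }

    // Toy hygiene table: index = SyntaxContext, value = data of its outer expansion.
    struct Hygiene { outer_expn_data: Vec<ExpnData> }

    impl Hygiene {
        fn outer_expn_data(&self, ctxt: SyntaxContext) -> &ExpnData {
            &self.outer_expn_data[ctxt.0 as usize]
        }

        // Follows call sites until reaching a span the user wrote,
        // mirroring the shape of Span::source_callsite above.
        fn source_callsite(&self, span: Span) -> Span {
            let expn_data = self.outer_expn_data(span.ctxt);
            if !expn_data.is_root() { self.source_callsite(expn_data.call_site) } else { span }
        }
    }

    fn main() {
        let user_span = Span { lo: 0, hi: 5, ctxt: SyntaxContext::root() };
        let hygiene = Hygiene {
            outer_expn_data: vec![
                ExpnData { kind: ExpnKind::Root, call_site: user_span },          // ctxt 0
                ExpnData { kind: ExpnKind::Macro("foo"), call_site: user_span },  // ctxt 1
                ExpnData { kind: ExpnKind::Macro("bar"),                          // ctxt 2
                           call_site: Span { lo: 0, hi: 5, ctxt: SyntaxContext(1) } },
            ],
        };
        let deep = Span { lo: 0, hi: 5, ctxt: SyntaxContext(2) };
        let resolved = hygiene.source_callsite(deep);
        assert_eq!(resolved.ctxt, SyntaxContext::root());
        assert_eq!((resolved.lo, resolved.hi), (0, 5));
    }
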
@ -373,52 +382,42 @@ impl Span {
/// Returns the source callee. /// Returns the source callee.
/// ///
/// Returns `None` if the supplied span has no expansion trace, /// Returns `None` if the supplied span has no expansion trace,
/// else returns the `ExpnInfo` for the macro definition /// else returns the `ExpnData` for the macro definition
/// corresponding to the source callsite. /// corresponding to the source callsite.
pub fn source_callee(self) -> Option<ExpnInfo> { pub fn source_callee(self) -> Option<ExpnData> {
fn source_callee(info: ExpnInfo) -> ExpnInfo { fn source_callee(expn_data: ExpnData) -> ExpnData {
match info.call_site.ctxt().outer_expn_info() { let next_expn_data = expn_data.call_site.ctxt().outer_expn_data();
Some(info) => source_callee(info), if !next_expn_data.is_root() { source_callee(next_expn_data) } else { expn_data }
None => info,
}
} }
self.ctxt().outer_expn_info().map(source_callee) let expn_data = self.ctxt().outer_expn_data();
if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None }
} }
/// Checks if a span is "internal" to a macro in which `#[unstable]` /// Checks if a span is "internal" to a macro in which `#[unstable]`
/// items can be used (that is, a macro marked with /// items can be used (that is, a macro marked with
/// `#[allow_internal_unstable]`). /// `#[allow_internal_unstable]`).
pub fn allows_unstable(&self, feature: Symbol) -> bool { pub fn allows_unstable(&self, feature: Symbol) -> bool {
match self.ctxt().outer_expn_info() { self.ctxt().outer_expn_data().allow_internal_unstable.map_or(false, |features| {
Some(info) => info features.iter().any(|&f| {
.allow_internal_unstable f == feature || f == sym::allow_internal_unstable_backcompat_hack
.map_or(false, |features| features.iter().any(|&f| })
f == feature || f == sym::allow_internal_unstable_backcompat_hack })
)),
None => false,
}
} }
/// Checks if this span arises from a compiler desugaring of kind `kind`. /// Checks if this span arises from a compiler desugaring of kind `kind`.
pub fn is_desugaring(&self, kind: DesugaringKind) -> bool { pub fn is_desugaring(&self, kind: DesugaringKind) -> bool {
match self.ctxt().outer_expn_info() { match self.ctxt().outer_expn_data().kind {
Some(info) => match info.kind { ExpnKind::Desugaring(k) => k == kind,
ExpnKind::Desugaring(k) => k == kind, _ => false,
_ => false,
},
None => false,
} }
} }
/// Returns the compiler desugaring that created this span, or `None` /// Returns the compiler desugaring that created this span, or `None`
/// if this span is not from a desugaring. /// if this span is not from a desugaring.
pub fn desugaring_kind(&self) -> Option<DesugaringKind> { pub fn desugaring_kind(&self) -> Option<DesugaringKind> {
match self.ctxt().outer_expn_info() { match self.ctxt().outer_expn_data().kind {
Some(info) => match info.kind { ExpnKind::Desugaring(k) => Some(k),
ExpnKind::Desugaring(k) => Some(k), _ => None
_ => None
},
None => None
} }
} }
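
The `allows_unstable` rewrite in the hunk above is the usual idiom for querying an optional allow-list: `map_or(false, ..)` combined with `iter().any(..)`. A standalone sketch, with &str and Arc standing in for Symbol and Lrc; since this version borrows `self` rather than owning a cloned ExpnData, it adds an `as_deref`:

    use std::sync::Arc;

    struct ExpnData {
        allow_internal_unstable: Option<Arc<[&'static str]>>,
    }

    impl ExpnData {
        fn allows_unstable(&self, feature: &str) -> bool {
            self.allow_internal_unstable
                .as_deref() // borrow the slice out of the Option<Arc<[_]>>
                .map_or(false, |features| features.iter().any(|&f| f == feature))
        }
    }

    fn main() {
        let none = ExpnData { allow_internal_unstable: None };
        let some = ExpnData {
            allow_internal_unstable: Some(Arc::from(&["core_intrinsics", "fmt_internals"][..])),
        };
        assert!(!none.allows_unstable("core_intrinsics"));
        assert!(some.allows_unstable("fmt_internals"));
        assert!(!some.allows_unstable("never_type"));
    }
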
@ -426,19 +425,20 @@ impl Span {
/// can be used without triggering the `unsafe_code` lint /// can be used without triggering the `unsafe_code` lint
// (that is, a macro marked with `#[allow_internal_unsafe]`). // (that is, a macro marked with `#[allow_internal_unsafe]`).
pub fn allows_unsafe(&self) -> bool { pub fn allows_unsafe(&self) -> bool {
match self.ctxt().outer_expn_info() { self.ctxt().outer_expn_data().allow_internal_unsafe
Some(info) => info.allow_internal_unsafe,
None => false,
}
} }
pub fn macro_backtrace(mut self) -> Vec<MacroBacktrace> { pub fn macro_backtrace(mut self) -> Vec<MacroBacktrace> {
let mut prev_span = DUMMY_SP; let mut prev_span = DUMMY_SP;
let mut result = vec![]; let mut result = vec![];
while let Some(info) = self.ctxt().outer_expn_info() { loop {
let expn_data = self.ctxt().outer_expn_data();
if expn_data.is_root() {
break;
}
// Don't print recursive invocations. // Don't print recursive invocations.
if !info.call_site.source_equal(&prev_span) { if !expn_data.call_site.source_equal(&prev_span) {
let (pre, post) = match info.kind { let (pre, post) = match expn_data.kind {
ExpnKind::Root => break, ExpnKind::Root => break,
ExpnKind::Desugaring(..) => ("desugaring of ", ""), ExpnKind::Desugaring(..) => ("desugaring of ", ""),
ExpnKind::Macro(macro_kind, _) => match macro_kind { ExpnKind::Macro(macro_kind, _) => match macro_kind {
@ -448,14 +448,14 @@ impl Span {
} }
}; };
result.push(MacroBacktrace { result.push(MacroBacktrace {
call_site: info.call_site, call_site: expn_data.call_site,
macro_decl_name: format!("{}{}{}", pre, info.kind.descr(), post), macro_decl_name: format!("{}{}{}", pre, expn_data.kind.descr(), post),
def_site_span: info.def_site, def_site_span: expn_data.def_site,
}); });
} }
prev_span = self; prev_span = self;
self = info.call_site; self = expn_data.call_site;
} }
result result
} }
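
The `macro_backtrace` rewrite above replaces the old `while let Some(info)` driver with an explicit `loop` that breaks once the root expansion is reached, while still skipping recursive invocations. A standalone sketch of that control flow (toy types; a "span" is just an index into a flat table here):

    #[derive(Clone, Debug)]
    enum ExpnKind { Root, Macro(&'static str), Desugaring(&'static str) }

    #[derive(Clone, Debug)]
    struct ExpnData { kind: ExpnKind, call_site: usize /* index of the calling record */ }

    fn macro_backtrace(table: &[ExpnData], mut span: usize) -> Vec<String> {
        let mut prev_span = usize::MAX;
        let mut result = Vec::new();
        loop {
            let expn_data = &table[span];
            // New shape: fetch the data unconditionally, stop at the root expansion.
            if let ExpnKind::Root = expn_data.kind {
                break;
            }
            // Don't print recursive invocations.
            if expn_data.call_site != prev_span {
                let name = match &expn_data.kind {
                    ExpnKind::Root => break,
                    ExpnKind::Desugaring(what) => format!("desugaring of {}", what),
                    ExpnKind::Macro(name) => format!("{}!", name),
                };
                result.push(name);
            }
            prev_span = span;
            span = expn_data.call_site;
        }
        result
    }

    fn main() {
        // 0: user code (root); 1: foo! expansion; 2: bar! called from foo!;
        // 3: a desugaring produced inside bar!'s expansion.
        let table = vec![
            ExpnData { kind: ExpnKind::Root, call_site: 0 },
            ExpnData { kind: ExpnKind::Macro("foo"), call_site: 0 },
            ExpnData { kind: ExpnKind::Macro("bar"), call_site: 1 },
            ExpnData { kind: ExpnKind::Desugaring("await"), call_site: 2 },
        ];
        assert_eq!(
            macro_backtrace(&table, 3),
            vec!["desugaring of await".to_string(), "bar!".to_string(), "foo!".to_string()]
        );
    }
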
@ -468,9 +468,9 @@ impl Span {
// Return the macro span on its own to avoid weird diagnostic output. It is preferable to // Return the macro span on its own to avoid weird diagnostic output. It is preferable to
// have an incomplete span than a completely nonsensical one. // have an incomplete span than a completely nonsensical one.
if span_data.ctxt != end_data.ctxt { if span_data.ctxt != end_data.ctxt {
if span_data.ctxt == SyntaxContext::empty() { if span_data.ctxt == SyntaxContext::root() {
return end; return end;
} else if end_data.ctxt == SyntaxContext::empty() { } else if end_data.ctxt == SyntaxContext::root() {
return self; return self;
} }
// Both spans fall within a macro. // Both spans fall within a macro.
@ -479,7 +479,7 @@ impl Span {
Span::new( Span::new(
cmp::min(span_data.lo, end_data.lo), cmp::min(span_data.lo, end_data.lo),
cmp::max(span_data.hi, end_data.hi), cmp::max(span_data.hi, end_data.hi),
if span_data.ctxt == SyntaxContext::empty() { end_data.ctxt } else { span_data.ctxt }, if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt },
) )
} }
@ -490,7 +490,7 @@ impl Span {
Span::new( Span::new(
span.hi, span.hi,
end.lo, end.lo,
if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt }, if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
) )
} }
@ -501,7 +501,7 @@ impl Span {
Span::new( Span::new(
span.lo, span.lo,
end.lo, end.lo,
if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt }, if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
) )
} }
@ -611,7 +611,7 @@ impl rustc_serialize::UseSpecializedDecodable for Span {
d.read_struct("Span", 2, |d| { d.read_struct("Span", 2, |d| {
let lo = d.read_struct_field("lo", 0, Decodable::decode)?; let lo = d.read_struct_field("lo", 0, Decodable::decode)?;
let hi = d.read_struct_field("hi", 1, Decodable::decode)?; let hi = d.read_struct_field("hi", 1, Decodable::decode)?;
Ok(Span::new(lo, hi, NO_EXPANSION)) Ok(Span::with_root_ctxt(lo, hi))
}) })
} }
} }
@ -755,8 +755,6 @@ impl From<Vec<Span>> for MultiSpan {
} }
} }
pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty();
/// Identifies an offset of a multi-byte character in a `SourceFile`. /// Identifies an offset of a multi-byte character in a `SourceFile`.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)] #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub struct MultiByteChar { pub struct MultiByteChar {

View file

@ -14,7 +14,6 @@ use std::fmt;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::str; use std::str;
use crate::hygiene::SyntaxContext;
use crate::{Span, DUMMY_SP, GLOBALS}; use crate::{Span, DUMMY_SP, GLOBALS};
#[cfg(test)] #[cfg(test)]
@ -745,25 +744,25 @@ impl Ident {
Ident { name, span } Ident { name, span }
} }
/// Constructs a new identifier with an empty syntax context. /// Constructs a new identifier with a dummy span.
#[inline] #[inline]
pub const fn with_empty_ctxt(name: Symbol) -> Ident { pub const fn with_dummy_span(name: Symbol) -> Ident {
Ident::new(name, DUMMY_SP) Ident::new(name, DUMMY_SP)
} }
#[inline] #[inline]
pub fn invalid() -> Ident { pub fn invalid() -> Ident {
Ident::with_empty_ctxt(kw::Invalid) Ident::with_dummy_span(kw::Invalid)
} }
/// Maps an interned string to an identifier with an empty syntax context. /// Maps an interned string to an identifier with an empty syntax context.
pub fn from_interned_str(string: InternedString) -> Ident { pub fn from_interned_str(string: InternedString) -> Ident {
Ident::with_empty_ctxt(string.as_symbol()) Ident::with_dummy_span(string.as_symbol())
} }
/// Maps a string to an identifier with an empty span. /// Maps a string to an identifier with an empty span.
pub fn from_str(string: &str) -> Ident { pub fn from_str(string: &str) -> Ident {
Ident::with_empty_ctxt(Symbol::intern(string)) Ident::with_dummy_span(Symbol::intern(string))
} }
/// Maps a string and a span to an identifier. /// Maps a string and a span to an identifier.
@ -851,7 +850,7 @@ impl fmt::Display for Ident {
impl Encodable for Ident { impl Encodable for Ident {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
if self.span.ctxt().modern() == SyntaxContext::empty() { if !self.span.modern().from_expansion() {
s.emit_str(&self.as_str()) s.emit_str(&self.as_str())
} else { // FIXME(jseyfried): intercrate hygiene } else { // FIXME(jseyfried): intercrate hygiene
let mut string = "#".to_owned(); let mut string = "#".to_owned();

View file

@ -22,7 +22,7 @@ compiler_builtins = "0.1.0"
cfg-if = "0.1.8" cfg-if = "0.1.8"
[build-dependencies] [build-dependencies]
cc = { optional = true, version = "1.0.1" } cc = { version = "1.0.1" }
[features] [features]
llvm-libunwind = ["cc"] llvm-libunwind = []

View file

@ -5,14 +5,14 @@ fn main() {
let target = env::var("TARGET").expect("TARGET was not set"); let target = env::var("TARGET").expect("TARGET was not set");
if cfg!(feature = "llvm-libunwind") && if cfg!(feature = "llvm-libunwind") &&
(target.contains("linux") || ((target.contains("linux") && !target.contains("musl")) ||
target.contains("fuchsia")) { target.contains("fuchsia")) {
// Build the unwinding from libunwind C/C++ source code. // Build the unwinding from libunwind C/C++ source code.
#[cfg(feature = "llvm-libunwind")]
llvm_libunwind::compile(); llvm_libunwind::compile();
} else if target.contains("linux") { } else if target.contains("linux") {
if target.contains("musl") { if target.contains("musl") {
// musl is handled in lib.rs // linking for musl is handled in lib.rs
llvm_libunwind::compile();
} else if !target.contains("android") { } else if !target.contains("android") {
println!("cargo:rustc-link-lib=gcc_s"); println!("cargo:rustc-link-lib=gcc_s");
} }
@ -44,7 +44,6 @@ fn main() {
} }
} }
#[cfg(feature = "llvm-libunwind")]
mod llvm_libunwind { mod llvm_libunwind {
use std::env; use std::env;
use std::path::Path; use std::path::Path;
@ -96,6 +95,15 @@ mod llvm_libunwind {
cfg.file(root.join("src").join(src)); cfg.file(root.join("src").join(src));
} }
if target_env == "musl" {
// use the same C compiler command to compile C++ code so we do not need to set up the
// C++ compiler env variables on the builders
cfg.cpp(false);
// linking for musl is handled in lib.rs
cfg.cargo_metadata(false);
println!("cargo:rustc-link-search=native={}", env::var("OUT_DIR").unwrap());
}
cfg.compile("unwind"); cfg.compile("unwind");
} }
} }
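
The musl-specific handling added above can be summarised in a small build-script sketch. It assumes a `cc` build-dependency (as in the Cargo.toml change earlier) and uses a hypothetical source path; the real script derives the file list and flags from the target:

    // build.rs sketch: on musl, compile the sources with the C compiler only,
    // suppress cc's automatic link directives (lib.rs decides how to link there),
    // but still tell rustc where the produced libunwind.a lives.
    use std::env;

    fn main() {
        let target_env = env::var("CARGO_CFG_TARGET_ENV").unwrap_or_default();

        let mut cfg = cc::Build::new();
        cfg.file("src/UnwindLevel1.c"); // hypothetical file; the real script adds many sources

        if target_env == "musl" {
            // Use the C compiler for everything so the builders need no C++ toolchain setup.
            cfg.cpp(false);
            // Don't emit cargo:rustc-link-lib metadata; linking for musl is handled in lib.rs.
            cfg.cargo_metadata(false);
            // rustc still needs to find the static library produced into OUT_DIR.
            println!("cargo:rustc-link-search=native={}", env::var("OUT_DIR").unwrap());
        }

        cfg.compile("unwind");
    }
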

Some files were not shown because too many files have changed in this diff