Auto merge of #36616 - jseyfried:load_macros_in_resolve, r=nrc
Load macros from `#[macro_use]` extern crates in `resolve`

r? @nrc
commit 5fc14c1a6f
11 changed files with 117 additions and 124 deletions
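At a glance (this summary restates the hunks below, not the original PR description): loading of `#[macro_use]` extern-crate macros moves out of `ExtCtxt`/expansion and into resolution. `rustc::middle::cstore` gains a `LoadedMacro` enum and a `CrateLoader` trait, the resolver keeps `exported_macros`, `derive_modes`, and a `crate_loader`, and `syntax::ext::base::Resolver` loses `load_crate` in favour of `add_macro(Mark, ast::MacroDef)`, `add_ext`, and `resolve_derive_mode`. The sketch below is a simplified, self-contained restatement of the new cstore interface; the stub types stand in for the real `ast`/`hir` types and are not part of the compiler.

```rust
use std::rc::Rc;

// Stand-ins for the real compiler types referenced by the trait
// (`ast::Item`, `ast::MacroDef`, `hir::map::Definitions`, and the
// `MultiItemModifier` trait object); they exist only so this sketch
// compiles on its own.
struct Item;
struct MacroDef;
struct Definitions;
struct MultiItemModifier;

// A macro obtained from an extern crate: either an exported
// `macro_rules!` definition or a custom-derive expander.
enum LoadedMacro {
    Def(MacroDef),
    CustomDerive(String, Rc<MultiItemModifier>),
}

// The hook the resolver uses to ask the crate loader for macros when it
// walks an `extern crate` item.
trait CrateLoader {
    fn load_macros(&mut self, extern_crate: &Item, allows_macros: bool) -> Vec<LoadedMacro>;
    fn process_item(&mut self, item: &Item, defs: &Definitions);
}

fn main() {} // interface sketch only; nothing to run
```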
@@ -36,9 +36,10 @@ use session::config::PanicStrategy;
use session::search_paths::PathKind;
use util::nodemap::{NodeSet, DefIdMap};
use std::path::PathBuf;
use std::rc::Rc;
use syntax::ast;
use syntax::attr;
use syntax::ext::base::LoadedMacro;
use syntax::ext::base::MultiItemModifier;
use syntax::ptr::P;
use syntax::parse::token::InternedString;
use syntax_pos::Span;

@@ -422,6 +423,11 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore {
fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") }
}

pub enum LoadedMacro {
Def(ast::MacroDef),
CustomDerive(String, Rc<MultiItemModifier>),
}

pub trait CrateLoader {
fn load_macros(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec<LoadedMacro>;
fn process_item(&mut self, item: &ast::Item, defs: &Definitions);
@@ -43,6 +43,7 @@ use super::Compilation;
use serialize::json;

use std::env;
use std::mem;
use std::ffi::{OsString, OsStr};
use std::fs;
use std::io::{self, Write};

@@ -686,6 +687,8 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session,
ret
});

krate.exported_macros = mem::replace(&mut resolver.exported_macros, Vec::new());

krate = time(time_passes, "maybe building test harness", || {
syntax::test::modify_for_testing(&sess.parse_sess,
&mut resolver,
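The driver hunk above takes the exported macros the resolver accumulates during expansion and moves them onto the crate. A tiny stand-alone illustration of that hand-off (simplified names, not compiler types):

```rust
use std::mem;

#[derive(Debug, PartialEq)]
struct MacroDef(&'static str);

struct Resolver { exported_macros: Vec<MacroDef> }
struct Crate { exported_macros: Vec<MacroDef> }

fn main() {
    let mut resolver = Resolver { exported_macros: vec![MacroDef("my_macro")] };
    let mut krate = Crate { exported_macros: Vec::new() };
    // Same move as in the driver hunk: take the resolver's list, leave it empty.
    krate.exported_macros = mem::replace(&mut resolver.exported_macros, Vec::new());
    assert_eq!(krate.exported_macros, vec![MacroDef("my_macro")]);
    assert!(resolver.exported_macros.is_empty());
}
```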
@@ -17,6 +17,7 @@ use schema::CrateRoot;

use rustc::hir::def_id::{CrateNum, DefIndex};
use rustc::hir::svh::Svh;
use rustc::middle::cstore::LoadedMacro;
use rustc::session::{config, Session};
use rustc::session::config::PanicStrategy;
use rustc::session::search_paths::PathKind;

@@ -32,7 +33,6 @@ use std::rc::Rc;
use std::fs;

use syntax::ast;
use syntax::ext::base::LoadedMacro;
use syntax::abi::Abi;
use syntax::parse;
use syntax::attr;
@@ -11,12 +11,14 @@
//! Used by `rustc` when loading a crate with exported macros.

use std::collections::HashSet;
use std::rc::Rc;
use std::env;
use std::mem;

use creader::{CrateLoader, Macros};

use rustc::hir::def_id::DefIndex;
use rustc::middle::cstore::LoadedMacro;
use rustc::session::Session;
use rustc::util::nodemap::FnvHashMap;
use rustc_back::dynamic_lib::DynamicLibrary;

@@ -24,7 +26,6 @@ use rustc_macro::TokenStream;
use rustc_macro::__internal::Registry;
use syntax::ast;
use syntax::attr;
use syntax::ext::base::LoadedMacro;
use syntax::parse::token;
use syntax_ext::deriving::custom::CustomDerive;
use syntax_pos::Span;

@@ -204,9 +205,8 @@ impl<'a> CrateLoader<'a> {
fn register_custom_derive(&mut self,
trait_name: &str,
expand: fn(TokenStream) -> TokenStream) {
let derive = Box::new(CustomDerive::new(expand));
self.0.push(LoadedMacro::CustomDerive(trait_name.to_string(),
derive));
let derive = Rc::new(CustomDerive::new(expand));
self.0.push(LoadedMacro::CustomDerive(trait_name.to_string(), derive));
}
}
@@ -53,6 +53,7 @@ use rustc::ty;
use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
use rustc::util::nodemap::{NodeMap, NodeSet, FnvHashMap, FnvHashSet};

use syntax::ext::base::MultiItemModifier;
use syntax::ext::hygiene::Mark;
use syntax::ast::{self, FloatTy};
use syntax::ast::{CRATE_NODE_ID, Name, NodeId, IntTy, UintTy};

@@ -71,6 +72,7 @@ use syntax_pos::{Span, DUMMY_SP};
use errors::DiagnosticBuilder;

use std::cell::{Cell, RefCell};
use std::rc::Rc;
use std::fmt;
use std::mem::replace;

@@ -1066,6 +1068,8 @@ pub struct Resolver<'a> {
dummy_binding: &'a NameBinding<'a>,
new_import_semantics: bool, // true if `#![feature(item_like_imports)]`

pub exported_macros: Vec<ast::MacroDef>,
pub derive_modes: FnvHashMap<Name, Rc<MultiItemModifier>>,
crate_loader: &'a mut CrateLoader,
macro_names: FnvHashSet<Name>,

@@ -1240,6 +1244,8 @@ impl<'a> Resolver<'a> {
}),
new_import_semantics: session.features.borrow().item_like_imports,

exported_macros: Vec::new(),
derive_modes: FnvHashMap(),
crate_loader: crate_loader,
macro_names: FnvHashSet(),
expansion_data: expansion_data,
@@ -9,19 +9,23 @@
// except according to those terms.

use Resolver;
use rustc::middle::cstore::LoadedMacro;
use rustc::util::nodemap::FnvHashMap;
use std::cell::RefCell;
use std::mem;
use std::rc::Rc;
use syntax::ast::{self, Name};
use syntax::errors::DiagnosticBuilder;
use syntax::ext::base::{self, LoadedMacro, MultiModifier, MultiDecorator};
use syntax::ext::base::{NormalTT, SyntaxExtension};
use syntax::ext::base::{self, MultiModifier, MultiDecorator, MultiItemModifier};
use syntax::ext::base::{NormalTT, Resolver as SyntaxResolver, SyntaxExtension};
use syntax::ext::expand::{Expansion, Invocation, InvocationKind};
use syntax::ext::hygiene::Mark;
use syntax::parse::token::intern;
use syntax::ext::tt::macro_rules;
use syntax::feature_gate::{self, emit_feature_err};
use syntax::parse::token::{self, intern};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::visit::{self, Visitor};
use syntax_pos::Span;

#[derive(Clone, Default)]
pub struct ExpansionData {

@@ -37,10 +41,6 @@ struct ModuleData {
}

impl<'a> base::Resolver for Resolver<'a> {
fn load_crate(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec<LoadedMacro> {
self.crate_loader.load_macros(extern_crate, allows_macros)
}

fn next_node_id(&mut self) -> ast::NodeId {
self.session.next_node_id()
}

@@ -52,7 +52,18 @@ impl<'a> base::Resolver for Resolver<'a> {
});
}

fn add_macro(&mut self, scope: Mark, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
fn add_macro(&mut self, scope: Mark, mut def: ast::MacroDef) {
if def.use_locally {
let ext = macro_rules::compile(&self.session.parse_sess, &def);
self.add_ext(scope, def.ident, Rc::new(ext));
}
if def.export {
def.id = self.next_node_id();
self.exported_macros.push(def);
}
}

fn add_ext(&mut self, scope: Mark, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
if let NormalTT(..) = *ext {
self.macro_names.insert(ident.name);
}

@@ -116,6 +127,10 @@ impl<'a> base::Resolver for Resolver<'a> {
err.emit();
None
}

fn resolve_derive_mode(&mut self, ident: ast::Ident) -> Option<Rc<MultiItemModifier>> {
self.derive_modes.get(&ident.name).cloned()
}
}

impl<'a> Resolver<'a> {

@@ -128,6 +143,17 @@ impl<'a> Resolver<'a> {
}
}
}

fn insert_custom_derive(&mut self, name: &str, ext: Rc<MultiItemModifier>, sp: Span) {
if !self.session.features.borrow().rustc_macro {
let diagnostic = &self.session.parse_sess.span_diagnostic;
let msg = "loading custom derive macro crates is experimentally supported";
emit_feature_err(diagnostic, "rustc_macro", sp, feature_gate::GateIssue::Language, msg);
}
if self.derive_modes.insert(token::intern(name), ext).is_some() {
self.session.span_err(sp, &format!("cannot shadow existing derive mode `{}`", name));
}
}
}

struct ExpansionVisitor<'b, 'a: 'b> {

@@ -201,6 +227,21 @@ impl<'a, 'b> Visitor for ExpansionVisitor<'a, 'b> {
visit::walk_item(self, item);
self.current_module = orig_module;
}
ast::ItemKind::ExternCrate(..) => {
// We need to error on `#[macro_use] extern crate` when it isn't at the
// crate root, because `$crate` won't work properly.
// FIXME(jseyfried): This will be nicer once `ModuleData` is merged with `ModuleS`.
let is_crate_root = self.current_module.parent.as_ref().unwrap().parent.is_none();
for def in self.resolver.crate_loader.load_macros(item, is_crate_root) {
match def {
LoadedMacro::Def(def) => self.resolver.add_macro(Mark::root(), def),
LoadedMacro::CustomDerive(name, ext) => {
self.resolver.insert_custom_derive(&name, ext, item.span);
}
}
}
visit::walk_item(self, item);
}
_ => visit::walk_item(self, item),
}
}
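The `ExpansionVisitor` hunk above is the heart of the change: when resolution reaches an `extern crate` item it asks the crate loader for macros and registers each one itself, either as a `macro_rules!` definition or as a custom derive. The following is a minimal, self-contained sketch of that dispatch, with simplified stand-in types (no hygiene, spans, feature gating, or `$crate` handling):

```rust
use std::rc::Rc;

struct MacroDef { name: String }
struct CustomDerive; // stand-in for the custom-derive expander

enum LoadedMacro {
    Def(MacroDef),
    CustomDerive(String, Rc<CustomDerive>),
}

struct Resolver {
    macro_names: Vec<String>,                      // legacy macros visible to resolution
    derive_modes: Vec<(String, Rc<CustomDerive>)>, // custom derives, keyed by trait name
}

impl Resolver {
    fn add_macro(&mut self, def: MacroDef) {
        // The real method also compiles the macro_rules! body and handles
        // #[macro_export]; here we only record the name.
        self.macro_names.push(def.name);
    }
    fn insert_custom_derive(&mut self, name: &str, ext: Rc<CustomDerive>) {
        self.derive_modes.push((name.to_string(), ext));
    }
    fn load_extern_crate_macros(&mut self, loaded: Vec<LoadedMacro>) {
        // Mirrors the match in the ExpansionVisitor hunk above.
        for m in loaded {
            match m {
                LoadedMacro::Def(def) => self.add_macro(def),
                LoadedMacro::CustomDerive(name, ext) => self.insert_custom_derive(&name, ext),
            }
        }
    }
}

fn main() {
    let mut resolver = Resolver { macro_names: Vec::new(), derive_modes: Vec::new() };
    resolver.load_extern_crate_macros(vec![
        LoadedMacro::Def(MacroDef { name: "vec_like".to_string() }),
        LoadedMacro::CustomDerive("MyDerive".to_string(), Rc::new(CustomDerive)),
    ]);
    assert_eq!(resolver.macro_names, vec!["vec_like".to_string()]);
    assert_eq!(resolver.derive_modes.len(), 1);
}
```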
@@ -17,8 +17,7 @@ use syntax_pos::{Span, ExpnId, NO_EXPANSION};
use errors::DiagnosticBuilder;
use ext::expand::{self, Invocation, Expansion};
use ext::hygiene::Mark;
use ext::tt::macro_rules;
use fold;
use fold::{self, Folder};
use parse;
use parse::parser::{self, Parser};
use parse::token;

@@ -26,10 +25,7 @@ use parse::token::{InternedString, str_to_ident};
use ptr::P;
use std_inject;
use util::small_vector::SmallVector;
use fold::Folder;
use feature_gate;

use std::collections::HashMap;
use std::path::PathBuf;
use std::rc::Rc;
use std::default::Default;

@@ -659,35 +655,30 @@ pub enum SyntaxExtension {
pub type NamedSyntaxExtension = (Name, SyntaxExtension);

pub trait Resolver {
fn load_crate(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec<LoadedMacro>;
fn next_node_id(&mut self) -> ast::NodeId;

fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion);
fn add_macro(&mut self, scope: Mark, ident: ast::Ident, ext: Rc<SyntaxExtension>);
fn add_macro(&mut self, scope: Mark, def: ast::MacroDef);
fn add_ext(&mut self, scope: Mark, ident: ast::Ident, ext: Rc<SyntaxExtension>);
fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec<Mark>);

fn find_attr_invoc(&mut self, attrs: &mut Vec<Attribute>) -> Option<Attribute>;
fn resolve_invoc(&mut self, scope: Mark, invoc: &Invocation) -> Option<Rc<SyntaxExtension>>;
}

pub enum LoadedMacro {
Def(ast::MacroDef),
CustomDerive(String, Box<MultiItemModifier>),
fn resolve_derive_mode(&mut self, ident: ast::Ident) -> Option<Rc<MultiItemModifier>>;
}

pub struct DummyResolver;

impl Resolver for DummyResolver {
fn load_crate(&mut self, _extern_crate: &ast::Item, _allows_macros: bool) -> Vec<LoadedMacro> {
Vec::new()
}
fn next_node_id(&mut self) -> ast::NodeId { ast::DUMMY_NODE_ID }

fn visit_expansion(&mut self, _invoc: Mark, _expansion: &Expansion) {}
fn add_macro(&mut self, _scope: Mark, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
fn add_macro(&mut self, _scope: Mark, _def: ast::MacroDef) {}
fn add_ext(&mut self, _scope: Mark, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
fn add_expansions_at_stmt(&mut self, _id: ast::NodeId, _macros: Vec<Mark>) {}

fn find_attr_invoc(&mut self, _attrs: &mut Vec<Attribute>) -> Option<Attribute> { None }
fn resolve_derive_mode(&mut self, _ident: ast::Ident) -> Option<Rc<MultiItemModifier>> { None }
fn resolve_invoc(&mut self, _scope: Mark, _invoc: &Invocation) -> Option<Rc<SyntaxExtension>> {
None
}

@@ -717,8 +708,6 @@ pub struct ExtCtxt<'a> {
pub ecfg: expand::ExpansionConfig<'a>,
pub crate_root: Option<&'static str>,
pub resolver: &'a mut Resolver,
pub exported_macros: Vec<ast::MacroDef>,
pub derive_modes: HashMap<InternedString, Box<MultiItemModifier>>,
pub current_expansion: ExpansionData,
}

@@ -732,9 +721,7 @@ impl<'a> ExtCtxt<'a> {
cfg: cfg,
ecfg: ecfg,
crate_root: None,
exported_macros: Vec::new(),
resolver: resolver,
derive_modes: HashMap::new(),
current_expansion: ExpansionData {
mark: Mark::root(),
depth: 0,

@@ -811,31 +798,6 @@ impl<'a> ExtCtxt<'a> {
}
pub fn bt_pop(&mut self) {}

pub fn insert_macro(&mut self, def: ast::MacroDef) {
if def.export {
self.exported_macros.push(def.clone());
}
if def.use_locally {
let ext = macro_rules::compile(self, &def);
self.resolver.add_macro(self.current_expansion.mark, def.ident, Rc::new(ext));
}
}

pub fn insert_custom_derive(&mut self, name: &str, ext: Box<MultiItemModifier>, sp: Span) {
if !self.ecfg.enable_rustc_macro() {
feature_gate::emit_feature_err(&self.parse_sess.span_diagnostic,
"rustc_macro",
sp,
feature_gate::GateIssue::Language,
"loading custom derive macro crates \
is experimentally supported");
}
let name = token::intern_and_get_ident(name);
if self.derive_modes.insert(name.clone(), ext).is_some() {
self.span_err(sp, &format!("cannot shadow existing derive mode `{}`", name));
}
}

pub fn struct_span_warn(&self,
sp: Span,
msg: &str)

@@ -922,7 +884,7 @@ impl<'a> ExtCtxt<'a> {

for (name, extension) in user_exts {
let ident = ast::Ident::with_empty_ctxt(name);
self.resolver.add_macro(Mark::root(), ident, Rc::new(extension));
self.resolver.add_ext(Mark::root(), ident, Rc::new(extension));
}

let mut module = ModuleData {
@@ -199,11 +199,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
},
_ => unreachable!(),
};
krate.exported_macros = mem::replace(&mut self.cx.exported_macros, Vec::new());

for def in &mut krate.exported_macros {
def.id = self.cx.resolver.next_node_id()
}

if self.cx.parse_sess.span_diagnostic.err_count() > err_count {
self.cx.parse_sess.span_diagnostic.abort_if_errors();

@@ -672,20 +667,6 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
self.cx.current_expansion.module = orig_module;
return result;
}
ast::ItemKind::ExternCrate(..) => {
// We need to error on `#[macro_use] extern crate` when it isn't at the
// crate root, because `$crate` won't work properly.
let is_crate_root = self.cx.current_expansion.module.mod_path.len() == 1;
for def in self.cx.resolver.load_crate(&*item, is_crate_root) {
match def {
LoadedMacro::Def(def) => self.cx.insert_macro(def),
LoadedMacro::CustomDerive(name, ext) => {
self.cx.insert_custom_derive(&name, ext, item.span);
}
}
}
noop_fold_item(item, self)
},
_ => noop_fold_item(item, self),
}
}
@@ -16,6 +16,7 @@ use ext::placeholders;
use ext::tt::macro_parser::{Success, Error, Failure};
use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
use ext::tt::macro_parser::parse;
use parse::ParseSess;
use parse::lexer::new_tt_reader;
use parse::parser::{Parser, Restrictions};
use parse::token::{self, gensym_ident, NtTT, Token};

@@ -204,7 +205,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
_ => cx.span_bug(sp, "malformed macro rhs"),
};
// rhs has holes ( `$id` and `$(...)` that need filled)
let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic,
let trncbr = new_tt_reader(&cx.parse_sess.span_diagnostic,
Some(named_matches),
imported_from,
rhs);

@@ -261,14 +262,15 @@ impl IdentMacroExpander for MacroRulesExpander {
attrs: attrs,
};

cx.insert_macro(def.clone());

// If keep_macs is true, expands to a MacEager::items instead.
if cx.ecfg.keep_macs {
let result = if cx.ecfg.keep_macs {
MacEager::items(placeholders::reconstructed_macro_rules(&def).make_items())
} else {
MacEager::items(placeholders::macro_scope_placeholder().make_items())
}
};

cx.resolver.add_macro(cx.current_expansion.mark, def);
result
}
}

@@ -278,9 +280,7 @@ impl IdentMacroExpander for MacroRulesExpander {
// Holy self-referential!

/// Converts a `macro_rules!` invocation into a syntax extension.
pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
def: &ast::MacroDef) -> SyntaxExtension {
pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
let lhs_nm = gensym_ident("lhs");
let rhs_nm = gensym_ident("rhs");

@@ -312,19 +312,12 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
];

// Parse the macro_rules! invocation (`none` is for no interpolations):
let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic,
None,
None,
def.body.clone());
let arg_reader = new_tt_reader(&sess.span_diagnostic, None, None, def.body.clone());

let argument_map = match parse(cx.parse_sess(),
cx.cfg(),
arg_reader,
&argument_gram) {
let argument_map = match parse(sess, Vec::new(), arg_reader, &argument_gram) {
Success(m) => m,
Failure(sp, str) | Error(sp, str) => {
panic!(cx.parse_sess().span_diagnostic
.span_fatal(sp.substitute_dummy(def.span), &str[..]));
panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &str));
}
};

@@ -335,27 +328,27 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
MatchedSeq(ref s, _) => {
s.iter().map(|m| match **m {
MatchedNonterminal(NtTT(ref tt)) => {
valid &= check_lhs_nt_follows(cx, tt);
valid &= check_lhs_nt_follows(sess, tt);
(**tt).clone()
}
_ => cx.span_bug(def.span, "wrong-structured lhs")
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
}).collect()
}
_ => cx.span_bug(def.span, "wrong-structured lhs")
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
};

let rhses = match **argument_map.get(&rhs_nm).unwrap() {
MatchedSeq(ref s, _) => {
s.iter().map(|m| match **m {
MatchedNonterminal(NtTT(ref tt)) => (**tt).clone(),
_ => cx.span_bug(def.span, "wrong-structured rhs")
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
}).collect()
}
_ => cx.span_bug(def.span, "wrong-structured rhs")
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
};

for rhs in &rhses {
valid &= check_rhs(cx, rhs);
valid &= check_rhs(sess, rhs);
}

let exp: Box<_> = Box::new(MacroRulesMacroExpander {

@@ -369,14 +362,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
NormalTT(exp, Some(def.span), def.allow_internal_unstable)
}

fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &TokenTree) -> bool {
fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
match lhs {
&TokenTree::Delimited(_, ref tts) => check_matcher(cx, &tts.tts),
&TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts),
_ => {
cx.span_err(lhs.get_span(), "invalid macro matcher; matchers must \
be contained in balanced delimiters");
let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
sess.span_diagnostic.span_err(lhs.get_span(), msg);
false
}
}

@@ -384,20 +377,20 @@ fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &TokenTree) -> bool {
// after parsing/expansion. we can report every error in every macro this way.
}

fn check_rhs(cx: &mut ExtCtxt, rhs: &TokenTree) -> bool {
fn check_rhs(sess: &ParseSess, rhs: &TokenTree) -> bool {
match *rhs {
TokenTree::Delimited(..) => return true,
_ => cx.span_err(rhs.get_span(), "macro rhs must be delimited")
_ => sess.span_diagnostic.span_err(rhs.get_span(), "macro rhs must be delimited")
}
false
}

fn check_matcher(cx: &mut ExtCtxt, matcher: &[TokenTree]) -> bool {
fn check_matcher(sess: &ParseSess, matcher: &[TokenTree]) -> bool {
let first_sets = FirstSets::new(matcher);
let empty_suffix = TokenSet::empty();
let err = cx.parse_sess.span_diagnostic.err_count();
check_matcher_core(cx, &first_sets, matcher, &empty_suffix);
err == cx.parse_sess.span_diagnostic.err_count()
let err = sess.span_diagnostic.err_count();
check_matcher_core(sess, &first_sets, matcher, &empty_suffix);
err == sess.span_diagnostic.err_count()
}

// The FirstSets for a matcher is a mapping from subsequences in the

@@ -635,7 +628,7 @@ impl TokenSet {
//
// Requires that `first_sets` is pre-computed for `matcher`;
// see `FirstSets::new`.
fn check_matcher_core(cx: &mut ExtCtxt,
fn check_matcher_core(sess: &ParseSess,
first_sets: &FirstSets,
matcher: &[TokenTree],
follow: &TokenSet) -> TokenSet {

@@ -667,7 +660,8 @@ fn check_matcher_core(cx: &mut ExtCtxt,
TokenTree::Token(sp, ref tok) => {
let can_be_followed_by_any;
if let Err(bad_frag) = has_legal_fragment_specifier(tok) {
cx.struct_span_err(sp, &format!("invalid fragment specifier `{}`", bad_frag))
let msg = format!("invalid fragment specifier `{}`", bad_frag);
sess.span_diagnostic.struct_span_err(sp, &msg)
.help("valid fragment specifiers are `ident`, `block`, \
`stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
and `item`")

@@ -692,7 +686,7 @@ fn check_matcher_core(cx: &mut ExtCtxt,
}
TokenTree::Delimited(_, ref d) => {
let my_suffix = TokenSet::singleton((d.close_span, Token::CloseDelim(d.delim)));
check_matcher_core(cx, first_sets, &d.tts, &my_suffix);
check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
// don't track non NT tokens
last.replace_with_irrelevant();

@@ -724,7 +718,7 @@ fn check_matcher_core(cx: &mut ExtCtxt,
// At this point, `suffix_first` is built, and
// `my_suffix` is some TokenSet that we can use
// for checking the interior of `seq_rep`.
let next = check_matcher_core(cx, first_sets, &seq_rep.tts, my_suffix);
let next = check_matcher_core(sess, first_sets, &seq_rep.tts, my_suffix);
if next.maybe_empty {
last.add_all(&next);
} else {

@@ -744,9 +738,9 @@ fn check_matcher_core(cx: &mut ExtCtxt,
'each_last: for &(_sp, ref t) in &last.tokens {
if let MatchNt(ref name, ref frag_spec) = *t {
for &(sp, ref next_token) in &suffix_first.tokens {
match is_in_follow(cx, next_token, &frag_spec.name.as_str()) {
match is_in_follow(next_token, &frag_spec.name.as_str()) {
Err((msg, help)) => {
cx.struct_span_err(sp, &msg).help(help).emit();
sess.span_diagnostic.struct_span_err(sp, &msg).help(help).emit();
// don't bother reporting every source of
// conflict for a particular element of `last`.
continue 'each_last;

@@ -761,7 +755,7 @@ fn check_matcher_core(cx: &mut ExtCtxt,
"may be"
};

cx.span_err(
sess.span_diagnostic.span_err(
sp,
&format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
is not allowed for `{frag}` fragments",

@@ -818,7 +812,7 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool {
/// break macros that were relying on that binary operator as a
/// separator.
// when changing this do not forget to update doc/book/macros.md!
fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> {
fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> {
if let &CloseDelim(_) = tok {
// closing a token tree can never be matched by any fragment;
// iow, we always require that `(` and `)` match, etc.
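Every hunk in this file follows the same shape: functions that only needed the expansion context for diagnostics (`compile`, `check_lhs_nt_follows`, `check_rhs`, `check_matcher`, `check_matcher_core`, `is_in_follow`) now take `&ParseSess`, so a `macro_rules!` definition can be compiled from the resolver, where no `ExtCtxt` exists. A hypothetical miniature of that refactoring shape (the `ParseSess` here is a stub, not the real one):

```rust
use std::cell::Cell;

// Stub session that only counts and prints errors.
struct ParseSess { errors: Cell<usize> }

impl ParseSess {
    fn span_err(&self, msg: &str) {
        self.errors.set(self.errors.get() + 1);
        eprintln!("error: {}", msg);
    }
    fn err_count(&self) -> usize { self.errors.get() }
}

// Before: fn check_rhs(cx: &mut ExtCtxt, rhs: &TokenTree) -> bool
// After (the shape used in this commit): the session alone is enough.
fn check_rhs(sess: &ParseSess, delimited: bool) -> bool {
    if delimited { return true; }
    sess.span_err("macro rhs must be delimited");
    false
}

fn main() {
    let sess = ParseSess { errors: Cell::new(0) };
    assert!(check_rhs(&sess, true));
    assert!(!check_rhs(&sess, false));
    assert_eq!(sess.err_count(), 1);
}
```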
@@ -195,7 +195,8 @@ pub fn expand_derive(cx: &mut ExtCtxt,
// If custom derive extensions end up threading through the `#[derive]`
// attribute, we'll get called again later on to continue expanding
// those modes.
} else if let Some(ext) = cx.derive_modes.remove(&tname) {
} else if let Some(ext) =
cx.resolver.resolve_derive_mode(ast::Ident::with_empty_ctxt(intern(&tname))) {
let remaining_derives = iter.cloned().collect::<Vec<_>>();
if remaining_derives.len() > 0 {
let list = cx.meta_list(titem.span,

@@ -214,7 +215,6 @@ pub fn expand_derive(cx: &mut ExtCtxt,
let item = Annotatable::Item(item);
let mut items = ext.expand(cx, mitem.span, &mitem, item);
items.extend(other_items);
cx.derive_modes.insert(tname.clone(), ext);
return items

// If we've gotten this far then it means that we're in the territory of
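In the `expand_derive` hunks above, the old remove/re-insert dance on `cx.derive_modes` disappears: derive modes now live in the resolver as `Rc<MultiItemModifier>`, so `resolve_derive_mode` can hand back a clone and leave the table untouched. A hypothetical miniature of that design choice:

```rust
use std::collections::HashMap;
use std::rc::Rc;

struct Expander; // stand-in for a MultiItemModifier implementation

struct Resolver { derive_modes: HashMap<String, Rc<Expander>> }

impl Resolver {
    fn resolve_derive_mode(&self, name: &str) -> Option<Rc<Expander>> {
        // Clone the Rc and keep the entry, so repeated lookups keep working.
        self.derive_modes.get(name).cloned()
    }
}

fn main() {
    let mut resolver = Resolver { derive_modes: HashMap::new() };
    resolver.derive_modes.insert("MyDerive".to_string(), Rc::new(Expander));
    let first = resolver.resolve_derive_mode("MyDerive");
    let second = resolver.resolve_derive_mode("MyDerive"); // entry still present
    assert!(first.is_some() && second.is_some());
}
```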
@@ -57,7 +57,7 @@ use syntax::parse::token::intern;

pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, enable_quotes: bool) {
let mut register = |name, ext| {
resolver.add_macro(Mark::root(), ast::Ident::with_empty_ctxt(intern(name)), Rc::new(ext));
resolver.add_ext(Mark::root(), ast::Ident::with_empty_ctxt(intern(name)), Rc::new(ext));
};

register("macro_rules", IdentTT(Box::new(MacroRulesExpander), None, false));