diff --git a/Cargo.lock b/Cargo.lock index ff3362be99e..18d2fb9d56e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1003,7 +1003,7 @@ dependencies = [ "ra_arena 0.1.0", "ra_cfg 0.1.0", "ra_db 0.1.0", - "ra_hir_def 0.1.0", + "ra_hir_expand 0.1.0", "ra_mbe 0.1.0", "ra_prof 0.1.0", "ra_syntax 0.1.0", @@ -1014,12 +1014,16 @@ dependencies = [ ] [[package]] -name = "ra_hir_def" +name = "ra_hir_expand" version = "0.1.0" dependencies = [ + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "ra_arena 0.1.0", "ra_db 0.1.0", + "ra_mbe 0.1.0", + "ra_prof 0.1.0", "ra_syntax 0.1.0", + "ra_tt 0.1.0", ] [[package]] diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml index 67f8c4946ca..143dae6bdcd 100644 --- a/crates/ra_hir/Cargo.toml +++ b/crates/ra_hir/Cargo.toml @@ -19,7 +19,7 @@ ra_cfg = { path = "../ra_cfg" } ra_db = { path = "../ra_db" } mbe = { path = "../ra_mbe", package = "ra_mbe" } tt = { path = "../ra_tt", package = "ra_tt" } -hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } +hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" } test_utils = { path = "../test_utils" } ra_prof = { path = "../ra_prof" } diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index 7abbf8dca1f..6d34c671dce 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use ra_db::{salsa, SourceDatabase}; -use ra_syntax::{ast, Parse, SmolStr, SyntaxNode}; +use ra_syntax::{ast, SmolStr}; use crate::{ adt::{EnumData, StructData}, @@ -19,9 +19,13 @@ use crate::{ InferenceResult, Substs, Ty, TypableDef, TypeCtor, }, type_alias::TypeAliasData, - AstIdMap, Const, ConstData, Crate, DefWithBody, Enum, ErasedFileAstId, ExprScopes, FnData, - Function, HirFileId, MacroCallLoc, MacroDefId, Module, Static, Struct, StructField, Trait, - TypeAlias, + Const, ConstData, Crate, DefWithBody, Enum, ExprScopes, FnData, Function, HirFileId, Module, + Static, Struct, StructField, Trait, TypeAlias, +}; + +pub use hir_expand::db::{ + AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery, + ParseMacroQuery, }; /// We store all interned things in the single QueryGroup. @@ -31,8 +35,6 @@ use crate::{ /// two. #[salsa::query_group(InternDatabaseStorage)] pub trait InternDatabase: SourceDatabase { - #[salsa::interned] - fn intern_macro(&self, macro_call: MacroCallLoc) -> ids::MacroCallId; #[salsa::interned] fn intern_function(&self, loc: ids::ItemLoc) -> ids::FunctionId; #[salsa::interned] @@ -55,38 +57,10 @@ pub trait InternDatabase: SourceDatabase { fn intern_impl(&self, impl_: Impl) -> ids::GlobalImplId; } -/// This database has access to source code, so queries here are not really -/// incremental. 
-#[salsa::query_group(AstDatabaseStorage)] -pub trait AstDatabase: InternDatabase { - #[salsa::invoke(crate::source_id::ast_id_map_query)] - fn ast_id_map(&self, file_id: HirFileId) -> Arc; - - #[salsa::transparent] - #[salsa::invoke(crate::source_id::file_item_query)] - fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> SyntaxNode; - - #[salsa::transparent] - #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)] - fn parse_or_expand(&self, file_id: HirFileId) -> Option; - - #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)] - fn parse_macro(&self, macro_file: ids::MacroFile) -> Option>; - - #[salsa::invoke(crate::ids::macro_def_query)] - fn macro_def(&self, macro_id: MacroDefId) -> Option>; - - #[salsa::invoke(crate::ids::macro_arg_query)] - fn macro_arg(&self, macro_call: ids::MacroCallId) -> Option>; - - #[salsa::invoke(crate::ids::macro_expand_query)] - fn macro_expand(&self, macro_call: ids::MacroCallId) -> Result, String>; -} - // This database uses `AstDatabase` internally, #[salsa::query_group(DefDatabaseStorage)] #[salsa::requires(AstDatabase)] -pub trait DefDatabase: InternDatabase + HirDebugDatabase { +pub trait DefDatabase: InternDatabase + HirDebugDatabase + AstDatabase { #[salsa::invoke(crate::adt::StructData::struct_data_query)] fn struct_data(&self, s: Struct) -> Arc; diff --git a/crates/ra_hir/src/debug.rs b/crates/ra_hir/src/debug.rs index 48b69000bcd..4f3e922c3c5 100644 --- a/crates/ra_hir/src/debug.rs +++ b/crates/ra_hir/src/debug.rs @@ -36,12 +36,6 @@ impl Module { } } -impl HirFileId { - pub fn debug(self, db: &impl HirDebugDatabase) -> impl fmt::Debug + '_ { - debug_fn(move |fmt| db.debug_hir_file_id(self, fmt)) - } -} - pub trait HirDebugHelper: HirDatabase { fn crate_name(&self, _krate: CrateId) -> Option { None diff --git a/crates/ra_hir/src/expr/lower.rs b/crates/ra_hir/src/expr/lower.rs index 24733b3de38..b3a9a2e6b9a 100644 --- a/crates/ra_hir/src/expr/lower.rs +++ b/crates/ra_hir/src/expr/lower.rs @@ -465,7 +465,7 @@ where if let Some(path) = e.path().and_then(|path| self.parse_path(path)) { if let Some(def) = self.resolver.resolve_path_as_macro(self.db, &path) { - let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); + let call_id = self.db.intern_macro(MacroCallLoc { def: def.id, ast_id }); let file_id = call_id.as_file(MacroFileKind::Expr); if let Some(node) = self.db.parse_or_expand(file_id) { if let Some(expr) = ast::Expr::cast(node) { diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs index f141206c648..dea288eb7bc 100644 --- a/crates/ra_hir/src/ids.rs +++ b/crates/ra_hir/src/ids.rs @@ -1,168 +1,21 @@ -//! FIXME: write short doc here +//! hir makes heavy use of ids: integer (u32) handlers to various things. You +//! can think of id as a pointer (but without a lifetime) or a file descriptor +//! (but for hir objects). +//! +//! This module defines a bunch of ids we are using. The most important ones are +//! probably `HirFileId` and `DefId`. -use std::{ - hash::{Hash, Hasher}, - sync::Arc, -}; +use std::hash::{Hash, Hasher}; -use mbe::MacroRules; -use ra_db::{salsa, FileId}; -use ra_prof::profile; -use ra_syntax::{ast, AstNode, Parse, SyntaxNode}; +use ra_db::salsa; +use ra_syntax::{ast, AstNode}; use crate::{ db::{AstDatabase, InternDatabase}, - AstId, Crate, FileAstId, Module, Source, + AstId, FileAstId, Module, Source, }; -/// hir makes heavy use of ids: integer (u32) handlers to various things. 
You -/// can think of id as a pointer (but without a lifetime) or a file descriptor -/// (but for hir objects). -/// -/// This module defines a bunch of ids we are using. The most important ones are -/// probably `HirFileId` and `DefId`. - -/// Input to the analyzer is a set of files, where each file is identified by -/// `FileId` and contains source code. However, another source of source code in -/// Rust are macros: each macro can be thought of as producing a "temporary -/// file". To assign an id to such a file, we use the id of the macro call that -/// produced the file. So, a `HirFileId` is either a `FileId` (source code -/// written by user), or a `MacroCallId` (source code produced by macro). -/// -/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file -/// containing the call plus the offset of the macro call in the file. Note that -/// this is a recursive definition! However, the size_of of `HirFileId` is -/// finite (because everything bottoms out at the real `FileId`) and small -/// (`MacroCallId` uses the location interner). -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct HirFileId(HirFileIdRepr); - -impl HirFileId { - /// For macro-expansion files, returns the file original source file the - /// expansion originated from. - pub fn original_file(self, db: &impl InternDatabase) -> FileId { - match self.0 { - HirFileIdRepr::File(file_id) => file_id, - HirFileIdRepr::Macro(macro_file) => { - let loc = macro_file.macro_call_id.loc(db); - loc.ast_id.file_id().original_file(db) - } - } - } - - /// Get the crate which the macro lives in, if it is a macro file. - pub(crate) fn macro_crate(self, db: &impl AstDatabase) -> Option { - match self.0 { - HirFileIdRepr::File(_) => None, - HirFileIdRepr::Macro(macro_file) => { - let loc = macro_file.macro_call_id.loc(db); - Some(loc.def.krate) - } - } - } - - pub(crate) fn parse_or_expand_query( - db: &impl AstDatabase, - file_id: HirFileId, - ) -> Option { - match file_id.0 { - HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().clone()), - HirFileIdRepr::Macro(macro_file) => { - db.parse_macro(macro_file).map(|it| it.syntax_node()) - } - } - } - - pub(crate) fn parse_macro_query( - db: &impl AstDatabase, - macro_file: MacroFile, - ) -> Option> { - let _p = profile("parse_macro_query"); - let macro_call_id = macro_file.macro_call_id; - let tt = db - .macro_expand(macro_call_id) - .map_err(|err| { - // Note: - // The final goal we would like to make all parse_macro success, - // such that the following log will not call anyway. 
- log::warn!("fail on macro_parse: (reason: {})", err,); - }) - .ok()?; - match macro_file.macro_file_kind { - MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax), - MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax), - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -enum HirFileIdRepr { - File(FileId), - Macro(MacroFile), -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroFile { - macro_call_id: MacroCallId, - macro_file_kind: MacroFileKind, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub(crate) enum MacroFileKind { - Items, - Expr, -} - -impl From for HirFileId { - fn from(file_id: FileId) -> HirFileId { - HirFileId(HirFileIdRepr::File(file_id)) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroDefId { - pub(crate) ast_id: AstId, - pub(crate) krate: Crate, -} - -pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option> { - let macro_call = id.ast_id.to_node(db); - let arg = macro_call.token_tree()?; - let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| { - log::warn!("fail on macro_def to token tree: {:#?}", arg); - None - })?; - let rules = MacroRules::parse(&tt).ok().or_else(|| { - log::warn!("fail on macro_def parse: {:#?}", tt); - None - })?; - Some(Arc::new(rules)) -} - -pub(crate) fn macro_arg_query(db: &impl AstDatabase, id: MacroCallId) -> Option> { - let loc = id.loc(db); - let macro_call = loc.ast_id.to_node(db); - let arg = macro_call.token_tree()?; - let (tt, _) = mbe::ast_to_token_tree(&arg)?; - Some(Arc::new(tt)) -} - -pub(crate) fn macro_expand_query( - db: &impl AstDatabase, - id: MacroCallId, -) -> Result, String> { - let loc = id.loc(db); - let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?; - - let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?; - let tt = macro_rules.expand(¯o_arg).map_err(|err| format!("{:?}", err))?; - // Set a hard limit for the expanded tt - let count = tt.count(); - if count > 65536 { - return Err(format!("Total tokens count exceed limit : count = {}", count)); - } - Ok(Arc::new(tt)) -} +pub use hir_expand::{HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, MacroFileKind}; macro_rules! impl_intern_key { ($name:ident) => { @@ -177,35 +30,6 @@ macro_rules! impl_intern_key { }; } -/// `MacroCallId` identifies a particular macro invocation, like -/// `println!("Hello, {}", world)`. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroCallId(salsa::InternId); -impl_intern_key!(MacroCallId); - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct MacroCallLoc { - pub(crate) def: MacroDefId, - pub(crate) ast_id: AstId, -} - -impl MacroCallId { - pub(crate) fn loc(self, db: &impl InternDatabase) -> MacroCallLoc { - db.lookup_intern_macro(self) - } - - pub(crate) fn as_file(self, kind: MacroFileKind) -> HirFileId { - let macro_file = MacroFile { macro_call_id: self, macro_file_kind: kind }; - HirFileId(HirFileIdRepr::Macro(macro_file)) - } -} - -impl MacroCallLoc { - pub(crate) fn id(self, db: &impl InternDatabase) -> MacroCallId { - db.intern_macro(self) - } -} - #[derive(Debug)] pub struct ItemLoc { pub(crate) module: Module, @@ -244,7 +68,7 @@ impl<'a, DB> LocationCtx<&'a DB> { } } -impl<'a, DB: AstDatabase> LocationCtx<&'a DB> { +impl<'a, DB: AstDatabase + InternDatabase> LocationCtx<&'a DB> { pub(crate) fn to_def(self, ast: &N) -> DEF where N: AstNode, @@ -258,7 +82,7 @@ pub(crate) trait AstItemDef: salsa::InternKey + Clone { fn intern(db: &impl InternDatabase, loc: ItemLoc) -> Self; fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc; - fn from_ast(ctx: LocationCtx<&impl AstDatabase>, ast: &N) -> Self { + fn from_ast(ctx: LocationCtx<&(impl AstDatabase + InternDatabase)>, ast: &N) -> Self { let items = ctx.db.ast_id_map(ctx.file_id); let item_id = items.ast_id(ast); Self::from_ast_id(ctx, item_id) @@ -267,7 +91,7 @@ pub(crate) trait AstItemDef: salsa::InternKey + Clone { let loc = ItemLoc { module: ctx.module, ast_id: AstId::new(ctx.file_id, ast_id) }; Self::intern(ctx.db, loc) } - fn source(self, db: &impl AstDatabase) -> Source { + fn source(self, db: &(impl AstDatabase + InternDatabase)) -> Source { let loc = self.lookup_intern(db); let ast = loc.ast_id.to_node(db); Source { file_id: loc.ast_id.file_id(), ast } diff --git a/crates/ra_hir/src/impl_block.rs b/crates/ra_hir/src/impl_block.rs index 9c739f3f166..1a52236806d 100644 --- a/crates/ra_hir/src/impl_block.rs +++ b/crates/ra_hir/src/impl_block.rs @@ -263,7 +263,7 @@ impl ModuleImplBlocks { { if let Some(def) = self.module.resolver(db).resolve_path_as_macro(db, &path) { - let call_id = MacroCallLoc { def: def.id, ast_id }.id(db); + let call_id = db.intern_macro(MacroCallLoc { def: def.id, ast_id }); let file_id = call_id.as_file(MacroFileKind::Items); if let Some(item_list) = db.parse_or_expand(file_id).and_then(ast::MacroItems::cast) diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs index ca261e8f541..0f2d233bb1b 100644 --- a/crates/ra_hir/src/lib.rs +++ b/crates/ra_hir/src/lib.rs @@ -35,7 +35,6 @@ pub mod mock; mod path; pub mod source_binder; -mod source_id; mod ids; mod name; mod nameres; @@ -60,14 +59,14 @@ pub mod from_source; #[cfg(test)] mod marks; -use crate::{ - ids::MacroFileKind, - name::AsName, - resolve::Resolver, - source_id::{AstId, FileAstId}, +use hir_expand::{ + ast_id_map::{AstIdMap, FileAstId}, + AstId, }; -pub use self::{ +use crate::{ids::MacroFileKind, name::AsName, resolve::Resolver}; + +pub use crate::{ adt::VariantDef, either::Either, expr::ExprScopes, @@ -80,7 +79,6 @@ pub use self::{ path::{Path, PathKind}, resolve::ScopeDef, source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer}, - source_id::{AstIdMap, ErasedFileAstId}, ty::{ display::HirDisplay, ApplicationTy, CallableDef, Substs, TraitRef, Ty, TypeCtor, TypeWalk, }, diff --git a/crates/ra_hir/src/nameres/collector.rs b/crates/ra_hir/src/nameres/collector.rs index 
4f363df3691..dc591e8d349 100644 --- a/crates/ra_hir/src/nameres/collector.rs +++ b/crates/ra_hir/src/nameres/collector.rs @@ -448,7 +448,7 @@ where ); if let Some(def) = resolved_res.resolved_def.get_macros() { - let call_id = MacroCallLoc { def: def.id, ast_id: *ast_id }.id(self.db); + let call_id = self.db.intern_macro(MacroCallLoc { def: def.id, ast_id: *ast_id }); resolved.push((*module_id, call_id, def.id)); res = ReachedFixedPoint::No; return false; @@ -676,7 +676,8 @@ where // Case 1: macro rules, define a macro in crate-global mutable scope if is_macro_rules(&mac.path) { if let Some(name) = &mac.name { - let macro_id = MacroDefId { ast_id, krate: self.def_collector.def_map.krate }; + let macro_id = + MacroDefId { ast_id, krate: self.def_collector.def_map.krate.crate_id }; let macro_ = MacroDef { id: macro_id }; self.def_collector.define_macro(self.module_id, name.clone(), macro_, mac.export); } @@ -689,7 +690,7 @@ where self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name) }) { let def = macro_def.id; - let macro_call_id = MacroCallLoc { def, ast_id }.id(self.def_collector.db); + let macro_call_id = self.def_collector.db.intern_macro(MacroCallLoc { def, ast_id }); self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, def); return; diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs index 394617e1a4c..bbe536bcb4a 100644 --- a/crates/ra_hir/src/path.rs +++ b/crates/ra_hir/src/path.rs @@ -66,7 +66,12 @@ impl Path { mut cb: impl FnMut(Path, &ast::UseTree, bool, Option), ) { if let Some(tree) = item_src.ast.use_tree() { - expand_use_tree(None, tree, &|| item_src.file_id.macro_crate(db), &mut cb); + expand_use_tree( + None, + tree, + &|| item_src.file_id.macro_crate(db).map(|crate_id| Crate { crate_id }), + &mut cb, + ); } } @@ -90,7 +95,7 @@ impl Path { /// It correctly handles `$crate` based path from macro call. pub fn from_src(source: Source, db: &impl AstDatabase) -> Option { let file_id = source.file_id; - Path::parse(source.ast, &|| file_id.macro_crate(db)) + Path::parse(source.ast, &|| file_id.macro_crate(db).map(|crate_id| Crate { crate_id })) } fn parse(mut path: ast::Path, macro_crate: &impl Fn() -> Option) -> Option { diff --git a/crates/ra_hir/src/source_id.rs b/crates/ra_hir/src/source_id.rs deleted file mode 100644 index 260b7966106..00000000000 --- a/crates/ra_hir/src/source_id.rs +++ /dev/null @@ -1,73 +0,0 @@ -//! FIXME: write short doc here - -use std::{ - hash::{Hash, Hasher}, - sync::Arc, -}; - -pub use hir_def::ast_id_map::{AstIdMap, ErasedFileAstId, FileAstId}; -use ra_syntax::{AstNode, SyntaxNode}; - -use crate::{db::AstDatabase, HirFileId}; - -/// `AstId` points to an AST node in any file. -/// -/// It is stable across reparses, and can be used as salsa key/value. -// FIXME: isn't this just a `Source>` ? 
-#[derive(Debug)] -pub(crate) struct AstId { - file_id: HirFileId, - file_ast_id: FileAstId, -} - -impl Clone for AstId { - fn clone(&self) -> AstId { - *self - } -} -impl Copy for AstId {} - -impl PartialEq for AstId { - fn eq(&self, other: &Self) -> bool { - (self.file_id, self.file_ast_id) == (other.file_id, other.file_ast_id) - } -} -impl Eq for AstId {} -impl Hash for AstId { - fn hash(&self, hasher: &mut H) { - (self.file_id, self.file_ast_id).hash(hasher); - } -} - -impl AstId { - pub fn new(file_id: HirFileId, file_ast_id: FileAstId) -> AstId { - AstId { file_id, file_ast_id } - } - - pub(crate) fn file_id(&self) -> HirFileId { - self.file_id - } - - pub(crate) fn to_node(&self, db: &impl AstDatabase) -> N { - let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.into()); - N::cast(syntax_node).unwrap() - } -} - -pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc { - let map = if let Some(node) = db.parse_or_expand(file_id) { - AstIdMap::from_source(&node) - } else { - AstIdMap::default() - }; - Arc::new(map) -} - -pub(crate) fn file_item_query( - db: &impl AstDatabase, - file_id: HirFileId, - ast_id: ErasedFileAstId, -) -> SyntaxNode { - let node = db.parse_or_expand(file_id).unwrap(); - db.ast_id_map(file_id)[ast_id].to_node(&node) -} diff --git a/crates/ra_hir_def/src/lib.rs b/crates/ra_hir_def/src/lib.rs deleted file mode 100644 index 4d4d2cb19c7..00000000000 --- a/crates/ra_hir_def/src/lib.rs +++ /dev/null @@ -1,7 +0,0 @@ -//! `ra_hir_def` contains initial "phases" of the compiler. Roughly, everything -//! before types. -//! -//! Note that we are in the process of moving parts of `ra_hir` into -//! `ra_hir_def`, so this crates doesn't contain a lot at the moment. - -pub mod ast_id_map; diff --git a/crates/ra_hir_def/Cargo.toml b/crates/ra_hir_expand/Cargo.toml similarity index 54% rename from crates/ra_hir_def/Cargo.toml rename to crates/ra_hir_expand/Cargo.toml index 7c57d56bd48..9bf5b791875 100644 --- a/crates/ra_hir_def/Cargo.toml +++ b/crates/ra_hir_expand/Cargo.toml @@ -1,10 +1,15 @@ [package] edition = "2018" -name = "ra_hir_def" +name = "ra_hir_expand" version = "0.1.0" authors = ["rust-analyzer developers"] [dependencies] +log = "0.4.5" + ra_arena = { path = "../ra_arena" } ra_db = { path = "../ra_db" } ra_syntax = { path = "../ra_syntax" } +ra_prof = { path = "../ra_prof" } +tt = { path = "../ra_tt", package = "ra_tt" } +mbe = { path = "../ra_mbe", package = "ra_mbe" } diff --git a/crates/ra_hir_def/src/ast_id_map.rs b/crates/ra_hir_expand/src/ast_id_map.rs similarity index 82% rename from crates/ra_hir_def/src/ast_id_map.rs rename to crates/ra_hir_expand/src/ast_id_map.rs index c3b389102ac..cb464c3ff17 100644 --- a/crates/ra_hir_def/src/ast_id_map.rs +++ b/crates/ra_hir_expand/src/ast_id_map.rs @@ -8,11 +8,10 @@ use std::{ hash::{Hash, Hasher}, marker::PhantomData, - ops, }; use ra_arena::{impl_arena_id, Arena, RawId}; -use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; +use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; /// `AstId` points to an AST node in a specific file. #[derive(Debug)] @@ -40,14 +39,8 @@ impl Hash for FileAstId { } } -impl From> for ErasedFileAstId { - fn from(id: FileAstId) -> Self { - id.raw - } -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct ErasedFileAstId(RawId); +struct ErasedFileAstId(RawId); impl_arena_id!(ErasedFileAstId); /// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back. 
@@ -57,7 +50,7 @@ pub struct AstIdMap { } impl AstIdMap { - pub fn from_source(node: &SyntaxNode) -> AstIdMap { + pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap { assert!(node.parent().is_none()); let mut res = AstIdMap { arena: Arena::default() }; // By walking the tree in bread-first order we make sure that parents @@ -75,17 +68,23 @@ impl AstIdMap { } pub fn ast_id(&self, item: &N) -> FileAstId { - let ptr = SyntaxNodePtr::new(item.syntax()); - let raw = match self.arena.iter().find(|(_id, i)| **i == ptr) { + let raw = self.erased_ast_id(item.syntax()); + FileAstId { raw, _ty: PhantomData } + } + fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId { + let ptr = SyntaxNodePtr::new(item); + match self.arena.iter().find(|(_id, i)| **i == ptr) { Some((it, _)) => it, None => panic!( "Can't find {:?} in AstIdMap:\n{:?}", - item.syntax(), + item, self.arena.iter().map(|(_id, i)| i).collect::>(), ), - }; + } + } - FileAstId { raw, _ty: PhantomData } + pub(crate) fn get(&self, id: FileAstId) -> AstPtr { + self.arena[id.raw].cast::().unwrap() } fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId { @@ -93,13 +92,6 @@ impl AstIdMap { } } -impl ops::Index for AstIdMap { - type Output = SyntaxNodePtr; - fn index(&self, index: ErasedFileAstId) -> &SyntaxNodePtr { - &self.arena[index] - } -} - /// Walks the subtree in bfs order, calling `f` for each node. fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) { let mut curr_layer = vec![node.clone()]; diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs new file mode 100644 index 00000000000..a4ee9a529a1 --- /dev/null +++ b/crates/ra_hir_expand/src/db.rs @@ -0,0 +1,104 @@ +//! Defines database & queries for macro expansion. + +use std::sync::Arc; + +use mbe::MacroRules; +use ra_db::{salsa, SourceDatabase}; +use ra_prof::profile; +use ra_syntax::{AstNode, Parse, SyntaxNode}; + +use crate::{ + ast_id_map::AstIdMap, HirFileId, HirFileIdRepr, MacroCallId, MacroCallLoc, MacroDefId, + MacroFile, MacroFileKind, +}; + +// FIXME: rename to ExpandDatabase +#[salsa::query_group(AstDatabaseStorage)] +pub trait AstDatabase: SourceDatabase { + fn ast_id_map(&self, file_id: HirFileId) -> Arc; + + #[salsa::transparent] + fn parse_or_expand(&self, file_id: HirFileId) -> Option; + + #[salsa::interned] + fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId; + fn macro_arg(&self, id: MacroCallId) -> Option>; + fn macro_def(&self, id: MacroDefId) -> Option>; + fn parse_macro(&self, macro_file: MacroFile) -> Option>; + fn macro_expand(&self, macro_call: MacroCallId) -> Result, String>; +} + +pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc { + let map = + db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); + Arc::new(map) +} + +pub(crate) fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option> { + let macro_call = id.ast_id.to_node(db); + let arg = macro_call.token_tree()?; + let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| { + log::warn!("fail on macro_def to token tree: {:#?}", arg); + None + })?; + let rules = MacroRules::parse(&tt).ok().or_else(|| { + log::warn!("fail on macro_def parse: {:#?}", tt); + None + })?; + Some(Arc::new(rules)) +} + +pub(crate) fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option> { + let loc = db.lookup_intern_macro(id); + let macro_call = loc.ast_id.to_node(db); + let arg = macro_call.token_tree()?; + let (tt, _) = mbe::ast_to_token_tree(&arg)?; + Some(Arc::new(tt)) +} + +pub(crate) fn 
macro_expand( + db: &dyn AstDatabase, + id: MacroCallId, +) -> Result, String> { + let loc = db.lookup_intern_macro(id); + let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?; + + let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?; + let tt = macro_rules.expand(¯o_arg).map_err(|err| format!("{:?}", err))?; + // Set a hard limit for the expanded tt + let count = tt.count(); + if count > 65536 { + return Err(format!("Total tokens count exceed limit : count = {}", count)); + } + Ok(Arc::new(tt)) +} + +pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option { + match file_id.0 { + HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), + HirFileIdRepr::MacroFile(macro_file) => { + db.parse_macro(macro_file).map(|it| it.syntax_node()) + } + } +} + +pub(crate) fn parse_macro( + db: &dyn AstDatabase, + macro_file: MacroFile, +) -> Option> { + let _p = profile("parse_macro_query"); + let macro_call_id = macro_file.macro_call_id; + let tt = db + .macro_expand(macro_call_id) + .map_err(|err| { + // Note: + // The final goal we would like to make all parse_macro success, + // such that the following log will not call anyway. + log::warn!("fail on macro_parse: (reason: {})", err,); + }) + .ok()?; + match macro_file.macro_file_kind { + MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax), + MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax), + } +} diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs new file mode 100644 index 00000000000..6b35386730c --- /dev/null +++ b/crates/ra_hir_expand/src/lib.rs @@ -0,0 +1,161 @@ +//! `ra_hir_expand` deals with macro expansion. +//! +//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax +//! tree originates not from the text of some `FileId`, but from some macro +//! expansion. + +pub mod db; +pub mod ast_id_map; + +use std::hash::{Hash, Hasher}; + +use ra_db::{salsa, CrateId, FileId}; +use ra_syntax::ast::{self, AstNode}; + +use crate::{ast_id_map::FileAstId, db::AstDatabase}; + +/// Input to the analyzer is a set of files, where each file is identified by +/// `FileId` and contains source code. However, another source of source code in +/// Rust are macros: each macro can be thought of as producing a "temporary +/// file". To assign an id to such a file, we use the id of the macro call that +/// produced the file. So, a `HirFileId` is either a `FileId` (source code +/// written by user), or a `MacroCallId` (source code produced by macro). +/// +/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file +/// containing the call plus the offset of the macro call in the file. Note that +/// this is a recursive definition! However, the size_of of `HirFileId` is +/// finite (because everything bottoms out at the real `FileId`) and small +/// (`MacroCallId` uses the location interner). +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct HirFileId(HirFileIdRepr); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +enum HirFileIdRepr { + FileId(FileId), + MacroFile(MacroFile), +} + +impl From for HirFileId { + fn from(id: FileId) -> Self { + HirFileId(HirFileIdRepr::FileId(id)) + } +} + +impl From for HirFileId { + fn from(id: MacroFile) -> Self { + HirFileId(HirFileIdRepr::MacroFile(id)) + } +} + +impl HirFileId { + /// For macro-expansion files, returns the file original source file the + /// expansion originated from. 
+ pub fn original_file(self, db: &dyn AstDatabase) -> FileId { + match self.0 { + HirFileIdRepr::FileId(file_id) => file_id, + HirFileIdRepr::MacroFile(macro_file) => { + let loc = db.lookup_intern_macro(macro_file.macro_call_id); + loc.ast_id.file_id().original_file(db) + } + } + } + + /// Get the crate which the macro lives in, if it is a macro file. + pub fn macro_crate(self, db: &dyn AstDatabase) -> Option { + match self.0 { + HirFileIdRepr::FileId(_) => None, + HirFileIdRepr::MacroFile(macro_file) => { + let loc = db.lookup_intern_macro(macro_file.macro_call_id); + Some(loc.def.krate) + } + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroFile { + macro_call_id: MacroCallId, + macro_file_kind: MacroFileKind, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum MacroFileKind { + Items, + Expr, +} + +/// `MacroCallId` identifies a particular macro invocation, like +/// `println!("Hello, {}", world)`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroCallId(salsa::InternId); +impl salsa::InternKey for MacroCallId { + fn from_intern_id(v: salsa::InternId) -> Self { + MacroCallId(v) + } + fn as_intern_id(&self) -> salsa::InternId { + self.0 + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroDefId { + pub krate: CrateId, + pub ast_id: AstId, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroCallLoc { + pub def: MacroDefId, + pub ast_id: AstId, +} + +impl MacroCallId { + pub fn as_file(self, kind: MacroFileKind) -> HirFileId { + let macro_file = MacroFile { macro_call_id: self, macro_file_kind: kind }; + macro_file.into() + } +} + +/// `AstId` points to an AST node in any file. +/// +/// It is stable across reparses, and can be used as salsa key/value. +// FIXME: isn't this just a `Source>` ? +#[derive(Debug)] +pub struct AstId { + file_id: HirFileId, + file_ast_id: FileAstId, +} + +impl Clone for AstId { + fn clone(&self) -> AstId { + *self + } +} +impl Copy for AstId {} + +impl PartialEq for AstId { + fn eq(&self, other: &Self) -> bool { + (self.file_id, self.file_ast_id) == (other.file_id, other.file_ast_id) + } +} +impl Eq for AstId {} +impl Hash for AstId { + fn hash(&self, hasher: &mut H) { + (self.file_id, self.file_ast_id).hash(hasher); + } +} + +impl AstId { + pub fn new(file_id: HirFileId, file_ast_id: FileAstId) -> AstId { + AstId { file_id, file_ast_id } + } + + pub fn file_id(&self) -> HirFileId { + self.file_id + } + + pub fn to_node(&self, db: &dyn AstDatabase) -> N { + let root = db.parse_or_expand(self.file_id).unwrap(); + db.ast_id_map(self.file_id).get(self.file_ast_id).to_node(&root) + } +} diff --git a/crates/ra_ide_api/Cargo.toml b/crates/ra_ide_api/Cargo.toml index f66f0a6bade..bf6ef12f3ff 100644 --- a/crates/ra_ide_api/Cargo.toml +++ b/crates/ra_ide_api/Cargo.toml @@ -27,10 +27,13 @@ ra_db = { path = "../ra_db" } ra_cfg = { path = "../ra_cfg" } ra_fmt = { path = "../ra_fmt" } ra_prof = { path = "../ra_prof" } -hir = { path = "../ra_hir", package = "ra_hir" } test_utils = { path = "../test_utils" } ra_assists = { path = "../ra_assists" } +# ra_ide_api should depend only on the top-level `hir` package. if you need +# something from some `hir_xxx` subpackage, reexport the API via `hir`. +hir = { path = "../ra_hir", package = "ra_hir" } + [dev-dependencies] insta = "0.12.0"
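
The call-site changes above (in `expr/lower.rs`, `impl_block.rs`, and `nameres/collector.rs`) all follow the same pattern: instead of the removed `MacroCallLoc::id` helper, the location is interned directly on the database, converted into a `HirFileId`, and then expanded through `parse_or_expand`. The sketch below pulls that flow together against the relocated `hir_expand::db::AstDatabase` query group. It is illustrative only: the wrapper function `expand_expr_macro` and its `def`/`ast_id` parameters are hypothetical, while the types and queries it uses (`MacroCallLoc`, `intern_macro`, `as_file`, `MacroFileKind::Expr`, `parse_or_expand`) are the ones introduced or moved by this patch.

```rust
// Illustrative sketch, not part of this patch. Assumes `db` implements the
// new `hir_expand::db::AstDatabase` query group (as ra_hir's database does
// after this change).
use hir_expand::{db::AstDatabase, AstId, MacroCallLoc, MacroDefId, MacroFileKind};
use ra_syntax::ast::{self, AstNode};

fn expand_expr_macro(
    db: &impl AstDatabase,
    def: MacroDefId,
    ast_id: AstId<ast::MacroCall>,
) -> Option<ast::Expr> {
    // `MacroCallLoc { .. }.id(db)` is gone; the location is interned via the
    // `intern_macro` query that now lives on `AstDatabase`.
    let call_id = db.intern_macro(MacroCallLoc { def, ast_id });
    // A call id plus a kind ("expand as an expression") yields a `HirFileId`
    // for the pseudo-file produced by the expansion.
    let file_id = call_id.as_file(MacroFileKind::Expr);
    // `parse_or_expand` handles both real files and macro files, returning
    // the expansion's syntax tree, which is then cast to an expression.
    let node = db.parse_or_expand(file_id)?;
    ast::Expr::cast(node)
}
```

This mirrors the new code in `expr/lower.rs`; the item-level site in `impl_block.rs` is the same shape, but with `MacroFileKind::Items` and a cast to `ast::MacroItems`.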