Merge #2629

2629: Remove imports from hir r=matklad a=matklad

We only used them to avoid self-confirming completions (`use self::foo`), but that can be handled more locally.

bors r+

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
commit cdc9d682b0
13 changed files with 71 additions and 166 deletions
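The completion-side replacement mentioned in the description is visible in the `complete_path` hunk near the end of the diff: instead of resolving each entry's `Import` back to syntax and checking whether the cursor sits inside that `use` tree, the completion now skips an entry whose `ScopeDef` is `Unknown` and whose name is exactly the identifier under the cursor (tracked via the new `CompletionContext::name_ref` field). Below is a minimal standalone sketch of that check, using simplified stand-in types rather than rust-analyzer's real API:

```rust
// Sketch only: `ScopeDef` here is a stand-in, not hir::ScopeDef.
#[derive(Debug, PartialEq, Eq)]
enum ScopeDef {
    ModuleDef, // stand-in for an entry that resolved to a real item
    Unknown,   // a partially written `use self::foo<|>` resolves to Unknown
}

/// Should `name` be offered while completing inside a `use` item?
fn should_suggest(def: &ScopeDef, name: &str, ident_under_cursor: Option<&str>) -> bool {
    match (def, ident_under_cursor) {
        // the entry the user is typing right now: don't complete it to itself
        (ScopeDef::Unknown, Some(current)) if current == name => false,
        _ => true,
    }
}

fn main() {
    // `use self::foo<|>`: the unresolved `foo` must not suggest `foo`
    assert!(!should_suggest(&ScopeDef::Unknown, "foo", Some("foo")));
    // a resolved sibling item is still offered
    assert!(should_suggest(&ScopeDef::ModuleDef, "Bar", Some("foo")));
    println!("ok");
}
```

Under this heuristic, an entry that is both unresolved and spelled like the identifier being typed is, in practice, the import currently being written, so the per-import source tracking in `hir` is no longer needed.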
@@ -12,8 +12,8 @@ use hir_def::{
     resolver::HasResolver,
     type_ref::{Mutability, TypeRef},
     AdtId, ConstId, DefWithBodyId, EnumId, FunctionId, HasModule, ImplId, LocalEnumVariantId,
-    LocalImportId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId,
-    TraitId, TypeAliasId, TypeParamId, UnionId,
+    LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
+    TypeParamId, UnionId,
 };
 use hir_expand::{
     diagnostics::DiagnosticSink,
@@ -180,13 +180,11 @@ impl Module {
     }
 
     /// Returns a `ModuleScope`: a set of items, visible in this module.
-    pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef, Option<Import>)> {
+    pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef)> {
         db.crate_def_map(self.id.krate)[self.id.local_id]
             .scope
             .entries()
-            .map(|(name, res)| {
-                (name.clone(), res.def.into(), res.import.map(|id| Import { parent: self, id }))
-            })
+            .map(|(name, res)| (name.clone(), res.def.into()))
             .collect()
     }
 
@@ -229,10 +227,10 @@ impl Module {
     }
 }
 
-pub struct Import {
-    pub(crate) parent: Module,
-    pub(crate) id: LocalImportId,
-}
+// pub struct Import {
+//     pub(crate) parent: Module,
+//     pub(crate) id: LocalImportId,
+// }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct StructField {

@@ -4,8 +4,8 @@ pub use hir_def::db::{
     BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQuery, CrateLangItemsQuery,
     DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, ExprScopesQuery,
     FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, InternDatabaseStorage,
-    LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, RawItemsWithSourceMapQuery,
-    StaticDataQuery, StructDataQuery, TraitDataQuery, TypeAliasDataQuery,
+    LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, StructDataQuery,
+    TraitDataQuery, TypeAliasDataQuery,
 };
 pub use hir_expand::db::{
     AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,

@@ -9,8 +9,8 @@ use hir_def::{
 use ra_syntax::ast;
 
 use crate::{
-    db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplBlock, Import, MacroDef,
-    Module, Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union,
+    db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplBlock, MacroDef, Module,
+    Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union,
 };
 
 pub use hir_expand::InFile;
@@ -117,18 +117,6 @@ impl HasSource for ImplBlock {
         self.id.lookup(db).source(db)
     }
 }
-impl HasSource for Import {
-    type Ast = Either<ast::UseTree, ast::ExternCrateItem>;
-
-    /// Returns the syntax of the last path segment corresponding to this import
-    fn source(self, db: &impl DefDatabase) -> InFile<Self::Ast> {
-        let src = self.parent.definition_source(db);
-        let (_, source_map) = db.raw_items_with_source_map(src.file_id);
-        let root = db.parse_or_expand(src.file_id).unwrap();
-        let ptr = source_map.get(self.id);
-        src.with_value(ptr.map_left(|it| it.to_node(&root)).map_right(|it| it.to_node(&root)))
-    }
-}
 
 impl HasSource for TypeParam {
     type Ast = Either<ast::TraitDef, ast::TypeParam>;

@@ -40,8 +40,8 @@ mod from_source;
 pub use crate::{
     code_model::{
         Adt, AssocItem, AttrDef, Const, Crate, CrateDependency, DefWithBody, Docs, Enum,
-        EnumVariant, FieldSource, Function, GenericDef, HasAttrs, ImplBlock, Import, Local,
-        MacroDef, Module, ModuleDef, ScopeDef, Static, Struct, StructField, Trait, Type, TypeAlias,
+        EnumVariant, FieldSource, Function, GenericDef, HasAttrs, ImplBlock, Local, MacroDef,
+        Module, ModuleDef, ScopeDef, Static, Struct, StructField, Trait, Type, TypeAlias,
         TypeParam, Union, VariantDef,
     },
     from_source::FromSource,

@@ -13,10 +13,7 @@ use crate::{
     docs::Documentation,
     generics::GenericParams,
     lang_item::{LangItemTarget, LangItems},
-    nameres::{
-        raw::{ImportSourceMap, RawItems},
-        CrateDefMap,
-    },
+    nameres::{raw::RawItems, CrateDefMap},
     AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, FunctionId, FunctionLoc,
     GenericDefId, ImplId, ImplLoc, ModuleId, StaticId, StaticLoc, StructId, StructLoc, TraitId,
     TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc,
@@ -46,12 +43,6 @@ pub trait InternDatabase: SourceDatabase {
 
 #[salsa::query_group(DefDatabaseStorage)]
 pub trait DefDatabase: InternDatabase + AstDatabase {
-    #[salsa::invoke(RawItems::raw_items_with_source_map_query)]
-    fn raw_items_with_source_map(
-        &self,
-        file_id: HirFileId,
-    ) -> (Arc<RawItems>, Arc<ImportSourceMap>);
-
     #[salsa::invoke(RawItems::raw_items_query)]
     fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;
 

@@ -5,7 +5,7 @@ use hir_expand::name::Name;
 use once_cell::sync::Lazy;
 use rustc_hash::FxHashMap;
 
-use crate::{per_ns::PerNs, BuiltinType, ImplId, LocalImportId, MacroDefId, ModuleDefId, TraitId};
+use crate::{per_ns::PerNs, BuiltinType, ImplId, MacroDefId, ModuleDefId, TraitId};
 
 #[derive(Debug, Default, PartialEq, Eq)]
 pub struct ItemScope {
@@ -30,7 +30,7 @@ static BUILTIN_SCOPE: Lazy<FxHashMap<Name, Resolution>> = Lazy::new(|| {
     BuiltinType::ALL
         .iter()
         .map(|(name, ty)| {
-            (name.clone(), Resolution { def: PerNs::types(ty.clone().into()), import: None })
+            (name.clone(), Resolution { def: PerNs::types(ty.clone().into()), declaration: false })
         })
         .collect()
 });
@@ -53,11 +53,9 @@ impl ItemScope {
     }
 
     pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
-        self.entries()
-            .filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
-            .flat_map(|per_ns| {
-                per_ns.take_types().into_iter().chain(per_ns.take_values().into_iter())
-            })
+        self.entries().filter(|(_name, res)| res.declaration).flat_map(|(_name, res)| {
+            res.def.take_types().into_iter().chain(res.def.take_values().into_iter())
+        })
     }
 
     pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
@@ -112,38 +110,26 @@ impl ItemScope {
         self.legacy_macros.insert(name, mac);
     }
 
-    pub(crate) fn push_res(
-        &mut self,
-        name: Name,
-        res: &Resolution,
-        import: Option<LocalImportId>,
-    ) -> bool {
+    pub(crate) fn push_res(&mut self, name: Name, res: &Resolution, declaration: bool) -> bool {
         let mut changed = false;
         let existing = self.items.entry(name.clone()).or_default();
 
         if existing.def.types.is_none() && res.def.types.is_some() {
             existing.def.types = res.def.types;
-            existing.import = import.or(res.import);
+            existing.declaration |= declaration;
             changed = true;
         }
         if existing.def.values.is_none() && res.def.values.is_some() {
             existing.def.values = res.def.values;
-            existing.import = import.or(res.import);
+            existing.declaration |= declaration;
            changed = true;
         }
         if existing.def.macros.is_none() && res.def.macros.is_some() {
             existing.def.macros = res.def.macros;
-            existing.import = import.or(res.import);
+            existing.declaration |= declaration;
             changed = true;
         }
 
-        if existing.def.is_none()
-            && res.def.is_none()
-            && existing.import.is_none()
-            && res.import.is_some()
-        {
-            existing.import = res.import;
-        }
         changed
     }
 
@@ -160,6 +146,5 @@ impl ItemScope {
 pub struct Resolution {
     /// None for unresolved
     pub def: PerNs,
-    /// ident by which this is imported into local scope.
-    pub import: Option<LocalImportId>,
+    pub declaration: bool,
 }

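With the `import: Option<LocalImportId>` field gone, `ItemScope` only needs to remember whether a name was introduced by a declaration in the module itself (the new `declaration: bool`); that is what `declarations()` filters on, and what the collector later passes as `depth == 0 && res.declaration`. A small self-contained sketch of the merge rule in `push_res`, with stand-in fields instead of the real `PerNs`:

```rust
// Sketch only: `types`/`values`/`macros` stand in for the PerNs namespaces.
#[derive(Default)]
struct Resolution {
    types: Option<u32>,
    values: Option<u32>,
    macros: Option<u32>,
    declaration: bool, // true if the name is declared here, not just imported
}

fn push_res(existing: &mut Resolution, res: &Resolution, declaration: bool) -> bool {
    let mut changed = false;
    if existing.types.is_none() && res.types.is_some() {
        existing.types = res.types;
        existing.declaration |= declaration; // flag is OR-ed in when the slot is filled
        changed = true;
    }
    if existing.values.is_none() && res.values.is_some() {
        existing.values = res.values;
        existing.declaration |= declaration;
        changed = true;
    }
    if existing.macros.is_none() && res.macros.is_some() {
        existing.macros = res.macros;
        existing.declaration |= declaration;
        changed = true;
    }
    changed
}

fn main() {
    let mut existing = Resolution::default();
    // a glob import brings in a type: not a local declaration
    assert!(push_res(&mut existing, &Resolution { types: Some(1), ..Default::default() }, false));
    assert!(!existing.declaration);
    // a local declaration fills the value namespace and flips the flag
    assert!(push_res(&mut existing, &Resolution { values: Some(2), ..Default::default() }, true));
    assert!(existing.declaration);
}
```

Each namespace slot is filled at most once, and the flag is set at that moment, so a name that arrives purely through imports keeps `declaration == false` and is excluded from `declarations()`.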
@@ -51,10 +51,6 @@ use ra_syntax::{ast, AstNode};
 use crate::body::Expander;
 use crate::builtin_type::BuiltinType;
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct LocalImportId(RawId);
-impl_arena_id!(LocalImportId);
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct ModuleId {
     pub krate: CrateId,

@@ -26,8 +26,7 @@ use crate::{
     path::{ModPath, PathKind},
     per_ns::PerNs,
     AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern,
-    LocalImportId, LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc,
-    TypeAliasLoc, UnionLoc,
+    LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
 };
 
 pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
@@ -93,7 +92,7 @@ impl PartialResolvedImport {
 #[derive(Clone, Debug, Eq, PartialEq)]
 struct ImportDirective {
     module_id: LocalModuleId,
-    import_id: LocalImportId,
+    import_id: raw::LocalImportId,
     import: raw::ImportData,
     status: PartialResolvedImport,
 }
@@ -110,7 +109,7 @@ struct MacroDirective {
 struct DefCollector<'a, DB> {
     db: &'a DB,
     def_map: CrateDefMap,
-    glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, LocalImportId)>>,
+    glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, raw::LocalImportId)>>,
     unresolved_imports: Vec<ImportDirective>,
     resolved_imports: Vec<ImportDirective>,
     unexpanded_macros: Vec<MacroDirective>,
@@ -218,8 +217,7 @@ where
         if export {
             self.update(
                 self.def_map.root,
-                None,
-                &[(name, Resolution { def: PerNs::macros(macro_), import: None })],
+                &[(name, Resolution { def: PerNs::macros(macro_), declaration: false })],
             );
         }
     }
@@ -374,7 +372,7 @@ where
                     // Module scoped macros is included
                     let items = scope.collect_resolutions();
 
-                    self.update(module_id, Some(import_id), &items);
+                    self.update(module_id, &items);
                 } else {
                     // glob import from same crate => we do an initial
                     // import, and then need to propagate any further
@@ -384,7 +382,7 @@ where
                     // Module scoped macros is included
                     let items = scope.collect_resolutions();
 
-                    self.update(module_id, Some(import_id), &items);
+                    self.update(module_id, &items);
                     // record the glob import in case we add further items
                     let glob = self.glob_imports.entry(m.local_id).or_default();
                     if !glob.iter().any(|it| *it == (module_id, import_id)) {
@@ -404,12 +402,12 @@ where
                         let variant = EnumVariantId { parent: e, local_id };
                         let res = Resolution {
                             def: PerNs::both(variant.into(), variant.into()),
-                            import: Some(import_id),
+                            declaration: false,
                         };
                         (name, res)
                     })
                     .collect::<Vec<_>>();
-                self.update(module_id, Some(import_id), &resolutions);
+                self.update(module_id, &resolutions);
             }
             Some(d) => {
                 log::debug!("glob import {:?} from non-module/enum {:?}", import, d);
@@ -431,27 +429,21 @@ where
                     }
                 }
 
-                let resolution = Resolution { def, import: Some(import_id) };
-                self.update(module_id, Some(import_id), &[(name, resolution)]);
+                let resolution = Resolution { def, declaration: false };
+                self.update(module_id, &[(name, resolution)]);
             }
             None => tested_by!(bogus_paths),
         }
     }
 
-    fn update(
-        &mut self,
-        module_id: LocalModuleId,
-        import: Option<LocalImportId>,
-        resolutions: &[(Name, Resolution)],
-    ) {
-        self.update_recursive(module_id, import, resolutions, 0)
+    fn update(&mut self, module_id: LocalModuleId, resolutions: &[(Name, Resolution)]) {
+        self.update_recursive(module_id, resolutions, 0)
     }
 
     fn update_recursive(
         &mut self,
         module_id: LocalModuleId,
-        import: Option<LocalImportId>,
         resolutions: &[(Name, Resolution)],
         depth: usize,
     ) {
@@ -462,7 +454,7 @@ where
         let scope = &mut self.def_map.modules[module_id].scope;
         let mut changed = false;
         for (name, res) in resolutions {
-            changed |= scope.push_res(name.clone(), res, import);
+            changed |= scope.push_res(name.clone(), res, depth == 0 && res.declaration);
         }
 
         if !changed {
@@ -475,9 +467,9 @@ where
             .flat_map(|v| v.iter())
            .cloned()
            .collect::<Vec<_>>();
-        for (glob_importing_module, glob_import) in glob_imports {
+        for (glob_importing_module, _glob_import) in glob_imports {
             // We pass the glob import so that the tracked import in those modules is that glob import
-            self.update_recursive(glob_importing_module, Some(glob_import), resolutions, depth + 1);
+            self.update_recursive(glob_importing_module, resolutions, depth + 1);
         }
     }
 
@@ -719,9 +711,9 @@ where
             def: PerNs::types(
                 ModuleId { krate: self.def_collector.def_map.krate, local_id: res }.into(),
             ),
-            import: None,
+            declaration: true,
         };
-        self.def_collector.update(self.module_id, None, &[(name, resolution)]);
+        self.def_collector.update(self.module_id, &[(name, resolution)]);
         res
     }
 
@@ -791,8 +783,8 @@ where
                 PerNs::types(def.into())
             }
         };
-        let resolution = Resolution { def, import: None };
-        self.def_collector.update(self.module_id, None, &[(name, resolution)])
+        let resolution = Resolution { def, declaration: true };
+        self.def_collector.update(self.module_id, &[(name, resolution)])
     }
 
     fn collect_derives(&mut self, attrs: &Attrs, def: &raw::DefData) {

@@ -7,24 +7,24 @@
 
 use std::{ops::Index, sync::Arc};
 
-use either::Either;
 use hir_expand::{
     ast_id_map::AstIdMap,
     db::AstDatabase,
     hygiene::Hygiene,
     name::{AsName, Name},
 };
-use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
+use ra_arena::{impl_arena_id, Arena, RawId};
 use ra_syntax::{
     ast::{self, AttrsOwner, NameOwner},
-    AstNode, AstPtr,
+    AstNode,
 };
 use test_utils::tested_by;
 
-use crate::{
-    attr::Attrs, db::DefDatabase, path::ModPath, trace::Trace, FileAstId, HirFileId, InFile,
-    LocalImportId,
-};
+use crate::{attr::Attrs, db::DefDatabase, path::ModPath, FileAstId, HirFileId, InFile};
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub(super) struct LocalImportId(RawId);
+impl_arena_id!(LocalImportId);
+
 /// `RawItems` is a set of top-level items in a file (except for impls).
 ///
@@ -41,35 +41,14 @@ pub struct RawItems {
     items: Vec<RawItem>,
 }
 
-#[derive(Debug, Default, PartialEq, Eq)]
-pub struct ImportSourceMap {
-    map: ArenaMap<LocalImportId, ImportSourcePtr>,
-}
-
-type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>;
-
-impl ImportSourceMap {
-    pub fn get(&self, import: LocalImportId) -> ImportSourcePtr {
-        self.map[import].clone()
-    }
-}
-
 impl RawItems {
     pub(crate) fn raw_items_query(
         db: &(impl DefDatabase + AstDatabase),
         file_id: HirFileId,
     ) -> Arc<RawItems> {
-        db.raw_items_with_source_map(file_id).0
-    }
-
-    pub(crate) fn raw_items_with_source_map_query(
-        db: &(impl DefDatabase + AstDatabase),
-        file_id: HirFileId,
-    ) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
         let mut collector = RawItemsCollector {
             raw_items: RawItems::default(),
             source_ast_id_map: db.ast_id_map(file_id),
-            imports: Trace::new(),
             file_id,
             hygiene: Hygiene::new(db, file_id),
         };
@@ -80,11 +59,8 @@ impl RawItems {
                 collector.process_module(None, item_list);
             }
         }
-        let mut raw_items = collector.raw_items;
-        let (arena, map) = collector.imports.into_arena_and_map();
-        raw_items.imports = arena;
-        let source_map = ImportSourceMap { map };
-        (Arc::new(raw_items), Arc::new(source_map))
+        let raw_items = collector.raw_items;
+        Arc::new(raw_items)
     }
 
     pub(super) fn items(&self) -> &[RawItem] {
@@ -223,7 +199,6 @@ pub(super) struct ImplData {
 
 struct RawItemsCollector {
     raw_items: RawItems,
-    imports: Trace<LocalImportId, ImportData, ImportSourcePtr>,
     source_ast_id_map: Arc<AstIdMap>,
     file_id: HirFileId,
     hygiene: Hygiene,
@@ -330,7 +305,7 @@ impl RawItemsCollector {
         ModPath::expand_use_item(
             InFile { value: use_item, file_id: self.file_id },
             &self.hygiene,
-            |path, use_tree, is_glob, alias| {
+            |path, _use_tree, is_glob, alias| {
                 let import_data = ImportData {
                     path,
                     alias,
@@ -339,11 +314,11 @@ impl RawItemsCollector {
                     is_extern_crate: false,
                     is_macro_use: false,
                 };
-                buf.push((import_data, Either::Left(AstPtr::new(use_tree))));
+                buf.push(import_data);
             },
         );
-        for (import_data, ptr) in buf {
-            self.push_import(current_module, attrs.clone(), import_data, ptr);
+        for import_data in buf {
+            self.push_import(current_module, attrs.clone(), import_data);
         }
     }
 
@@ -366,12 +341,7 @@ impl RawItemsCollector {
             is_extern_crate: true,
             is_macro_use,
         };
-        self.push_import(
-            current_module,
-            attrs,
-            import_data,
-            Either::Right(AstPtr::new(&extern_crate)),
-        );
+        self.push_import(current_module, attrs, import_data);
     }
 }
 
@@ -402,14 +372,8 @@ impl RawItemsCollector {
         self.push_item(current_module, attrs, RawItemKind::Impl(imp))
     }
 
-    fn push_import(
-        &mut self,
-        current_module: Option<Module>,
-        attrs: Attrs,
-        data: ImportData,
-        source: ImportSourcePtr,
-    ) {
-        let import = self.imports.alloc(|| source, || data);
+    fn push_import(&mut self, current_module: Option<Module>, attrs: Attrs, data: ImportData) {
+        let import = self.raw_items.imports.alloc(data);
         self.push_item(current_module, attrs, RawItemKind::Import(import))
     }
 

@@ -18,10 +18,6 @@ pub(crate) struct Trace<ID: ArenaId, T, V> {
 }
 
 impl<ID: ra_arena::ArenaId + Copy, T, V> Trace<ID, T, V> {
-    pub(crate) fn new() -> Trace<ID, T, V> {
-        Trace { arena: Some(Arena::default()), map: Some(ArenaMap::default()), len: 0 }
-    }
-
     pub(crate) fn new_for_arena() -> Trace<ID, T, V> {
         Trace { arena: Some(Arena::default()), map: None, len: 0 }
     }
@@ -52,8 +48,4 @@ impl<ID: ra_arena::ArenaId + Copy, T, V> Trace<ID, T, V> {
     pub(crate) fn into_map(mut self) -> ArenaMap<ID, V> {
         self.map.take().unwrap()
     }
-
-    pub(crate) fn into_arena_and_map(mut self) -> (Arena<ID, T>, ArenaMap<ID, V>) {
-        (self.arena.take().unwrap(), self.map.take().unwrap())
-    }
 }

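Dropping `ImportSourceMap` also removes the last reason for raw-item collection to route imports through `Trace` (an arena plus a parallel id-to-source map filled in lock step): `push_import` now allocates straight into `raw_items.imports`, and the `raw_items_with_source_map` query disappears. A toy sketch of the difference, assuming simplified stand-in types rather than the real `ra_arena`/`Trace` API:

```rust
// Sketch only: `Arena`, `Trace`, and `Idx` here are toy stand-ins.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Idx(u32); // stand-in for LocalImportId

#[derive(Default)]
struct Arena<T> {
    data: Vec<T>,
}
impl<T> Arena<T> {
    fn alloc(&mut self, value: T) -> Idx {
        self.data.push(value);
        Idx(self.data.len() as u32 - 1)
    }
}

#[derive(Default)]
struct Trace<T, V> {
    arena: Arena<T>,
    map: HashMap<Idx, V>, // id -> source pointer, built alongside the arena
}
impl<T, V> Trace<T, V> {
    // old shape: allocate the value and remember where it came from
    fn alloc(&mut self, value: T, source: V) -> Idx {
        let id = self.arena.alloc(value);
        self.map.insert(id, source);
        id
    }
}

fn main() {
    // before: imports went through a Trace so a separate query could hand out
    // the id -> syntax-pointer map
    let mut traced: Trace<&str, &str> = Trace::default();
    let _old_id = traced.alloc("use foo::bar;", "ptr-to-use-tree");

    // after: push_import allocates straight into raw_items.imports
    let mut imports: Arena<&str> = Arena::default();
    let _new_id = imports.alloc("use foo::bar;");
}
```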
@@ -270,7 +270,6 @@ impl RootDatabase {
 
         self.query(hir::db::AstIdMapQuery).sweep(sweep);
 
-        self.query(hir::db::RawItemsWithSourceMapQuery).sweep(sweep);
         self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);
 
         self.query(hir::db::ExprScopesQuery).sweep(sweep);
@@ -309,7 +308,6 @@ impl RootDatabase {
             hir::db::StructDataQuery
             hir::db::EnumDataQuery
             hir::db::TraitDataQuery
-            hir::db::RawItemsWithSourceMapQuery
             hir::db::RawItemsQuery
             hir::db::CrateDefMapQuery
             hir::db::GenericParamsQuery

@@ -1,7 +1,6 @@
 //! FIXME: write short doc here
 
-use either::Either;
-use hir::{Adt, HasSource, PathResolution};
+use hir::{Adt, PathResolution, ScopeDef};
 use ra_syntax::AstNode;
 use test_utils::tested_by;
 
@@ -19,17 +18,15 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
     match def {
         hir::ModuleDef::Module(module) => {
             let module_scope = module.scope(ctx.db);
-            for (name, def, import) in module_scope {
-                if let hir::ScopeDef::ModuleDef(hir::ModuleDef::BuiltinType(..)) = def {
-                    if ctx.use_item_syntax.is_some() {
+            for (name, def) in module_scope {
+                if ctx.use_item_syntax.is_some() {
+                    if let hir::ScopeDef::ModuleDef(hir::ModuleDef::BuiltinType(..)) = def {
                         tested_by!(dont_complete_primitive_in_use);
                         continue;
                     }
-                }
-                if Some(module) == ctx.module {
-                    if let Some(import) = import {
-                        if let Either::Left(use_tree) = import.source(ctx.db).value {
-                            if use_tree.syntax().text_range().contains_inclusive(ctx.offset) {
+                    if let ScopeDef::Unknown = def {
+                        if let Some(name_ref) = ctx.name_ref.as_ref() {
+                            if &name_ref.syntax().text() == name.to_string().as_str() {
                                 // for `use self::foo<|>`, don't suggest `foo` as a completion
                                 tested_by!(dont_complete_current_use);
                                 continue;

@@ -18,6 +18,7 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) analyzer: hir::SourceAnalyzer,
     pub(super) offset: TextUnit,
     pub(super) token: SyntaxToken,
+    pub(super) name_ref: Option<ast::NameRef>,
     pub(super) module: Option<hir::Module>,
     pub(super) function_syntax: Option<ast::FnDef>,
     pub(super) use_item_syntax: Option<ast::UseItem>,
@@ -68,6 +69,7 @@ impl<'a> CompletionContext<'a> {
             analyzer,
             token,
             offset: position.offset,
+            name_ref: None,
             module,
             function_syntax: None,
             use_item_syntax: None,
@@ -142,6 +144,8 @@ impl<'a> CompletionContext<'a> {
     }
 
     fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
+        self.name_ref =
+            find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start());
         let name_range = name_ref.syntax().text_range();
         if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() {
             self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);