Make the rustc driver and interface demand driven
parent 26b4cb4848, commit 51938c61f6
60 changed files with 2521 additions and 2755 deletions
@@ -946,11 +946,6 @@ impl_stable_hash_for!(struct ty::CrateInherentImpls {
     inherent_impls
 });

-impl_stable_hash_for!(enum crate::session::CompileIncomplete {
-    Stopped,
-    Errored(error_reported)
-});
-
 impl_stable_hash_for!(struct crate::util::common::ErrorReported {});

 impl_stable_hash_for!(tuple_struct crate::middle::reachable::ReachableSet {
@@ -11,6 +11,7 @@ use rustc_target::spec::{Target, TargetTriple};
 use crate::lint;
 use crate::middle::cstore;

+use syntax;
 use syntax::ast::{self, IntTy, UintTy, MetaItemKind};
 use syntax::source_map::{FileName, FilePathMapping};
 use syntax::edition::{Edition, EDITION_NAME_LIST, DEFAULT_EDITION};
@@ -1494,6 +1495,15 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
     ret
 }

+/// Converts the crate cfg! configuration from String to Symbol.
+/// `rustc_interface::interface::Config` accepts this in the compiler configuration,
+/// but the symbol interner is not yet set up then, so we must convert it later.
+pub fn to_crate_config(cfg: FxHashSet<(String, Option<String>)>) -> ast::CrateConfig {
+    cfg.into_iter()
+       .map(|(a, b)| (Symbol::intern(&a), b.map(|b| Symbol::intern(&b))))
+       .collect()
+}
+
 pub fn build_configuration(sess: &Session, mut user_cfg: ast::CrateConfig) -> ast::CrateConfig {
     // Combine the configuration requested by the session (command line) with
     // some default and generated configuration items
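For orientation, a hedged, std-only sketch of the two-phase cfg handling introduced here: `--cfg` values are kept as plain string pairs until an interner exists, and only then converted. The `Sym` alias and the `intern` closure are stand-ins for `Symbol` and `Symbol::intern`, not real APIs.

use std::collections::HashSet;

// Stand-in for the interned Symbol type; purely illustrative.
type Sym = u32;

// Mirrors the shape of the new `to_crate_config`: convert string cfgs into
// "interned" cfgs once an interner (here: the `intern` closure) is available.
fn to_crate_config(
    cfg: HashSet<(String, Option<String>)>,
    intern: impl Fn(&str) -> Sym,
) -> Vec<(Sym, Option<Sym>)> {
    cfg.into_iter()
        .map(|(a, b)| (intern(&a), b.map(|b| intern(&b))))
        .collect()
}

fn main() {
    let mut cfg = HashSet::new();
    cfg.insert(("unix".to_string(), None));
    cfg.insert(("feature".to_string(), Some("foo".to_string())));
    // Trivial "interner": the string length stands in for a symbol index.
    let interned = to_crate_config(cfg, |s| s.len() as Sym);
    assert_eq!(interned.len(), 2);
}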
@@ -1800,10 +1810,9 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
 }

 // Convert strings provided as --cfg [cfgspec] into a crate_cfg
-pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> ast::CrateConfig {
-    cfgspecs
-        .into_iter()
-        .map(|s| {
+pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
+    syntax::with_globals(move || {
+        let cfg = cfgspecs.into_iter().map(|s| {
             let sess = parse::ParseSess::new(FilePathMapping::empty());
             let filename = FileName::cfg_spec_source_code(&s);
             let mut parser = parse::new_parser_from_source_str(&sess, filename, s.to_string());
@@ -1835,8 +1844,11 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> ast::CrateConfig {
             }

             error!(r#"expected `key` or `key="value"`"#);
-        })
-        .collect::<ast::CrateConfig>()
+        }).collect::<ast::CrateConfig>();
+        cfg.into_iter().map(|(a, b)| {
+            (a.to_string(), b.map(|b| b.to_string()))
+        }).collect()
+    })
 }

 pub fn get_cmd_lint_options(matches: &getopts::Matches,
@@ -1864,7 +1876,7 @@ pub fn get_cmd_lint_options(matches: &getopts::Matches,

 pub fn build_session_options_and_crate_config(
     matches: &getopts::Matches,
-) -> (Options, ast::CrateConfig) {
+) -> (Options, FxHashSet<(String, Option<String>)>) {
     let color = match matches.opt_str("color").as_ref().map(|s| &s[..]) {
         Some("auto") => ColorConfig::Auto,
         Some("always") => ColorConfig::Always,
@@ -2590,7 +2602,11 @@ mod tests {
     use getopts;
     use crate::lint;
     use crate::middle::cstore;
-    use crate::session::config::{build_configuration, build_session_options_and_crate_config};
+    use crate::session::config::{
+        build_configuration,
+        build_session_options_and_crate_config,
+        to_crate_config
+    };
     use crate::session::config::{LtoCli, LinkerPluginLto};
     use crate::session::build_session;
     use crate::session::search_paths::SearchPath;
@@ -2631,7 +2647,7 @@ mod tests {
         let registry = errors::registry::Registry::new(&[]);
         let (sessopts, cfg) = build_session_options_and_crate_config(matches);
         let sess = build_session(sessopts, None, registry);
-        let cfg = build_configuration(&sess, cfg);
+        let cfg = build_configuration(&sess, to_crate_config(cfg));
         assert!(cfg.contains(&(Symbol::intern("test"), None)));
     });
 }
@@ -2649,7 +2665,7 @@ mod tests {
         let registry = errors::registry::Registry::new(&[]);
         let (sessopts, cfg) = build_session_options_and_crate_config(matches);
         let sess = build_session(sessopts, None, registry);
-        let cfg = build_configuration(&sess, cfg);
+        let cfg = build_configuration(&sess, to_crate_config(cfg));
         let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test");
         assert!(test_items.next().is_some());
         assert!(test_items.next().is_none());
@@ -311,7 +311,7 @@ impl Session {
     pub fn abort_if_errors(&self) {
         self.diagnostic().abort_if_errors();
     }
-    pub fn compile_status(&self) -> Result<(), CompileIncomplete> {
+    pub fn compile_status(&self) -> Result<(), ErrorReported> {
         compile_result_from_err_count(self.err_count())
     }
     pub fn track_errors<F, T>(&self, f: F) -> Result<T, ErrorReported>
@@ -1124,7 +1124,7 @@ pub fn build_session_with_source_map(
     build_session_(sopts, local_crate_source_file, diagnostic_handler, source_map, lint_caps)
 }

-pub fn build_session_(
+fn build_session_(
     sopts: config::Options,
     local_crate_source_file: Option<PathBuf>,
     span_diagnostic: errors::Handler,
@@ -1334,22 +1334,12 @@ pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
     handler.emit(&MultiSpan::new(), msg, errors::Level::Warning);
 }

-#[derive(Copy, Clone, Debug)]
-pub enum CompileIncomplete {
-    Stopped,
-    Errored(ErrorReported),
-}
-impl From<ErrorReported> for CompileIncomplete {
-    fn from(err: ErrorReported) -> CompileIncomplete {
-        CompileIncomplete::Errored(err)
-    }
-}
-pub type CompileResult = Result<(), CompileIncomplete>;
+pub type CompileResult = Result<(), ErrorReported>;

 pub fn compile_result_from_err_count(err_count: usize) -> CompileResult {
     if err_count == 0 {
         Ok(())
     } else {
-        Err(CompileIncomplete::Errored(ErrorReported))
+        Err(ErrorReported)
     }
 }
@@ -78,7 +78,6 @@ use crate::hir;
 pub struct AllArenas<'tcx> {
     pub global: WorkerLocal<GlobalArenas<'tcx>>,
     pub interner: SyncDroplessArena,
-    global_ctxt: Option<GlobalCtxt<'tcx>>,
 }

 impl<'tcx> AllArenas<'tcx> {
@@ -86,7 +85,6 @@ impl<'tcx> AllArenas<'tcx> {
         AllArenas {
             global: WorkerLocal::new(|_| GlobalArenas::default()),
             interner: SyncDroplessArena::default(),
-            global_ctxt: None,
         }
     }
 }
@@ -1182,20 +1180,19 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     /// to the context. The closure enforces that the type context and any interned
     /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
     /// reference to the context, to allow formatting values that need it.
-    pub fn create_and_enter<F, R>(s: &'tcx Session,
-                                  cstore: &'tcx CrateStoreDyn,
-                                  local_providers: ty::query::Providers<'tcx>,
-                                  extern_providers: ty::query::Providers<'tcx>,
-                                  arenas: &'tcx mut AllArenas<'tcx>,
-                                  resolutions: ty::Resolutions,
-                                  hir: hir_map::Map<'tcx>,
-                                  on_disk_query_result_cache: query::OnDiskCache<'tcx>,
-                                  crate_name: &str,
-                                  tx: mpsc::Sender<Box<dyn Any + Send>>,
-                                  output_filenames: &OutputFilenames,
-                                  f: F) -> R
-        where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
-    {
+    pub fn create_global_ctxt(
+        s: &'tcx Session,
+        cstore: &'tcx CrateStoreDyn,
+        local_providers: ty::query::Providers<'tcx>,
+        extern_providers: ty::query::Providers<'tcx>,
+        arenas: &'tcx AllArenas<'tcx>,
+        resolutions: ty::Resolutions,
+        hir: hir_map::Map<'tcx>,
+        on_disk_query_result_cache: query::OnDiskCache<'tcx>,
+        crate_name: &str,
+        tx: mpsc::Sender<Box<dyn Any + Send>>,
+        output_filenames: &OutputFilenames,
+    ) -> GlobalCtxt<'tcx> {
         let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| {
             s.fatal(&err);
         });
@@ -1247,7 +1244,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                              Lrc::new(StableVec::new(v)));
         }

-        arenas.global_ctxt = Some(GlobalCtxt {
+        GlobalCtxt {
             sess: s,
             cstore,
             global_arenas: &arenas.global,
@@ -1293,15 +1290,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             alloc_map: Lock::new(interpret::AllocMap::new()),
             tx_to_llvm_workers: Lock::new(tx),
             output_filenames: Arc::new(output_filenames.clone()),
-        });
-
-        let gcx = arenas.global_ctxt.as_ref().unwrap();
-
-        let r = tls::enter_global(gcx, f);
-
-        gcx.queries.record_computed_queries(s);
-
-        r
+        }
     }

     pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
@@ -1985,31 +1974,29 @@ pub mod tls {
     pub fn enter_global<'gcx, F, R>(gcx: &'gcx GlobalCtxt<'gcx>, f: F) -> R
         where F: FnOnce(TyCtxt<'gcx, 'gcx, 'gcx>) -> R
     {
-        with_thread_locals(|| {
         // Update GCX_PTR to indicate there's a GlobalCtxt available
         GCX_PTR.with(|lock| {
             *lock.lock() = gcx as *const _ as usize;
         });
         // Set GCX_PTR back to 0 when we exit
         let _on_drop = OnDrop(move || {
             GCX_PTR.with(|lock| *lock.lock() = 0);
         });

         let tcx = TyCtxt {
             gcx,
             interners: &gcx.global_interners,
             dummy: PhantomData,
         };
         let icx = ImplicitCtxt {
             tcx,
             query: None,
             diagnostics: None,
             layout_depth: 0,
             task_deps: None,
         };
         enter_context(&icx, |_| {
             f(tcx)
-        })
         })
     }
@@ -71,7 +71,7 @@ pub use self::binding::BindingMode;
 pub use self::binding::BindingMode::*;

 pub use self::context::{TyCtxt, FreeRegionInfo, GlobalArenas, AllArenas, tls, keep_local};
-pub use self::context::{Lift, TypeckTables, CtxtInterners};
+pub use self::context::{Lift, TypeckTables, CtxtInterners, GlobalCtxt};
 pub use self::context::{
     UserTypeAnnotationIndex, UserType, CanonicalUserType,
     CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, ResolvedOpaqueTy,
@@ -63,11 +63,12 @@ use std::sync::{mpsc, Arc};
 use rustc::dep_graph::DepGraph;
 use rustc::middle::allocator::AllocatorKind;
 use rustc::middle::cstore::{EncodedMetadata, MetadataLoader};
-use rustc::session::{Session, CompileIncomplete};
+use rustc::session::Session;
 use rustc::session::config::{OutputFilenames, OutputType, PrintRequest, OptLevel};
 use rustc::ty::{self, TyCtxt};
 use rustc::util::time_graph;
 use rustc::util::profiling::ProfileCategory;
+use rustc::util::common::ErrorReported;
 use rustc_mir::monomorphize;
 use rustc_codegen_ssa::ModuleCodegen;
 use rustc_codegen_utils::codegen_backend::CodegenBackend;
@@ -311,7 +312,7 @@ impl CodegenBackend for LlvmCodegenBackend {
         sess: &Session,
         dep_graph: &DepGraph,
         outputs: &OutputFilenames,
-    ) -> Result<(), CompileIncomplete>{
+    ) -> Result<(), ErrorReported>{
         use rustc::util::common::time;
         let (codegen_results, work_products) =
             ongoing_codegen.downcast::
@@ -21,7 +21,8 @@ use flate2::write::DeflateEncoder;

 use syntax::symbol::Symbol;
 use rustc::hir::def_id::LOCAL_CRATE;
-use rustc::session::{Session, CompileIncomplete};
+use rustc::session::Session;
+use rustc::util::common::ErrorReported;
 use rustc::session::config::{CrateType, OutputFilenames, PrintRequest};
 use rustc::ty::TyCtxt;
 use rustc::ty::query::Providers;
@@ -61,7 +62,7 @@ pub trait CodegenBackend {
         sess: &Session,
         dep_graph: &DepGraph,
         outputs: &OutputFilenames,
-    ) -> Result<(), CompileIncomplete>;
+    ) -> Result<(), ErrorReported>;
 }

 pub struct NoLlvmMetadataLoader;
@@ -163,7 +164,7 @@ impl CodegenBackend for MetadataOnlyCodegenBackend {
         sess: &Session,
         _dep_graph: &DepGraph,
         outputs: &OutputFilenames,
-    ) -> Result<(), CompileIncomplete> {
+    ) -> Result<(), ErrorReported> {
         let ongoing_codegen = ongoing_codegen.downcast::<OngoingCodegen>()
             .expect("Expected MetadataOnlyCodegenBackend's OngoingCodegen, found Box<dyn Any>");
         for &crate_type in sess.opts.crate_types.iter() {
src/librustc_data_structures/box_region.rs (new file, 172 lines)
@@ -0,0 +1,172 @@
+use std::cell::Cell;
+use std::marker::PhantomData;
+use std::pin::Pin;
+use std::ops::{Generator, GeneratorState};
+
+#[derive(Copy, Clone)]
+pub struct AccessAction(*mut dyn FnMut());
+
+impl AccessAction {
+    pub fn get(self) -> *mut dyn FnMut() {
+        self.0
+    }
+}
+
+#[derive(Copy, Clone)]
+pub enum Action {
+    Access(AccessAction),
+    Complete,
+}
+
+thread_local!(pub static BOX_REGION_ARG: Cell<Action> = Cell::new(Action::Complete));
+
+pub struct PinnedGenerator<I, A, R> {
+    generator: Pin<Box<dyn Generator<Yield = YieldType<I, A>, Return = R>>>
+}
+
+impl<I, A, R> PinnedGenerator<I, A, R> {
+    pub fn new<
+        T: Generator<Yield = YieldType<I, A>, Return = R> + 'static
+    >(generator: T) -> (I, Self) {
+        let mut result = PinnedGenerator {
+            generator: Box::pin(generator)
+        };
+
+        // Run it to the first yield to set it up
+        let init = match Pin::new(&mut result.generator).resume() {
+            GeneratorState::Yielded(
+                YieldType::Initial(y)
+            ) => y,
+            _ => panic!()
+        };
+
+        (init, result)
+    }
+
+    pub unsafe fn access(&mut self, closure: *mut dyn FnMut()) {
+        BOX_REGION_ARG.with(|i| {
+            i.set(Action::Access(AccessAction(closure)));
+        });
+
+        // Call the generator, which in turn will call the closure in BOX_REGION_ARG
+        if let GeneratorState::Complete(_) = Pin::new(&mut self.generator).resume() {
+            panic!()
+        }
+    }
+
+    pub fn complete(&mut self) -> R {
+        // Tell the generator we want it to complete, consuming it and yielding a result
+        BOX_REGION_ARG.with(|i| {
+            i.set(Action::Complete)
+        });
+
+        let result = Pin::new(&mut self.generator).resume();
+        if let GeneratorState::Complete(r) = result {
+            r
+        } else {
+            panic!()
+        }
+    }
+}
+
+#[derive(PartialEq)]
+pub struct Marker<T>(PhantomData<T>);
+
+impl<T> Marker<T> {
+    pub unsafe fn new() -> Self {
+        Marker(PhantomData)
+    }
+}
+
+pub enum YieldType<I, A> {
+    Initial(I),
+    Accessor(Marker<A>),
+}
+
+#[macro_export]
+#[allow_internal_unstable(fn_traits)]
+macro_rules! declare_box_region_type {
+    (impl $v:vis
+     $name: ident,
+     $yield_type:ty,
+     for($($lifetimes:tt)*),
+     ($($args:ty),*) -> ($reti:ty, $retc:ty)
+    ) => {
+        $v struct $name($crate::box_region::PinnedGenerator<
+            $reti,
+            for<$($lifetimes)*> fn(($($args,)*)),
+            $retc
+        >);
+
+        impl $name {
+            fn new<T: ::std::ops::Generator<Yield = $yield_type, Return = $retc> + 'static>(
+                generator: T
+            ) -> ($reti, Self) {
+                let (initial, pinned) = $crate::box_region::PinnedGenerator::new(generator);
+                (initial, $name(pinned))
+            }
+
+            $v fn access<F: for<$($lifetimes)*> FnOnce($($args,)*) -> R, R>(&mut self, f: F) -> R {
+                // Turn the FnOnce closure into *mut dyn FnMut()
+                // so we can pass it in to the generator using the BOX_REGION_ARG thread local
+                let mut r = None;
+                let mut f = Some(f);
+                let mut_f: &mut dyn for<$($lifetimes)*> FnMut(($($args,)*)) =
+                    &mut |args| {
+                        let f = f.take().unwrap();
+                        r = Some(FnOnce::call_once(f, args));
+                };
+                let mut_f = mut_f as *mut dyn for<$($lifetimes)*> FnMut(($($args,)*));
+
+                // Get the generator to call our closure
+                unsafe {
+                    self.0.access(::std::mem::transmute(mut_f));
+                }
+
+                // Unwrap the result
+                r.unwrap()
+            }
+
+            $v fn complete(mut self) -> $retc {
+                self.0.complete()
+            }
+
+            fn initial_yield(value: $reti) -> $yield_type {
+                $crate::box_region::YieldType::Initial(value)
+            }
+        }
+    };
+
+    ($v:vis $name: ident, for($($lifetimes:tt)*), ($($args:ty),*) -> ($reti:ty, $retc:ty)) => {
+        declare_box_region_type!(
+            impl $v $name,
+            $crate::box_region::YieldType<$reti, for<$($lifetimes)*> fn(($($args,)*))>,
+            for($($lifetimes)*),
+            ($($args),*) -> ($reti, $retc)
+        );
+    };
+}
+
+#[macro_export]
+#[allow_internal_unstable(fn_traits)]
+macro_rules! box_region_allow_access {
+    (for($($lifetimes:tt)*), ($($args:ty),*), ($($exprs:expr),*) ) => {
+        loop {
+            match $crate::box_region::BOX_REGION_ARG.with(|i| i.get()) {
+                $crate::box_region::Action::Access(accessor) => {
+                    let accessor: &mut dyn for<$($lifetimes)*> FnMut($($args),*) = unsafe {
+                        ::std::mem::transmute(accessor.get())
+                    };
+                    (*accessor)(($($exprs),*));
+                    unsafe {
+                        let marker = $crate::box_region::Marker::<
+                            for<$($lifetimes)*> fn(($($args,)*))
+                        >::new();
+                        yield $crate::box_region::YieldType::Accessor(marker)
+                    };
+                }
+                $crate::box_region::Action::Complete => break,
+            }
+        }
+    }
+}
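A hedged usage sketch of `PinnedGenerator` on its own, outside the two macros. Assumptions: a nightly toolchain from this commit's era (where `Generator::resume()` takes no argument) and access to the in-tree `rustc_data_structures` crate; the concrete types (`u32`, `&'static str`) are illustrative only, not taken from the commit.

#![feature(generators, generator_trait)]

use rustc_data_structures::box_region::{PinnedGenerator, YieldType};

fn main() {
    // `A = fn(())` because this toy generator never yields an accessor.
    let (initial, mut pinned) = PinnedGenerator::<u32, fn(()), &'static str>::new(
        static move || {
            // Handed back immediately by PinnedGenerator::new.
            yield YieldType::Initial(41u32);
            // Returned later by complete().
            "finished"
        },
    );
    assert_eq!(initial, 41);
    assert_eq!(pinned.complete(), "finished");
}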
@@ -10,6 +10,8 @@

 #![feature(in_band_lifetimes)]
 #![feature(unboxed_closures)]
+#![feature(generators)]
+#![feature(generator_trait)]
 #![feature(fn_traits)]
 #![feature(unsize)]
 #![feature(specialization)]
@@ -71,6 +73,7 @@ pub mod macros;
 pub mod svh;
 pub mod base_n;
 pub mod bit_set;
+pub mod box_region;
 pub mod const_cstr;
 pub mod flock;
 pub mod fx;
(One file's diff was suppressed by the viewer because it is too large.)
@@ -51,45 +51,42 @@ extern crate syntax;
 extern crate syntax_ext;
 extern crate syntax_pos;

-use driver::CompileController;
 use pretty::{PpMode, UserIdentifiedItem};

+//use rustc_resolve as resolve;
 use rustc_save_analysis as save;
 use rustc_save_analysis::DumpHandler;
-use rustc_data_structures::sync::{self, Lrc, Ordering::SeqCst};
-use rustc_data_structures::OnDrop;
-use rustc::session::{self, config, Session, build_session, CompileResult, DiagnosticOutput};
-use rustc::session::CompileIncomplete;
-use rustc::session::config::{Input, PrintRequest, ErrorOutputType};
+use rustc::session::{config, Session, DiagnosticOutput};
+use rustc::session::config::{Input, PrintRequest, ErrorOutputType, OutputType};
 use rustc::session::config::nightly_options;
 use rustc::session::{early_error, early_warn};
 use rustc::lint::Lint;
 use rustc::lint;
+use rustc::hir::def_id::LOCAL_CRATE;
+use rustc::util::common::{time, ErrorReported, install_panic_hook};
 use rustc_metadata::locator;
 use rustc_metadata::cstore::CStore;
-use rustc::util::common::{time, ErrorReported};
 use rustc_codegen_utils::codegen_backend::CodegenBackend;
-use rustc_interface::util::{self, get_codegen_sysroot};
+use rustc_interface::interface;
+use rustc_interface::util::get_codegen_sysroot;
+use rustc_data_structures::sync::SeqCst;

 use serialize::json::ToJson;

-use std::any::Any;
 use std::borrow::Cow;
 use std::cmp::max;
 use std::default::Default;
 use std::env;
-use std::error::Error;
 use std::ffi::OsString;
-use std::fmt::{self, Display};
 use std::io::{self, Read, Write};
-use std::panic;
+use std::panic::{self, catch_unwind};
 use std::path::PathBuf;
 use std::process::{self, Command, Stdio};
 use std::str;
-use std::thread;
+use std::mem;

 use syntax::ast;
-use syntax::source_map::{SourceMap, FileLoader, RealFileLoader};
+use syntax::source_map::FileLoader;
 use syntax::feature_gate::{GatedCfg, UnstableFeatures};
 use syntax::parse::{self, PResult};
 use syntax_pos::{DUMMY_SP, MultiSpan, FileName};
@@ -97,14 +94,13 @@ use syntax_pos::{DUMMY_SP, MultiSpan, FileName};
 #[cfg(test)]
 mod test;

-pub mod driver;
 pub mod pretty;

 /// Exit status code used for successful compilation and help output.
-pub const EXIT_SUCCESS: isize = 0;
+pub const EXIT_SUCCESS: i32 = 0;

 /// Exit status code used for compilation failures and invalid flags.
-pub const EXIT_FAILURE: isize = 1;
+pub const EXIT_FAILURE: i32 = 1;

 const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.\
                               md#bug-reports";
@@ -115,172 +111,290 @@ const ICE_REPORT_COMPILER_FLAGS_EXCLUDE: &[&str] = &["metadata", "extra-filename

 const ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE: &[&str] = &["incremental"];

-pub fn abort_on_err<T>(result: Result<T, CompileIncomplete>, sess: &Session) -> T {
+pub fn source_name(input: &Input) -> FileName {
+    match *input {
+        Input::File(ref ifile) => ifile.clone().into(),
+        Input::Str { ref name, .. } => name.clone(),
+    }
+}
+
+pub fn abort_on_err<T>(result: Result<T, ErrorReported>, sess: &Session) -> T {
     match result {
-        Err(CompileIncomplete::Errored(ErrorReported)) => {
+        Err(..) => {
             sess.abort_if_errors();
             panic!("error reported but abort_if_errors didn't abort???");
         }
-        Err(CompileIncomplete::Stopped) => {
-            sess.fatal("compilation terminated");
-        }
         Ok(x) => x,
     }
 }

-pub fn run<F>(run_compiler: F) -> isize
-    where F: FnOnce() -> (CompileResult, Option<Session>) + Send + 'static
-{
-    let result = monitor(move || {
-        syntax::with_globals(|| {
-            let (result, session) = run_compiler();
-            if let Err(CompileIncomplete::Errored(_)) = result {
-                match session {
-                    Some(sess) => {
-                        sess.abort_if_errors();
-                        panic!("error reported but abort_if_errors didn't abort???");
-                    }
-                    None => {
-                        let emitter =
-                            errors::emitter::EmitterWriter::stderr(
-                                errors::ColorConfig::Auto,
-                                None,
-                                true,
-                                false
-                            );
-                        let handler = errors::Handler::with_emitter(true, None, Box::new(emitter));
-                        handler.emit(&MultiSpan::new(),
-                                     "aborting due to previous error(s)",
-                                     errors::Level::Fatal);
-                        panic::resume_unwind(Box::new(errors::FatalErrorMarker));
-                    }
-                }
-            }
-        });
-    });
-
-    match result {
-        Ok(()) => EXIT_SUCCESS,
-        Err(_) => EXIT_FAILURE,
+pub trait Callbacks {
+    /// Called before creating the compiler instance
+    fn config(&mut self, _config: &mut interface::Config) {}
+    /// Called after parsing and returns true to continue execution
+    fn after_parsing(&mut self, _compiler: &interface::Compiler) -> bool {
+        true
+    }
+    /// Called after analysis and returns true to continue execution
+    fn after_analysis(&mut self, _compiler: &interface::Compiler) -> bool {
+        true
     }
 }

+pub struct DefaultCallbacks;
+
+impl Callbacks for DefaultCallbacks {}
+
 // Parse args and run the compiler. This is the primary entry point for rustc.
 // See comments on CompilerCalls below for details about the callbacks argument.
 // The FileLoader provides a way to load files from sources other than the file system.
-pub fn run_compiler<'a>(args: &[String],
-                        callbacks: Box<dyn CompilerCalls<'a> + sync::Send + 'a>,
-                        file_loader: Option<Box<dyn FileLoader + Send + Sync + 'static>>,
-                        emitter_dest: Option<Box<dyn Write + Send>>)
-                        -> (CompileResult, Option<Session>)
-{
+pub fn run_compiler(
+    args: &[String],
+    callbacks: &mut (dyn Callbacks + Send),
+    file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
+    emitter: Option<Box<dyn Write + Send>>
+) -> interface::Result<()> {
+    let diagnostic_output = emitter.map(|emitter| DiagnosticOutput::Raw(emitter))
+                                   .unwrap_or(DiagnosticOutput::Default);
     let matches = match handle_options(args) {
         Some(matches) => matches,
-        None => return (Ok(()), None),
+        None => return Ok(()),
     };

+    install_panic_hook();
+
     let (sopts, cfg) = config::build_session_options_and_crate_config(&matches);

-    driver::spawn_thread_pool(sopts, |sopts| {
-        run_compiler_with_pool(matches, sopts, cfg, callbacks, file_loader, emitter_dest)
-    })
-}
-
-fn run_compiler_with_pool<'a>(
-    matches: getopts::Matches,
-    sopts: config::Options,
-    cfg: ast::CrateConfig,
-    mut callbacks: Box<dyn CompilerCalls<'a> + sync::Send + 'a>,
-    file_loader: Option<Box<dyn FileLoader + Send + Sync + 'static>>,
-    emitter_dest: Option<Box<dyn Write + Send>>
-) -> (CompileResult, Option<Session>) {
-    macro_rules! do_or_return {($expr: expr, $sess: expr) => {
-        match $expr {
-            Compilation::Stop => return (Ok(()), $sess),
-            Compilation::Continue => {}
-        }
-    }}
-
-    let descriptions = diagnostics_registry();
-
-    do_or_return!(callbacks.early_callback(&matches,
-                                           &sopts,
-                                           &cfg,
-                                           &descriptions,
-                                           sopts.error_format),
-                                           None);
+    let mut dummy_config = |sopts, cfg, diagnostic_output| {
+        let mut config = interface::Config {
+            opts: sopts,
+            crate_cfg: cfg,
+            input: Input::File(PathBuf::new()),
+            input_path: None,
+            output_file: None,
+            output_dir: None,
+            file_loader: None,
+            diagnostic_output,
+            stderr: None,
+            crate_name: None,
+            lint_caps: Default::default(),
+        };
+        callbacks.config(&mut config);
+        config
+    };
+
+    if let Some(ref code) = matches.opt_str("explain") {
+        handle_explain(code, sopts.error_format);
+        return Ok(());
+    }

     let (odir, ofile) = make_output(&matches);
     let (input, input_file_path, input_err) = match make_input(&matches.free) {
-        Some((input, input_file_path, input_err)) => {
-            let (input, input_file_path) = callbacks.some_input(input, input_file_path);
-            (input, input_file_path, input_err)
-        },
-        None => match callbacks.no_input(&matches, &sopts, &cfg, &odir, &ofile, &descriptions) {
-            Some((input, input_file_path)) => (input, input_file_path, None),
-            None => return (Ok(()), None),
-        },
-    };
-
-    let loader = file_loader.unwrap_or(box RealFileLoader);
-    let source_map = Lrc::new(SourceMap::with_file_loader(loader, sopts.file_path_mapping()));
-    let mut sess = session::build_session_with_source_map(
-        sopts,
-        input_file_path.clone(),
-        descriptions,
-        source_map,
-        emitter_dest.map(|e| DiagnosticOutput::Raw(e)).unwrap_or(DiagnosticOutput::Default),
-        Default::default(),
-    );
+        Some(v) => v,
+        None => {
+            match matches.free.len() {
+                0 => {
+                    let config = dummy_config(sopts, cfg, diagnostic_output);
+                    interface::run_compiler(config, |compiler| {
+                        let sopts = &compiler.session().opts;
+                        if sopts.describe_lints {
+                            describe_lints(
+                                compiler.session(),
+                                &*compiler.session().lint_store.borrow(),
+                                false
+                            );
+                            return;
+                        }
+                        let should_stop = RustcDefaultCalls::print_crate_info(
+                            &***compiler.codegen_backend(),
+                            compiler.session(),
+                            None,
+                            &odir,
+                            &ofile
+                        );
+
+                        if should_stop == Compilation::Stop {
+                            return;
+                        }
+                        early_error(sopts.error_format, "no input filename given")
+                    });
+                    return Ok(());
+                }
+                1 => panic!("make_input should have provided valid inputs"),
+                _ => early_error(sopts.error_format, &format!(
+                    "multiple input filenames provided (first two filenames are `{}` and `{}`)",
+                    matches.free[0],
+                    matches.free[1],
+                )),
+            }
+        }
+    };

     if let Some(err) = input_err {
         // Immediately stop compilation if there was an issue reading
         // the input (for example if the input stream is not UTF-8).
-        sess.err(&err.to_string());
-        return (Err(CompileIncomplete::Stopped), Some(sess));
+        interface::run_compiler(dummy_config(sopts, cfg, diagnostic_output), |compiler| {
+            compiler.session().err(&err.to_string());
+        });
+        return Err(ErrorReported);
     }

-    let codegen_backend = util::get_codegen_backend(&sess);
-
-    rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
-
-    let mut cfg = config::build_configuration(&sess, cfg);
-    util::add_configuration(&mut cfg, &sess, &*codegen_backend);
-    sess.parse_sess.config = cfg;
-
-    let result = {
-        let plugins = sess.opts.debugging_opts.extra_plugins.clone();
-
-        let cstore = CStore::new(codegen_backend.metadata_loader());
-
-        do_or_return!(callbacks.late_callback(&*codegen_backend,
-                                              &matches,
-                                              &sess,
-                                              &cstore,
-                                              &input,
-                                              &odir,
-                                              &ofile), Some(sess));
-
-        let _sess_abort_error = OnDrop(|| sess.diagnostic().print_error_count());
-
-        let control = callbacks.build_controller(&sess, &matches);
-
-        driver::compile_input(codegen_backend,
-                              &sess,
-                              &cstore,
-                              &input_file_path,
-                              &input,
-                              &odir,
-                              &ofile,
-                              Some(plugins),
-                              &control)
+    let mut config = interface::Config {
+        opts: sopts,
+        crate_cfg: cfg,
+        input,
+        input_path: input_file_path,
+        output_file: ofile,
+        output_dir: odir,
+        file_loader,
+        diagnostic_output,
+        stderr: None,
+        crate_name: None,
+        lint_caps: Default::default(),
     };

-    if sess.opts.debugging_opts.self_profile {
-        sess.profiler(|p| p.dump_raw_events(&sess.opts));
-    }
-
-    (result, Some(sess))
+    callbacks.config(&mut config);
+
+    interface::run_compiler(config, |compiler| {
+        let sess = compiler.session();
+        let should_stop = RustcDefaultCalls::print_crate_info(
+            &***compiler.codegen_backend(),
+            sess,
+            Some(compiler.input()),
+            compiler.output_dir(),
+            compiler.output_file(),
+        ).and_then(|| RustcDefaultCalls::list_metadata(
+            sess,
+            compiler.cstore(),
+            &matches,
+            compiler.input()
+        ));
+
+        if should_stop == Compilation::Stop {
+            return sess.compile_status();
+        }
+
+        let pretty_info = parse_pretty(sess, &matches);
+
+        compiler.parse()?;
+
+        if let Some((ppm, opt_uii)) = pretty_info {
+            if ppm.needs_ast_map(&opt_uii) {
+                pretty::visit_crate(sess, &mut compiler.parse()?.peek_mut(), ppm);
+                compiler.global_ctxt()?.peek_mut().enter(|tcx| {
+                    let expanded_crate = compiler.expansion()?.take().0;
+                    pretty::print_after_hir_lowering(
+                        tcx,
+                        compiler.input(),
+                        &expanded_crate,
+                        ppm,
+                        opt_uii.clone(),
+                        compiler.output_file().as_ref().map(|p| &**p),
+                    );
+                    Ok(())
+                })?;
+                return sess.compile_status();
+            } else {
+                let mut krate = compiler.parse()?.take();
+                pretty::visit_crate(sess, &mut krate, ppm);
+                pretty::print_after_parsing(
+                    sess,
+                    &compiler.input(),
+                    &krate,
+                    ppm,
+                    compiler.output_file().as_ref().map(|p| &**p),
+                );
+                return sess.compile_status();
+            }
+        }
+
+        if !callbacks.after_parsing(compiler) {
+            return sess.compile_status();
+        }
+
+        if sess.opts.debugging_opts.parse_only ||
+           sess.opts.debugging_opts.show_span.is_some() ||
+           sess.opts.debugging_opts.ast_json_noexpand {
+            return sess.compile_status();
+        }
+
+        compiler.register_plugins()?;
+
+        // Lint plugins are registered; now we can process command line flags.
+        if sess.opts.describe_lints {
+            describe_lints(&sess, &sess.lint_store.borrow(), true);
+            return sess.compile_status();
+        }
+
+        compiler.prepare_outputs()?;
+
+        if sess.opts.output_types.contains_key(&OutputType::DepInfo)
+            && sess.opts.output_types.len() == 1
+        {
+            return sess.compile_status();
+        }
+
+        compiler.global_ctxt()?;
+
+        if sess.opts.debugging_opts.no_analysis ||
+           sess.opts.debugging_opts.ast_json {
+            return sess.compile_status();
+        }
+
+        if sess.opts.debugging_opts.save_analysis {
+            let expanded_crate = compiler.expansion()?.take().0;
+
+            let crate_name = compiler.crate_name()?.peek().clone();
+            compiler.global_ctxt()?.peek_mut().enter(|tcx| {
+                let result = tcx.analysis(LOCAL_CRATE);
+
+                time(sess, "save analysis", || {
+                    // FIXME: Should this run even with analysis errors?
+                    save::process_crate(
+                        tcx,
+                        &expanded_crate,
+                        &crate_name,
+                        &compiler.input(),
+                        None,
+                        DumpHandler::new(compiler.output_dir().as_ref().map(|p| &**p), &crate_name)
+                    )
+                });
+
+                result
+            })?;
+        } else {
+            // Drop AST after creating GlobalCtxt to free memory
+            mem::drop(compiler.expansion()?.take());
+        }
+        compiler.global_ctxt()?.peek_mut().enter(|tcx| tcx.analysis(LOCAL_CRATE))?;
+
+        if !callbacks.after_analysis(compiler) {
+            return sess.compile_status();
+        }
+
+        compiler.ongoing_codegen()?;
+
+        // Drop GlobalCtxt after starting codegen to free memory
+        mem::drop(compiler.global_ctxt()?.take());
+
+        if sess.opts.debugging_opts.print_type_sizes {
+            sess.code_stats.borrow().print_type_sizes();
+        }
+
+        compiler.link()?;
+
+        if sess.opts.debugging_opts.perf_stats {
+            sess.print_perf_stats();
+        }
+
+        if sess.print_fuel_crate.is_some() {
+            eprintln!("Fuel used by {}: {}",
+                      sess.print_fuel_crate.as_ref().unwrap(),
+                      sess.print_fuel.load(SeqCst));
+        }
+
+        Ok(())
+    })
 }

 #[cfg(unix)]
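For orientation, a hedged sketch of what a minimal out-of-tree driver could look like against the `Callbacks`/`run_compiler` API added above. Assumptions: a nightly toolchain matching this revision, `rustc_private`, and that `interface::Config::crate_cfg` is the `FxHashSet<(String, Option<String>)>` shown in this diff; sysroot handling and richer error reporting are elided, and the struct name `MyCallbacks` is invented for illustration.

#![feature(rustc_private)]
extern crate rustc_driver;
extern crate rustc_interface;

use rustc_driver::{run_compiler, Callbacks};
use rustc_interface::interface;

// A callback implementation that injects a cfg flag and stops after analysis.
struct MyCallbacks;

impl Callbacks for MyCallbacks {
    fn config(&mut self, config: &mut interface::Config) {
        // `crate_cfg` holds plain (String, Option<String>) pairs at this point.
        config.crate_cfg.insert(("my_custom_cfg".to_string(), None));
    }

    fn after_analysis(&mut self, _compiler: &interface::Compiler) -> bool {
        false // returning false stops compilation after analysis
    }
}

fn main() {
    let args: Vec<String> = std::env::args().collect();
    let exit_code = match run_compiler(&args, &mut MyCallbacks, None, None) {
        Ok(()) => 0,
        Err(_) => 1,
    };
    std::process::exit(exit_code);
}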
@@ -363,72 +477,6 @@ impl Compilation {
     }
 }

-/// A trait for customizing the compilation process. Offers a number of hooks for
-/// executing custom code or customizing input.
-pub trait CompilerCalls<'a> {
-    /// Hook for a callback early in the process of handling arguments. This will
-    /// be called straight after options have been parsed but before anything
-    /// else (e.g., selecting input and output).
-    fn early_callback(&mut self,
-                      _: &getopts::Matches,
-                      _: &config::Options,
-                      _: &ast::CrateConfig,
-                      _: &errors::registry::Registry,
-                      _: ErrorOutputType)
-                      -> Compilation {
-        Compilation::Continue
-    }
-
-    /// Hook for a callback late in the process of handling arguments. This will
-    /// be called just before actual compilation starts (and before build_controller
-    /// is called), after all arguments etc. have been completely handled.
-    fn late_callback(&mut self,
-                     _: &dyn CodegenBackend,
-                     _: &getopts::Matches,
-                     _: &Session,
-                     _: &CStore,
-                     _: &Input,
-                     _: &Option<PathBuf>,
-                     _: &Option<PathBuf>)
-                     -> Compilation {
-        Compilation::Continue
-    }
-
-    /// Called after we extract the input from the arguments. Gives the implementer
-    /// an opportunity to change the inputs or to add some custom input handling.
-    /// The default behaviour is to simply pass through the inputs.
-    fn some_input(&mut self,
-                  input: Input,
-                  input_path: Option<PathBuf>)
-                  -> (Input, Option<PathBuf>) {
-        (input, input_path)
-    }
-
-    /// Called after we extract the input from the arguments if there is no valid
-    /// input. Gives the implementer an opportunity to supply alternate input (by
-    /// returning a Some value) or to add custom behaviour for this error such as
-    /// emitting error messages. Returning None will cause compilation to stop
-    /// at this point.
-    fn no_input(&mut self,
-                _: &getopts::Matches,
-                _: &config::Options,
-                _: &ast::CrateConfig,
-                _: &Option<PathBuf>,
-                _: &Option<PathBuf>,
-                _: &errors::registry::Registry)
-                -> Option<(Input, Option<PathBuf>)> {
-        None
-    }
-
-    // Create a CompilController struct for controlling the behaviour of
-    // compilation.
-    fn build_controller(
-        self: Box<Self>,
-        _: &Session,
-        _: &getopts::Matches
-    ) -> CompileController<'a>;
-}
-
 /// CompilerCalls instance for a regular rustc build.
 #[derive(Copy, Clone)]
 pub struct RustcDefaultCalls;
@@ -532,178 +580,6 @@ fn show_content_with_pager(content: &String) {
         }
     }
 }

-impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
-    fn early_callback(&mut self,
-                      matches: &getopts::Matches,
-                      _: &config::Options,
-                      _: &ast::CrateConfig,
-                      _: &errors::registry::Registry,
-                      output: ErrorOutputType)
-                      -> Compilation {
-        if let Some(ref code) = matches.opt_str("explain") {
-            handle_explain(code, output);
-            return Compilation::Stop;
-        }
-
-        Compilation::Continue
-    }
-
-    fn no_input(&mut self,
-                matches: &getopts::Matches,
-                sopts: &config::Options,
-                cfg: &ast::CrateConfig,
-                odir: &Option<PathBuf>,
-                ofile: &Option<PathBuf>,
-                descriptions: &errors::registry::Registry)
-                -> Option<(Input, Option<PathBuf>)> {
-        match matches.free.len() {
-            0 => {
-                let mut sess = build_session(sopts.clone(),
-                                             None,
-                                             descriptions.clone());
-                if sopts.describe_lints {
-                    let mut ls = lint::LintStore::new();
-                    rustc_lint::register_builtins(&mut ls, Some(&sess));
-                    describe_lints(&sess, &ls, false);
-                    return None;
-                }
-                rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
-                let mut cfg = config::build_configuration(&sess, cfg.clone());
-                let codegen_backend = util::get_codegen_backend(&sess);
-                util::add_configuration(&mut cfg, &sess, &*codegen_backend);
-                sess.parse_sess.config = cfg;
-                let should_stop = RustcDefaultCalls::print_crate_info(
-                    &*codegen_backend,
-                    &sess,
-                    None,
-                    odir,
-                    ofile
-                );
-
-                if should_stop == Compilation::Stop {
-                    return None;
-                }
-                early_error(sopts.error_format, "no input filename given");
-            }
-            1 => panic!("make_input should have provided valid inputs"),
-            _ =>
-                early_error(
-                    sopts.error_format,
-                    &format!(
-                        "multiple input filenames provided (first two filenames are `{}` and `{}`)",
-                        matches.free[0],
-                        matches.free[1],
-                    ),
-                )
-        }
-    }
-
-    fn late_callback(&mut self,
-                     codegen_backend: &dyn CodegenBackend,
-                     matches: &getopts::Matches,
-                     sess: &Session,
-                     cstore: &CStore,
-                     input: &Input,
-                     odir: &Option<PathBuf>,
-                     ofile: &Option<PathBuf>)
-                     -> Compilation {
-        RustcDefaultCalls::print_crate_info(codegen_backend, sess, Some(input), odir, ofile)
-            .and_then(|| RustcDefaultCalls::list_metadata(sess, cstore, matches, input))
-    }
-
-    fn build_controller(self: Box<Self>,
-                        sess: &Session,
-                        matches: &getopts::Matches)
-                        -> CompileController<'a> {
-        let mut control = CompileController::basic();
-
-        control.keep_ast = sess.opts.debugging_opts.keep_ast;
-        control.continue_parse_after_error = sess.opts.debugging_opts.continue_parse_after_error;
-
-        if let Some((ppm, opt_uii)) = parse_pretty(sess, matches) {
-            if ppm.needs_ast_map(&opt_uii) {
-                control.after_hir_lowering.stop = Compilation::Stop;
-
-                control.after_parse.callback = box move |state| {
-                    let mut krate = state.krate.take().unwrap();
-                    pretty::visit_crate(state.session, &mut krate, ppm);
-                    state.krate = Some(krate);
-                };
-                control.after_hir_lowering.callback = box move |state| {
-                    pretty::print_after_hir_lowering(state.session,
-                                                     state.cstore.unwrap(),
-                                                     state.hir_map.unwrap(),
-                                                     state.resolutions.unwrap(),
-                                                     state.input,
-                                                     &state.expanded_crate.take().unwrap(),
-                                                     state.crate_name.unwrap(),
-                                                     ppm,
-                                                     state.output_filenames.unwrap(),
-                                                     opt_uii.clone(),
-                                                     state.out_file);
-                };
-            } else {
-                control.after_parse.stop = Compilation::Stop;
-
-                control.after_parse.callback = box move |state| {
-                    let mut krate = state.krate.take().unwrap();
-                    pretty::visit_crate(state.session, &mut krate, ppm);
-                    pretty::print_after_parsing(state.session,
-                                                state.input,
-                                                &krate,
-                                                ppm,
-                                                state.out_file);
-                };
-            }
-
-            return control;
-        }
-
-        if sess.opts.debugging_opts.parse_only ||
-           sess.opts.debugging_opts.show_span.is_some() ||
-           sess.opts.debugging_opts.ast_json_noexpand {
-            control.after_parse.stop = Compilation::Stop;
-        }
-
-        if sess.opts.debugging_opts.no_analysis ||
-           sess.opts.debugging_opts.ast_json {
-            control.after_hir_lowering.stop = Compilation::Stop;
-        }
-
-        if sess.opts.debugging_opts.save_analysis {
-            enable_save_analysis(&mut control);
-        }
-
-        if sess.print_fuel_crate.is_some() {
-            let old_callback = control.compilation_done.callback;
-            control.compilation_done.callback = box move |state| {
-                old_callback(state);
-                let sess = state.session;
-                eprintln!("Fuel used by {}: {}",
-                          sess.print_fuel_crate.as_ref().unwrap(),
-                          sess.print_fuel.load(SeqCst));
-            }
-        }
-        control
-    }
-}
-
-pub fn enable_save_analysis(control: &mut CompileController) {
-    control.keep_ast = true;
-    control.after_analysis.callback = box |state| {
-        time(state.session, "save analysis", || {
-            save::process_crate(state.tcx.unwrap(),
-                                state.expanded_crate.unwrap(),
-                                state.crate_name.unwrap(),
-                                state.input,
-                                None,
-                                DumpHandler::new(state.out_dir,
-                                                 state.crate_name.unwrap()))
-        });
-    };
-    control.after_analysis.run_callback_on_error = true;
-}
-
 impl RustcDefaultCalls {
     pub fn list_metadata(sess: &Session,
                          cstore: &CStore,
@ -1199,49 +1075,6 @@ fn parse_crate_attrs<'a>(sess: &'a Session, input: &Input) -> PResult<'a, Vec<as
|
||||||
}
|
}
|
||||||
}
|
}
|
-
-// Temporarily have stack size set to 32MB to deal with various crates with long method
-// chains or deep syntax trees.
-// FIXME(oli-obk): get https://github.com/rust-lang/rust/pull/55617 the finish line
-const STACK_SIZE: usize = 32 * 1024 * 1024; // 32MB
-
-/// Runs `f` in a suitable thread for running `rustc`; returns a `Result` with either the return
-/// value of `f` or -- if a panic occurs -- the panic value.
-///
-/// This version applies the given name to the thread. This is used by rustdoc to ensure consistent
-/// doctest output across platforms and executions.
-pub fn in_named_rustc_thread<F, R>(name: String, f: F) -> Result<R, Box<dyn Any + Send>>
-    where F: FnOnce() -> R + Send + 'static,
-          R: Send + 'static,
-{
-    // We need a thread for soundness of thread local storage in rustc. For debugging purposes
-    // we allow an escape hatch where everything runs on the main thread.
-    if env::var_os("RUSTC_UNSTABLE_NO_MAIN_THREAD").is_none() {
-        let mut cfg = thread::Builder::new().name(name);
-
-        // If the env is trying to override the stack size then *don't* set it explicitly.
-        // The libstd thread impl will fetch the `RUST_MIN_STACK` env var itself.
-        if env::var_os("RUST_MIN_STACK").is_none() {
-            cfg = cfg.stack_size(STACK_SIZE);
-        }
-
-        let thread = cfg.spawn(f);
-        thread.unwrap().join()
-    } else {
-        let f = panic::AssertUnwindSafe(f);
-        panic::catch_unwind(f)
-    }
-}
-
-/// Runs `f` in a suitable thread for running `rustc`; returns a
-/// `Result` with either the return value of `f` or -- if a panic
-/// occurs -- the panic value.
-pub fn in_rustc_thread<F, R>(f: F) -> Result<R, Box<dyn Any + Send>>
-    where F: FnOnce() -> R + Send + 'static,
-          R: Send + 'static,
-{
-    in_named_rustc_thread("rustc".to_string(), f)
-}
-
 /// Gets a list of extra command-line flags provided by the user, as strings.
 ///
 /// This function is used during ICEs to show more information useful for
@@ -1296,28 +1129,15 @@ fn extra_compiler_flags() -> Option<(Vec<String>, bool)> {
     }
 }
-
-#[derive(Debug)]
-pub struct CompilationFailure;
-
-impl Error for CompilationFailure {}
-
-impl Display for CompilationFailure {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "compilation had errors")
-    }
-}
-
 /// Runs a procedure which will detect panics in the compiler and print nicer
 /// error messages rather than just failing the test.
 ///
 /// The diagnostic emitter yielded to the procedure should be used for reporting
 /// errors of the compiler.
-pub fn monitor<F: FnOnce() + Send + 'static>(f: F) -> Result<(), CompilationFailure> {
-    in_rustc_thread(move || {
-        f()
-    }).map_err(|value| {
+pub fn report_ices_to_stderr_if_any<F: FnOnce() -> R, R>(f: F) -> Result<R, ErrorReported> {
+    catch_unwind(panic::AssertUnwindSafe(f)).map_err(|value| {
         if value.is::<errors::FatalErrorMarker>() {
-            CompilationFailure
+            ErrorReported
         } else {
             // Thread panicked without emitting a fatal diagnostic
             eprintln!("");
@@ -1364,25 +1184,6 @@ pub fn monitor<F: FnOnce() + Send + 'static>(f: F) -> Result<(), CompilationFail
     })
 }
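Purely as an illustration of the pattern the new function relies on (catch the panic, then inspect its payload to tell "error already reported" from an unexpected ICE), here is a minimal, std-only sketch. `FatalMarker` and `run_and_classify` are made-up stand-ins, not rustc APIs; rustc's real marker is the internal `errors::FatalErrorMarker` shown in the hunk above.

use std::panic::{self, catch_unwind, resume_unwind};

// Stand-in for rustc's internal errors::FatalErrorMarker: the payload a fatal
// diagnostic panics with, so the driver can tell "error already reported"
// apart from an unexpected panic (an ICE).
struct FatalMarker;

fn run_and_classify<R>(f: impl FnOnce() -> R) -> Result<R, ()> {
    catch_unwind(panic::AssertUnwindSafe(f)).map_err(|payload| {
        if payload.downcast_ref::<FatalMarker>().is_none() {
            // Not the marker: an unexpected panic, so report it loudly.
            eprintln!("unexpected panic");
        }
        // Either way the caller just sees "compilation failed".
    })
}

fn main() {
    assert_eq!(run_and_classify(|| 40 + 2), Ok(42));
    let failed = run_and_classify(|| -> i32 { resume_unwind(Box::new(FatalMarker)) });
    assert!(failed.is_err());
}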
-pub fn diagnostics_registry() -> errors::registry::Registry {
-    use errors::registry::Registry;
-
-    let mut all_errors = Vec::new();
-    all_errors.extend_from_slice(&rustc::DIAGNOSTICS);
-    all_errors.extend_from_slice(&rustc_typeck::DIAGNOSTICS);
-    all_errors.extend_from_slice(&rustc_resolve::DIAGNOSTICS);
-    all_errors.extend_from_slice(&rustc_privacy::DIAGNOSTICS);
-    // FIXME: need to figure out a way to get these back in here
-    // all_errors.extend_from_slice(get_codegen_backend(sess).diagnostics());
-    all_errors.extend_from_slice(&rustc_metadata::DIAGNOSTICS);
-    all_errors.extend_from_slice(&rustc_passes::DIAGNOSTICS);
-    all_errors.extend_from_slice(&rustc_plugin::DIAGNOSTICS);
-    all_errors.extend_from_slice(&rustc_mir::DIAGNOSTICS);
-    all_errors.extend_from_slice(&syntax::DIAGNOSTICS);
-
-    Registry::new(&all_errors)
-}
-
 /// This allows tools to enable rust logging without having to magically match rustc's
 /// log crate version
 pub fn init_rustc_env_logger() {
@@ -1391,17 +1192,17 @@ pub fn init_rustc_env_logger() {

 pub fn main() {
     init_rustc_env_logger();
-    let result = run(|| {
+    let result = report_ices_to_stderr_if_any(|| {
         let args = env::args_os().enumerate()
             .map(|(i, arg)| arg.into_string().unwrap_or_else(|arg| {
                 early_error(ErrorOutputType::default(),
                             &format!("Argument {} is not valid Unicode: {:?}", i, arg))
             }))
             .collect::<Vec<_>>();
-        run_compiler(&args,
-                     Box::new(RustcDefaultCalls),
-                     None,
-                     None)
+        run_compiler(&args, &mut DefaultCallbacks, None, None)
+    }).and_then(|result| result);
+    process::exit(match result {
+        Ok(_) => EXIT_SUCCESS,
+        Err(_) => EXIT_FAILURE,
     });
-    process::exit(result as i32);
 }
@@ -6,13 +6,14 @@ use rustc::hir;
 use rustc::hir::map as hir_map;
 use rustc::hir::map::blocks;
 use rustc::hir::print as pprust_hir;
+use rustc::hir::def_id::LOCAL_CRATE;
 use rustc::session::Session;
-use rustc::session::config::{Input, OutputFilenames};
-use rustc::ty::{self, TyCtxt, Resolutions, AllArenas};
-use rustc_interface::util;
+use rustc::session::config::Input;
+use rustc::ty::{self, TyCtxt};
+use rustc::util::common::ErrorReported;
+use rustc_interface::util::ReplaceBodyWithLoop;
 use rustc_borrowck as borrowck;
 use rustc_borrowck::graphviz as borrowck_dot;
-use rustc_metadata::cstore::CStore;
 use rustc_mir::util::{write_mir_pretty, write_mir_graphviz};

 use syntax::ast;
@@ -35,7 +36,8 @@ pub use self::PpSourceMode::*;
 pub use self::PpMode::*;
 use self::NodesMatchingUII::*;
 use abort_on_err;
-use driver;
+use source_name;

 #[derive(Copy, Clone, PartialEq, Debug)]
 pub enum PpSourceMode {
@@ -154,7 +156,7 @@ impl PpSourceMode {
     /// Constructs a `PrinterSupport` object and passes it to `f`.
     fn call_with_pp_support<'tcx, A, F>(&self,
                                         sess: &'tcx Session,
-                                        hir_map: Option<&hir_map::Map<'tcx>>,
+                                        tcx: Option<TyCtxt<'tcx, 'tcx, 'tcx>>,
                                         f: F)
                                         -> A
         where F: FnOnce(&dyn PrinterSupport) -> A
@@ -163,7 +165,7 @@ impl PpSourceMode {
             PpmNormal | PpmEveryBodyLoops | PpmExpanded => {
                 let annotation = NoAnn {
                     sess,
-                    hir_map: hir_map.map(|m| m.clone()),
+                    tcx,
                 };
                 f(&annotation)
             }
@@ -171,7 +173,7 @@ impl PpSourceMode {
             PpmIdentified | PpmExpandedIdentified => {
                 let annotation = IdentifiedAnnotation {
                     sess,
-                    hir_map: hir_map.map(|m| m.clone()),
+                    tcx,
                 };
                 f(&annotation)
             }
@@ -186,12 +188,7 @@ impl PpSourceMode {
     }
     fn call_with_pp_support_hir<'tcx, A, F>(
         &self,
-        sess: &'tcx Session,
-        cstore: &'tcx CStore,
-        hir_map: &hir_map::Map<'tcx>,
-        resolutions: &Resolutions,
-        output_filenames: &OutputFilenames,
-        id: &str,
+        tcx: TyCtxt<'tcx, 'tcx, 'tcx>,
         f: F
     ) -> A
         where F: FnOnce(&dyn HirPrinterSupport, &hir::Crate) -> A
@@ -199,42 +196,29 @@ impl PpSourceMode {
         match *self {
             PpmNormal => {
                 let annotation = NoAnn {
-                    sess,
-                    hir_map: Some(hir_map.clone()),
+                    sess: tcx.sess,
+                    tcx: Some(tcx),
                 };
-                f(&annotation, hir_map.forest.krate())
+                f(&annotation, tcx.hir().forest.krate())
             }

             PpmIdentified => {
                 let annotation = IdentifiedAnnotation {
-                    sess,
-                    hir_map: Some(hir_map.clone()),
+                    sess: tcx.sess,
+                    tcx: Some(tcx),
                 };
-                f(&annotation, hir_map.forest.krate())
+                f(&annotation, tcx.hir().forest.krate())
             }
             PpmTyped => {
-                let control = &driver::CompileController::basic();
-                let codegen_backend = util::get_codegen_backend(sess);
-                let mut arenas = AllArenas::new();
-                driver::phase_3_run_analysis_passes(&*codegen_backend,
-                                                    control,
-                                                    sess,
-                                                    cstore,
-                                                    hir_map.clone(),
-                                                    resolutions.clone(),
-                                                    &mut arenas,
-                                                    id,
-                                                    output_filenames,
-                                                    |tcx, _, result| {
-                    abort_on_err(result, tcx.sess);
-                    let empty_tables = ty::TypeckTables::empty(None);
-                    let annotation = TypedAnnotation {
-                        tcx,
-                        tables: Cell::new(&empty_tables)
-                    };
-                    tcx.dep_graph.with_ignore(|| {
-                        f(&annotation, hir_map.forest.krate())
-                    })
+                abort_on_err(tcx.analysis(LOCAL_CRATE), tcx.sess);
+                let empty_tables = ty::TypeckTables::empty(None);
+                let annotation = TypedAnnotation {
+                    tcx,
+                    tables: Cell::new(&empty_tables)
+                };
+                tcx.dep_graph.with_ignore(|| {
+                    f(&annotation, tcx.hir().forest.krate())
                 })
             }
             _ => panic!("Should use call_with_pp_support"),
@@ -283,7 +267,7 @@ trait HirPrinterSupport<'hir>: pprust_hir::PpAnn {

 struct NoAnn<'hir> {
     sess: &'hir Session,
-    hir_map: Option<hir_map::Map<'hir>>,
+    tcx: Option<TyCtxt<'hir, 'hir, 'hir>>,
 }

 impl<'hir> PrinterSupport for NoAnn<'hir> {
@@ -302,7 +286,7 @@ impl<'hir> HirPrinterSupport<'hir> for NoAnn<'hir> {
     }

     fn hir_map<'a>(&'a self) -> Option<&'a hir_map::Map<'hir>> {
-        self.hir_map.as_ref()
+        self.tcx.map(|tcx| tcx.hir())
     }

     fn pp_ann<'a>(&'a self) -> &'a dyn pprust_hir::PpAnn {
@@ -314,8 +298,8 @@ impl<'hir> pprust::PpAnn for NoAnn<'hir> {}
 impl<'hir> pprust_hir::PpAnn for NoAnn<'hir> {
     fn nested(&self, state: &mut pprust_hir::State, nested: pprust_hir::Nested)
               -> io::Result<()> {
-        if let Some(ref map) = self.hir_map {
-            pprust_hir::PpAnn::nested(map, state, nested)
+        if let Some(tcx) = self.tcx {
+            pprust_hir::PpAnn::nested(tcx.hir(), state, nested)
         } else {
             Ok(())
         }
@@ -324,7 +308,7 @@ impl<'hir> pprust_hir::PpAnn for NoAnn<'hir> {

 struct IdentifiedAnnotation<'hir> {
     sess: &'hir Session,
-    hir_map: Option<hir_map::Map<'hir>>,
+    tcx: Option<TyCtxt<'hir, 'hir, 'hir>>,
 }

 impl<'hir> PrinterSupport for IdentifiedAnnotation<'hir> {
@@ -380,7 +364,7 @@ impl<'hir> HirPrinterSupport<'hir> for IdentifiedAnnotation<'hir> {
     }

     fn hir_map<'a>(&'a self) -> Option<&'a hir_map::Map<'hir>> {
-        self.hir_map.as_ref()
+        self.tcx.map(|tcx| tcx.hir())
     }

     fn pp_ann<'a>(&'a self) -> &'a dyn pprust_hir::PpAnn {
@@ -391,8 +375,8 @@ impl<'hir> HirPrinterSupport<'hir> for IdentifiedAnnotation<'hir> {
 impl<'hir> pprust_hir::PpAnn for IdentifiedAnnotation<'hir> {
     fn nested(&self, state: &mut pprust_hir::State, nested: pprust_hir::Nested)
               -> io::Result<()> {
-        if let Some(ref map) = self.hir_map {
-            pprust_hir::PpAnn::nested(map, state, nested)
+        if let Some(ref tcx) = self.tcx {
+            pprust_hir::PpAnn::nested(tcx.hir(), state, nested)
         } else {
             Ok(())
         }
@@ -691,12 +675,12 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec<borrowck_dot::Variant>,

 pub fn visit_crate(sess: &Session, krate: &mut ast::Crate, ppm: PpMode) {
     if let PpmSource(PpmEveryBodyLoops) = ppm {
-        util::ReplaceBodyWithLoop::new(sess).visit_crate(krate);
+        ReplaceBodyWithLoop::new(sess).visit_crate(krate);
     }
 }

 fn get_source(input: &Input, sess: &Session) -> (Vec<u8>, FileName) {
-    let src_name = input.source_name();
+    let src_name = source_name(input);
     let src = sess.source_map()
         .get_source_file(&src_name)
         .unwrap()
@@ -752,31 +736,24 @@ pub fn print_after_parsing(sess: &Session,
     write_output(out, ofile);
 }

-pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
-                                                cstore: &'tcx CStore,
-                                                hir_map: &hir_map::Map<'tcx>,
-                                                resolutions: &Resolutions,
-                                                input: &Input,
-                                                krate: &ast::Crate,
-                                                crate_name: &str,
-                                                ppm: PpMode,
-                                                output_filenames: &OutputFilenames,
-                                                opt_uii: Option<UserIdentifiedItem>,
-                                                ofile: Option<&Path>) {
+pub fn print_after_hir_lowering<'tcx>(
+    tcx: TyCtxt<'tcx, 'tcx, 'tcx>,
+    input: &Input,
+    krate: &ast::Crate,
+    ppm: PpMode,
+    opt_uii: Option<UserIdentifiedItem>,
+    ofile: Option<&Path>) {
     if ppm.needs_analysis() {
-        print_with_analysis(sess,
-                            cstore,
-                            hir_map,
-                            resolutions,
-                            crate_name,
-                            output_filenames,
-                            ppm,
-                            opt_uii,
-                            ofile);
+        abort_on_err(print_with_analysis(
+            tcx,
+            ppm,
+            opt_uii,
+            ofile
+        ), tcx.sess);
         return;
     }

-    let (src, src_name) = get_source(input, sess);
+    let (src, src_name) = get_source(input, tcx.sess);

     let mut rdr = &src[..];
     let mut out = Vec::new();
@@ -785,7 +762,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
         (PpmSource(s), _) => {
             // Silently ignores an identified node.
             let out: &mut dyn Write = &mut out;
-            s.call_with_pp_support(sess, Some(hir_map), move |annotation| {
+            s.call_with_pp_support(tcx.sess, Some(tcx), move |annotation| {
                 debug!("pretty printing source code {:?}", s);
                 let sess = annotation.sess();
                 pprust::print_crate(sess.source_map(),
@@ -801,13 +778,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,

         (PpmHir(s), None) => {
             let out: &mut dyn Write = &mut out;
-            s.call_with_pp_support_hir(sess,
-                                       cstore,
-                                       hir_map,
-                                       resolutions,
-                                       output_filenames,
-                                       crate_name,
-                                       move |annotation, krate| {
+            s.call_with_pp_support_hir(tcx, move |annotation, krate| {
                 debug!("pretty printing source code {:?}", s);
                 let sess = annotation.sess();
                 pprust_hir::print_crate(sess.source_map(),
@@ -823,13 +794,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,

         (PpmHirTree(s), None) => {
             let out: &mut dyn Write = &mut out;
-            s.call_with_pp_support_hir(sess,
-                                       cstore,
-                                       hir_map,
-                                       resolutions,
-                                       output_filenames,
-                                       crate_name,
-                                       move |_annotation, krate| {
+            s.call_with_pp_support_hir(tcx, move |_annotation, krate| {
                 debug!("pretty printing source code {:?}", s);
                 write!(out, "{:#?}", krate)
             })
@@ -837,13 +802,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,

         (PpmHir(s), Some(uii)) => {
             let out: &mut dyn Write = &mut out;
-            s.call_with_pp_support_hir(sess,
-                                       cstore,
-                                       hir_map,
-                                       resolutions,
-                                       output_filenames,
-                                       crate_name,
-                                       move |annotation, _| {
+            s.call_with_pp_support_hir(tcx, move |annotation, _| {
                 debug!("pretty printing source code {:?}", s);
                 let sess = annotation.sess();
                 let hir_map = annotation.hir_map().expect("-Z unpretty missing HIR map");
@@ -869,16 +828,10 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,

         (PpmHirTree(s), Some(uii)) => {
             let out: &mut dyn Write = &mut out;
-            s.call_with_pp_support_hir(sess,
-                                       cstore,
-                                       hir_map,
-                                       resolutions,
-                                       output_filenames,
-                                       crate_name,
-                                       move |_annotation, _krate| {
+            s.call_with_pp_support_hir(tcx, move |_annotation, _krate| {
                 debug!("pretty printing source code {:?}", s);
-                for node_id in uii.all_matching_node_ids(hir_map) {
-                    let node = hir_map.get(node_id);
+                for node_id in uii.all_matching_node_ids(tcx.hir()) {
+                    let node = tcx.hir().get(node_id);
                     write!(out, "{:#?}", node)?;
                 }
                 Ok(())
@@ -896,18 +849,15 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
 // analysis is performed. However, we want to call `phase_3_run_analysis_passes`
 // with a different callback than the standard driver, so that isn't easy.
 // Instead, we call that function ourselves.
-fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session,
-                                       cstore: &'a CStore,
-                                       hir_map: &hir_map::Map<'tcx>,
-                                       resolutions: &Resolutions,
-                                       crate_name: &str,
-                                       output_filenames: &OutputFilenames,
-                                       ppm: PpMode,
-                                       uii: Option<UserIdentifiedItem>,
-                                       ofile: Option<&Path>) {
+fn print_with_analysis<'tcx>(
+    tcx: TyCtxt<'_, 'tcx, 'tcx>,
+    ppm: PpMode,
+    uii: Option<UserIdentifiedItem>,
+    ofile: Option<&Path>
+) -> Result<(), ErrorReported> {
     let nodeid = if let Some(uii) = uii {
         debug!("pretty printing for {:?}", uii);
-        Some(uii.to_one_node_id("-Z unpretty", sess, &hir_map))
+        Some(uii.to_one_node_id("-Z unpretty", tcx.sess, tcx.hir()))
     } else {
         debug!("pretty printing for whole crate");
         None
@@ -915,66 +865,57 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session,

     let mut out = Vec::new();

-    let control = &driver::CompileController::basic();
-    let codegen_backend = util::get_codegen_backend(sess);
-    let mut arenas = AllArenas::new();
-    driver::phase_3_run_analysis_passes(&*codegen_backend,
-                                        control,
-                                        sess,
-                                        cstore,
-                                        hir_map.clone(),
-                                        resolutions.clone(),
-                                        &mut arenas,
-                                        crate_name,
-                                        output_filenames,
-                                        |tcx, _, result| {
-        abort_on_err(result, tcx.sess);
-        match ppm {
-            PpmMir | PpmMirCFG => {
-                if let Some(nodeid) = nodeid {
-                    let def_id = tcx.hir().local_def_id(nodeid);
-                    match ppm {
-                        PpmMir => write_mir_pretty(tcx, Some(def_id), &mut out),
-                        PpmMirCFG => write_mir_graphviz(tcx, Some(def_id), &mut out),
-                        _ => unreachable!(),
-                    }?;
-                } else {
-                    match ppm {
-                        PpmMir => write_mir_pretty(tcx, None, &mut out),
-                        PpmMirCFG => write_mir_graphviz(tcx, None, &mut out),
-                        _ => unreachable!(),
-                    }?;
-                }
-                Ok(())
-            }
-            PpmFlowGraph(mode) => {
-                let nodeid =
-                    nodeid.expect("`pretty flowgraph=..` needs NodeId (int) or unique path \
-                                   suffix (b::c::d)");
-                let node = tcx.hir().find(nodeid).unwrap_or_else(|| {
-                    tcx.sess.fatal(&format!("--pretty flowgraph couldn't find id: {}", nodeid))
-                });
-
-                match blocks::Code::from_node(&tcx.hir(), nodeid) {
-                    Some(code) => {
-                        let variants = gather_flowgraph_variants(tcx.sess);
-
-                        let out: &mut dyn Write = &mut out;
-
-                        print_flowgraph(variants, tcx, code, mode, out)
-                    }
-                    None => {
-                        let message = format!("--pretty=flowgraph needs block, fn, or method; \
-                                               got {:?}",
-                                              node);
-
-                        tcx.sess.span_fatal(tcx.hir().span(nodeid), &message)
-                    }
-                }
-            }
-            _ => unreachable!(),
-        }
-    }).unwrap();
+    tcx.analysis(LOCAL_CRATE)?;
+
+    let mut print = || match ppm {
+        PpmMir | PpmMirCFG => {
+            if let Some(nodeid) = nodeid {
+                let def_id = tcx.hir().local_def_id(nodeid);
+                match ppm {
+                    PpmMir => write_mir_pretty(tcx, Some(def_id), &mut out),
+                    PpmMirCFG => write_mir_graphviz(tcx, Some(def_id), &mut out),
+                    _ => unreachable!(),
+                }?;
+            } else {
+                match ppm {
+                    PpmMir => write_mir_pretty(tcx, None, &mut out),
+                    PpmMirCFG => write_mir_graphviz(tcx, None, &mut out),
+                    _ => unreachable!(),
+                }?;
+            }
+            Ok(())
+        }
+        PpmFlowGraph(mode) => {
+            let nodeid =
+                nodeid.expect("`pretty flowgraph=..` needs NodeId (int) or unique path \
+                               suffix (b::c::d)");
+            let node = tcx.hir().find(nodeid).unwrap_or_else(|| {
+                tcx.sess.fatal(&format!("--pretty flowgraph couldn't find id: {}", nodeid))
+            });
+
+            match blocks::Code::from_node(&tcx.hir(), nodeid) {
+                Some(code) => {
+                    let variants = gather_flowgraph_variants(tcx.sess);
+
+                    let out: &mut dyn Write = &mut out;
+
+                    print_flowgraph(variants, tcx, code, mode, out)
+                }
+                None => {
+                    let message = format!("--pretty=flowgraph needs block, fn, or method; \
+                                           got {:?}",
+                                          node);
+
+                    tcx.sess.span_fatal(tcx.hir().span(nodeid), &message)
+                }
+            }
+        }
+        _ => unreachable!(),
+    };
+
+    print().unwrap();

     write_output(out, ofile);
+
+    Ok(())
 }
@@ -1,34 +1,23 @@
 //! Standalone tests for the inference module.

-use driver;
-use errors;
 use errors::emitter::Emitter;
 use errors::{DiagnosticBuilder, Level};
 use rustc::hir;
-use rustc::hir::map as hir_map;
 use rustc::infer::outlives::env::OutlivesEnvironment;
 use rustc::infer::{self, InferOk, InferResult, SuppressRegionErrors};
 use rustc::middle::region;
-use rustc::session::config::{OutputFilenames, OutputTypes};
-use rustc::session::{self, config};
+use rustc::session::{DiagnosticOutput, config};
 use rustc::traits::ObligationCause;
-use rustc::ty::query::OnDiskCache;
 use rustc::ty::subst::Subst;
 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
-use rustc_data_structures::sync::{self, Lrc};
-use rustc_interface::util;
-use rustc_lint;
-use rustc_metadata::cstore::CStore;
+use rustc_data_structures::sync;
 use rustc_target::spec::abi::Abi;
-use syntax;
+use rustc_interface::interface;
 use syntax::ast;
 use syntax::feature_gate::UnstableFeatures;
-use syntax::source_map::{FileName, FilePathMapping, SourceMap};
+use syntax::source_map::FileName;
 use syntax::symbol::Symbol;

-use std::path::PathBuf;
-use std::sync::mpsc;
-
 struct Env<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
     infcx: &'a infer::InferCtxt<'a, 'gcx, 'tcx>,
     region_scope_tree: &'a mut region::ScopeTree,
@@ -75,102 +64,57 @@ impl Emitter for ExpectErrorEmitter {
     }
 }

-fn errors(msgs: &[&str]) -> (Box<dyn Emitter + sync::Send>, usize) {
-    let v = msgs.iter().map(|m| m.to_string()).collect();
+fn errors(msgs: &[&str]) -> (Box<dyn Emitter + Send + sync::Send>, usize) {
+    let mut v: Vec<_> = msgs.iter().map(|m| m.to_string()).collect();
+    if !v.is_empty() {
+        v.push("aborting due to previous error".to_owned());
+    }
     (
-        box ExpectErrorEmitter { messages: v } as Box<dyn Emitter + sync::Send>,
+        box ExpectErrorEmitter { messages: v } as Box<dyn Emitter + Send + sync::Send>,
         msgs.len(),
     )
 }

-fn test_env<F>(source_string: &str, args: (Box<dyn Emitter + sync::Send>, usize), body: F)
-where
-    F: FnOnce(Env) + sync::Send,
-{
-    syntax::with_globals(|| {
-        let mut options = config::Options::default();
-        options.debugging_opts.verbose = true;
-        options.unstable_features = UnstableFeatures::Allow;
-
-        // When we're compiling this library with `--test` it'll run as a binary but
-        // not actually exercise much functionality.
-        // As a result most of the logic loading the codegen backend is defunkt
-        // (it assumes we're a dynamic library in a sysroot)
-        // so let's just use the metadata only backend which doesn't need to load any libraries.
-        options.debugging_opts.codegen_backend = Some("metadata_only".to_owned());
-
-        driver::spawn_thread_pool(options, |options| {
-            test_env_with_pool(options, source_string, args, body)
-        })
-    });
-}
-
-fn test_env_with_pool<F>(
-    options: config::Options,
+fn test_env<F>(
     source_string: &str,
-    (emitter, expected_err_count): (Box<dyn Emitter + sync::Send>, usize),
+    (emitter, expected_err_count): (Box<dyn Emitter + Send + sync::Send>, usize),
     body: F,
-) where
-    F: FnOnce(Env),
+)
+where
+    F: FnOnce(Env) + Send,
 {
-    let diagnostic_handler = errors::Handler::with_emitter(true, None, emitter);
-    let sess = session::build_session_(
-        options,
-        None,
-        diagnostic_handler,
-        Lrc::new(SourceMap::new(FilePathMapping::empty())),
-        Default::default(),
-    );
-    let cstore = CStore::new(util::get_codegen_backend(&sess).metadata_loader());
-    rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
+    let mut opts = config::Options::default();
+    opts.debugging_opts.verbose = true;
+    opts.unstable_features = UnstableFeatures::Allow;
+
+    // When we're compiling this library with `--test` it'll run as a binary but
+    // not actually exercise much functionality.
+    // As a result most of the logic loading the codegen backend is defunkt
+    // (it assumes we're a dynamic library in a sysroot)
+    // so let's just use the metadata only backend which doesn't need to load any libraries.
+    opts.debugging_opts.codegen_backend = Some("metadata_only".to_owned());

     let input = config::Input::Str {
         name: FileName::anon_source_code(&source_string),
         input: source_string.to_string(),
     };
-    let krate =
-        driver::phase_1_parse_input(&driver::CompileController::basic(), &sess, &input).unwrap();
-    let driver::ExpansionResult {
-        defs,
-        resolutions,
-        mut hir_forest,
-        ..
-    } = {
-        driver::phase_2_configure_and_expand(
-            &sess,
-            &cstore,
-            krate,
-            None,
-            "test",
-            None,
-            |_| Ok(()),
-        ).expect("phase 2 aborted")
+    let config = interface::Config {
+        opts,
+        crate_cfg: Default::default(),
+        input,
+        input_path: None,
+        output_file: None,
+        output_dir: None,
+        file_loader: None,
+        diagnostic_output: DiagnosticOutput::Emitter(emitter),
+        stderr: None,
+        crate_name: Some("test".to_owned()),
+        lint_caps: Default::default(),
     };

-    let mut arenas = ty::AllArenas::new();
-    let hir_map = hir_map::map_crate(&sess, &cstore, &mut hir_forest, &defs);
-
-    // Run just enough stuff to build a tcx.
-    let (tx, _rx) = mpsc::channel();
-    let outputs = OutputFilenames {
-        out_directory: PathBuf::new(),
-        out_filestem: String::new(),
-        single_output_file: None,
-        extra: String::new(),
-        outputs: OutputTypes::new(&[]),
-    };
-    TyCtxt::create_and_enter(
-        &sess,
-        &cstore,
-        ty::query::Providers::default(),
-        ty::query::Providers::default(),
-        &mut arenas,
-        resolutions,
-        hir_map,
-        OnDiskCache::new_empty(sess.source_map()),
-        "test_crate",
-        tx,
-        &outputs,
-        |tcx| {
+    interface::run_compiler(config, |compiler| {
+        compiler.global_ctxt().unwrap().peek_mut().enter(|tcx| {
             tcx.infer_ctxt().enter(|infcx| {
                 let mut region_scope_tree = region::ScopeTree::default();
                 let param_env = ty::ParamEnv::empty();
@@ -189,8 +133,8 @@ fn test_env_with_pool<F>(
                 );
                 assert_eq!(tcx.sess.err_count(), expected_err_count);
             });
-        },
-    );
+        })
+    });
 }

 fn d1() -> ty::DebruijnIndex {
@@ -21,7 +21,7 @@ mod persist;

 pub use assert_dep_graph::assert_dep_graph;
 pub use persist::dep_graph_tcx_init;
-pub use persist::load_dep_graph;
+pub use persist::{DepGraphFuture, load_dep_graph};
 pub use persist::load_query_result_cache;
 pub use persist::LoadResult;
 pub use persist::copy_cgu_workproducts_to_incr_comp_cache_dir;
@@ -94,10 +94,10 @@ impl<T> MaybeAsync<T> {
     }
 }

+pub type DepGraphFuture = MaybeAsync<LoadResult<(PreviousDepGraph, WorkProductMap)>>;
+
 /// Launch a thread and load the dependency graph in the background.
-pub fn load_dep_graph(sess: &Session) ->
-    MaybeAsync<LoadResult<(PreviousDepGraph, WorkProductMap)>>
-{
+pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
     // Since `sess` isn't `Sync`, we perform all accesses to `sess`
     // before we fire the background thread.
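The new `DepGraphFuture` alias names the value that later, demand-driven passes can block on only when they actually need the previous dep-graph. Purely as an illustration of that shape, and not rustc's actual `MaybeAsync` type (whose API may differ), a minimal sketch using only std threads:

use std::thread::{self, JoinHandle};

// Simplified stand-in for rustc_incremental's MaybeAsync<T>: either a value
// computed synchronously, or a handle to a background thread that will
// produce it later.
#[allow(dead_code)]
enum MaybeAsync<T> {
    Sync(T),
    Async(JoinHandle<T>),
}

impl<T> MaybeAsync<T> {
    // Block only when the result is actually needed.
    fn open(self) -> T {
        match self {
            MaybeAsync::Sync(v) => v,
            MaybeAsync::Async(handle) => handle.join().expect("loader thread panicked"),
        }
    }
}

// Kick off the (pretend) dep-graph load without blocking the caller.
fn load_dep_graph() -> MaybeAsync<Vec<u8>> {
    MaybeAsync::Async(thread::spawn(|| {
        // e.g. read and decode the previous dep-graph from disk
        vec![1, 2, 3]
    }))
}

fn main() {
    let future = load_dep_graph();
    // ... parsing and expansion could run here in the meantime ...
    assert_eq!(future.open(), vec![1, 2, 3]);
}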
@@ -16,7 +16,7 @@ pub use fs::in_incr_comp_dir;
 pub use fs::in_incr_comp_dir_sess;
 pub use fs::prepare_session_directory;
 pub use load::dep_graph_tcx_init;
-pub use load::load_dep_graph;
+pub use load::{DepGraphFuture, load_dep_graph};
 pub use load::load_query_result_cache;
 pub use load::LoadResult;
 pub use save::save_dep_graph;
src/librustc_interface/interface.rs (new file, 155 lines)
@@ -0,0 +1,155 @@
+use queries::Queries;
+use rustc::lint;
+use rustc::session::config::{self, Input};
+use rustc::session::{DiagnosticOutput, Session};
+use rustc::util::common::ErrorReported;
+use rustc_codegen_utils::codegen_backend::CodegenBackend;
+use rustc_data_structures::OnDrop;
+use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::fx::{FxHashSet, FxHashMap};
+use rustc_metadata::cstore::CStore;
+use std::collections::HashSet;
+use std::io::Write;
+use std::path::PathBuf;
+use std::result;
+use std::sync::{Arc, Mutex};
+use syntax;
+use syntax::source_map::{FileLoader, SourceMap};
+use util;
+use profile;
+
+pub use passes::BoxedResolver;
+
+pub type Result<T> = result::Result<T, ErrorReported>;
+
+/// Represents a compiler session.
+/// Can be used to run `rustc_interface` queries.
+/// Created by passing `Config` to `run_compiler`.
+pub struct Compiler {
+    pub(crate) sess: Lrc<Session>,
+    codegen_backend: Lrc<Box<dyn CodegenBackend>>,
+    source_map: Lrc<SourceMap>,
+    pub(crate) input: Input,
+    pub(crate) input_path: Option<PathBuf>,
+    pub(crate) output_dir: Option<PathBuf>,
+    pub(crate) output_file: Option<PathBuf>,
+    pub(crate) queries: Queries,
+    pub(crate) cstore: Lrc<CStore>,
+    pub(crate) crate_name: Option<String>,
+}
+
+impl Compiler {
+    pub fn session(&self) -> &Lrc<Session> {
+        &self.sess
+    }
+    pub fn codegen_backend(&self) -> &Lrc<Box<dyn CodegenBackend>> {
+        &self.codegen_backend
+    }
+    pub fn cstore(&self) -> &Lrc<CStore> {
+        &self.cstore
+    }
+    pub fn source_map(&self) -> &Lrc<SourceMap> {
+        &self.source_map
+    }
+    pub fn input(&self) -> &Input {
+        &self.input
+    }
+    pub fn output_dir(&self) -> &Option<PathBuf> {
+        &self.output_dir
+    }
+    pub fn output_file(&self) -> &Option<PathBuf> {
+        &self.output_file
+    }
+}
+
+/// The compiler configuration
+pub struct Config {
+    /// Command line options
+    pub opts: config::Options,
+
+    /// cfg! configuration in addition to the default ones
+    pub crate_cfg: FxHashSet<(String, Option<String>)>,
+
+    pub input: Input,
+    pub input_path: Option<PathBuf>,
+    pub output_dir: Option<PathBuf>,
+    pub output_file: Option<PathBuf>,
+    pub file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
+    pub diagnostic_output: DiagnosticOutput,
+
+    /// Set to capture stderr output during compiler execution
+    pub stderr: Option<Arc<Mutex<Vec<u8>>>>,
+
+    pub crate_name: Option<String>,
+    pub lint_caps: FxHashMap<lint::LintId, lint::Level>,
+}
+
+pub fn run_compiler_in_existing_thread_pool<F, R>(config: Config, f: F) -> R
+where
+    F: FnOnce(&Compiler) -> R,
+{
+    let (sess, codegen_backend, source_map) = util::create_session(
+        config.opts,
+        config.crate_cfg,
+        config.diagnostic_output,
+        config.file_loader,
+        config.input_path.clone(),
+        config.lint_caps,
+    );
+
+    let cstore = Lrc::new(CStore::new(codegen_backend.metadata_loader()));
+
+    let compiler = Compiler {
+        sess,
+        codegen_backend,
+        source_map,
+        cstore,
+        input: config.input,
+        input_path: config.input_path,
+        output_dir: config.output_dir,
+        output_file: config.output_file,
+        queries: Default::default(),
+        crate_name: config.crate_name,
+    };
+
+    let _sess_abort_error = OnDrop(|| compiler.sess.diagnostic().print_error_count());
+
+    if compiler.sess.profile_queries() {
+        profile::begin(&compiler.sess);
+    }
+
+    let r = f(&compiler);
+
+    if compiler.sess.profile_queries() {
+        profile::dump(&compiler.sess, "profile_queries".to_string())
+    }
+
+    if compiler.sess.opts.debugging_opts.self_profile {
+        compiler.sess.profiler(|p| p.dump_raw_events(&compiler.sess.opts));
+    }
+
+    r
+}
+
+pub fn run_compiler<F, R>(mut config: Config, f: F) -> R
+where
+    F: FnOnce(&Compiler) -> R + Send,
+    R: Send,
+{
+    syntax::with_globals(move || {
+        let stderr = config.stderr.take();
+        util::spawn_thread_pool(
+            config.opts.debugging_opts.threads,
+            &stderr,
+            || run_compiler_in_existing_thread_pool(config, f),
+        )
+    })
+}
+
+pub fn default_thread_pool<F, R>(f: F) -> R
+where
+    F: FnOnce() -> R + Send,
+    R: Send,
+{
+    util::spawn_thread_pool(None, &None, f)
+}
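To show how the pieces above fit together, here is a minimal usage sketch modelled on the updated inference test earlier in this diff. The `Config` fields and the `global_ctxt()`/`enter()` chain come from that test; `DiagnosticOutput::Default`, the `"example"` crate name, and compiling from an in-memory string are illustrative assumptions, not part of the change itself.

// Sketch only: error handling and session tuning are omitted.
use rustc_interface::interface;
use rustc::session::{DiagnosticOutput, config};
use syntax::source_map::FileName;

fn compile_str(source: &str) {
    let config = interface::Config {
        opts: config::Options::default(),
        crate_cfg: Default::default(),
        input: config::Input::Str {
            name: FileName::anon_source_code(source),
            input: source.to_string(),
        },
        input_path: None,
        output_file: None,
        output_dir: None,
        file_loader: None,
        diagnostic_output: DiagnosticOutput::Default, // assumed default variant
        stderr: None,
        crate_name: Some("example".to_owned()),
        lint_caps: Default::default(),
    };

    interface::run_compiler(config, |compiler| {
        // Queries are demand driven: asking for the global context pulls in
        // parsing, expansion, and lowering as needed.
        compiler.global_ctxt().unwrap().peek_mut().enter(|tcx| {
            let _ = tcx; // inspect the TyCtxt here
        });
    });
}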
@@ -3,6 +3,7 @@
 #![feature(nll)]
 #![feature(arbitrary_self_types)]
 #![feature(generator_trait)]
+#![feature(generators)]
 #![cfg_attr(unix, feature(libc))]

 #![allow(unused_imports)]
@@ -37,7 +38,11 @@ extern crate syntax;
 extern crate syntax_pos;
 extern crate syntax_ext;

-pub mod passes;
-pub mod profile;
+pub mod interface;
+mod passes;
+mod queries;
 pub mod util;
-pub mod proc_macro_decls;
+mod proc_macro_decls;
+mod profile;
+
+pub use interface::{run_compiler, Config};
@@ -1,3 +1,4 @@
+use interface::{Compiler, Result};
 use util;
 use proc_macro_decls;

@@ -8,7 +9,7 @@ use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
 use rustc::lint;
 use rustc::middle::{self, reachable, resolve_lifetime, stability};
 use rustc::middle::privacy::AccessLevels;
-use rustc::ty::{self, AllArenas, Resolutions, TyCtxt};
+use rustc::ty::{self, AllArenas, Resolutions, TyCtxt, GlobalCtxt};
 use rustc::ty::steal::Steal;
 use rustc::traits;
 use rustc::util::common::{time, ErrorReported};
@@ -23,6 +24,7 @@ use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::stable_hasher::StableHasher;
 use rustc_data_structures::sync::{Lrc, ParallelIterator, par_iter};
 use rustc_incremental;
+use rustc_incremental::DepGraphFuture;
 use rustc_metadata::creader::CrateLoader;
 use rustc_metadata::cstore::{self, CStore};
 use rustc_mir as mir;
@@ -35,12 +37,13 @@ use rustc_traits;
 use rustc_typeck as typeck;
 use syntax::{self, ast, attr, diagnostics, visit};
 use syntax::early_buffered_lints::BufferedEarlyLint;
-use syntax::ext::base::ExtCtxt;
+use syntax::ext::base::{NamedSyntaxExtension, ExtCtxt};
 use syntax::mut_visit::MutVisitor;
 use syntax::parse::{self, PResult};
 use syntax::util::node_count::NodeCounter;
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax::symbol::Symbol;
+use syntax::feature_gate::AttributeType;
 use syntax_pos::{FileName, hygiene};
 use syntax_ext;

@@ -59,8 +62,524 @@ use std::rc::Rc;
 use std::mem;
 use std::ops::Generator;

-/// Returns all the paths that correspond to generated files.
-pub fn generated_output_paths(
+pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
+    sess.diagnostic()
+        .set_continue_after_error(sess.opts.debugging_opts.continue_parse_after_error);
+    hygiene::set_default_edition(sess.edition());
+
+    sess.profiler(|p| p.start_activity(ProfileCategory::Parsing));
+    let krate = time(sess, "parsing", || match *input {
+        Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
+        Input::Str {
+            ref input,
+            ref name,
+        } => parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess),
+    })?;
+    sess.profiler(|p| p.end_activity(ProfileCategory::Parsing));
+
+    sess.diagnostic().set_continue_after_error(true);
+
+    if sess.opts.debugging_opts.ast_json_noexpand {
+        println!("{}", json::as_json(&krate));
+    }
+
+    if sess.opts.debugging_opts.input_stats {
+        println!(
+            "Lines of code: {}",
+            sess.source_map().count_lines()
+        );
+        println!("Pre-expansion node count: {}", count_nodes(&krate));
+    }
+
+    if let Some(ref s) = sess.opts.debugging_opts.show_span {
+        syntax::show_span::run(sess.diagnostic(), s, &krate);
+    }
+
+    if sess.opts.debugging_opts.hir_stats {
+        hir_stats::print_ast_stats(&krate, "PRE EXPANSION AST STATS");
+    }
+
+    Ok(krate)
+}
+
+fn count_nodes(krate: &ast::Crate) -> usize {
+    let mut counter = NodeCounter::new();
+    visit::walk_crate(&mut counter, krate);
+    counter.count
+}
+
+declare_box_region_type!(
+    pub BoxedResolver,
+    for(),
+    (&mut Resolver<'_>) -> (Result<ast::Crate>, ExpansionResult)
+);
+
+/// Runs the "early phases" of the compiler: initial `cfg` processing,
+/// loading compiler plugins (including those from `addl_plugins`),
+/// syntax expansion, secondary `cfg` expansion, synthesis of a test
+/// harness if one is to be provided, injection of a dependency on the
+/// standard library and prelude, and name resolution.
+///
+/// Returns `None` if we're aborting after handling -W help.
+pub fn configure_and_expand(
+    sess: Lrc<Session>,
+    cstore: Lrc<CStore>,
+    krate: ast::Crate,
+    crate_name: &str,
+    plugin_info: PluginInfo,
+) -> Result<(ast::Crate, BoxedResolver)> {
+    // Currently, we ignore the name resolution data structures for the purposes of dependency
+    // tracking. Instead we will run name resolution and include its output in the hash of each
+    // item, much like we do for macro expansion. In other words, the hash reflects not just
+    // its contents but the results of name resolution on those contents. Hopefully we'll push
+    // this back at some point.
+    let crate_name = crate_name.to_string();
+    let (result, resolver) = BoxedResolver::new(static move || {
+        let sess = &*sess;
+        let mut crate_loader = CrateLoader::new(sess, &*cstore, &crate_name);
+        let resolver_arenas = Resolver::arenas();
+        let res = configure_and_expand_inner(
+            sess,
+            &*cstore,
+            krate,
+            &crate_name,
+            &resolver_arenas,
+            &mut crate_loader,
+            plugin_info,
+        );
+        let mut resolver = match res {
+            Err(v) => {
+                yield BoxedResolver::initial_yield(Err(v));
+                panic!()
+            }
+            Ok((krate, resolver)) => {
+                yield BoxedResolver::initial_yield(Ok(krate));
+                resolver
+            }
+        };
+        box_region_allow_access!(for(), (&mut Resolver<'_>), (&mut resolver));
+        ExpansionResult::from_owned_resolver(resolver)
+    });
+    result.map(|k| (k, resolver))
+}
+
+pub struct ExpansionResult {
+    pub defs: Steal<hir::map::Definitions>,
+    pub resolutions: Steal<Resolutions>,
+}
+
+impl ExpansionResult {
+    fn from_owned_resolver(
+        resolver: Resolver<'_>,
+    ) -> Self {
+        ExpansionResult {
+            defs: Steal::new(resolver.definitions),
+            resolutions: Steal::new(Resolutions {
+                freevars: resolver.freevars,
+                export_map: resolver.export_map,
+                trait_map: resolver.trait_map,
+                glob_map: resolver.glob_map,
+                maybe_unused_trait_imports: resolver.maybe_unused_trait_imports,
+                maybe_unused_extern_crates: resolver.maybe_unused_extern_crates,
+                extern_prelude: resolver.extern_prelude.iter().map(|(ident, entry)| {
+                    (ident.name, entry.introduced_by_item)
+                }).collect(),
+            }),
+        }
+    }
+
+    pub fn from_resolver_ref(
+        resolver: &Resolver<'_>,
+    ) -> Self {
+        ExpansionResult {
+            defs: Steal::new(resolver.definitions.clone()),
+            resolutions: Steal::new(Resolutions {
+                freevars: resolver.freevars.clone(),
+                export_map: resolver.export_map.clone(),
+                trait_map: resolver.trait_map.clone(),
+                glob_map: resolver.glob_map.clone(),
+                maybe_unused_trait_imports: resolver.maybe_unused_trait_imports.clone(),
+                maybe_unused_extern_crates: resolver.maybe_unused_extern_crates.clone(),
+                extern_prelude: resolver.extern_prelude.iter().map(|(ident, entry)| {
+                    (ident.name, entry.introduced_by_item)
+                }).collect(),
+            }),
+        }
+    }
+}
+
+impl BoxedResolver {
+    pub fn to_expansion_result(
+        mut resolver: Rc<Option<RefCell<BoxedResolver>>>,
+    ) -> ExpansionResult {
+        if let Some(resolver) = Rc::get_mut(&mut resolver) {
+            mem::replace(resolver, None).unwrap().into_inner().complete()
+        } else {
+            let resolver = &*resolver;
+            resolver.as_ref().unwrap().borrow_mut().access(|resolver| {
+                ExpansionResult::from_resolver_ref(resolver)
+            })
+        }
+    }
+}
+
+pub struct PluginInfo {
+    syntax_exts: Vec<NamedSyntaxExtension>,
+    attributes: Vec<(String, AttributeType)>,
+}
+
+pub fn register_plugins<'a>(
+    compiler: &Compiler,
+    sess: &'a Session,
+    cstore: &'a CStore,
+    mut krate: ast::Crate,
+    crate_name: &str,
+) -> Result<(ast::Crate, PluginInfo)> {
+    krate = time(sess, "attributes injection", || {
+        syntax::attr::inject(krate, &sess.parse_sess, &sess.opts.debugging_opts.crate_attr)
+    });
+
+    let (mut krate, features) = syntax::config::features(
+        krate,
+        &sess.parse_sess,
+        sess.edition(),
+    );
+    // these need to be set "early" so that expansion sees `quote` if enabled.
+    sess.init_features(features);
+
+    let crate_types = util::collect_crate_types(sess, &krate.attrs);
+    sess.crate_types.set(crate_types);
+
+    let disambiguator = util::compute_crate_disambiguator(sess);
+    sess.crate_disambiguator.set(disambiguator);
+    rustc_incremental::prepare_session_directory(sess, &crate_name, disambiguator);
+
+    if sess.opts.incremental.is_some() {
+        time(sess, "garbage collect incremental cache directory", || {
+            if let Err(e) = rustc_incremental::garbage_collect_session_directories(sess) {
+                warn!(
+                    "Error while trying to garbage collect incremental \
+                     compilation cache directory: {}",
+                    e
+                );
+            }
+        });
+    }
+
+    // If necessary, compute the dependency graph (in the background).
+    compiler.dep_graph_future().ok();
+
+    time(sess, "recursion limit", || {
+        middle::recursion_limit::update_limits(sess, &krate);
+    });
+
+    krate = time(sess, "crate injection", || {
+        let alt_std_name = sess.opts.alt_std_name.as_ref().map(|s| &**s);
+        syntax::std_inject::maybe_inject_crates_ref(krate, alt_std_name, sess.edition())
+    });
+
+    let registrars = time(sess, "plugin loading", || {
+        plugin::load::load_plugins(
+            sess,
+            &cstore,
+            &krate,
+            crate_name,
+            Some(sess.opts.debugging_opts.extra_plugins.clone()),
+        )
+    });
+
+    let mut registry = Registry::new(sess, krate.span);
+
+    time(sess, "plugin registration", || {
+        if sess.features_untracked().rustc_diagnostic_macros {
+            registry.register_macro(
+                "__diagnostic_used",
+                diagnostics::plugin::expand_diagnostic_used,
+            );
+            registry.register_macro(
+                "__register_diagnostic",
+                diagnostics::plugin::expand_register_diagnostic,
+            );
+            registry.register_macro(
+                "__build_diagnostic_array",
+                diagnostics::plugin::expand_build_diagnostic_array,
+            );
+        }
+
+        for registrar in registrars {
+            registry.args_hidden = Some(registrar.args);
+            (registrar.fun)(&mut registry);
+        }
+    });
+
+    let Registry {
+        syntax_exts,
+        early_lint_passes,
+        late_lint_passes,
+        lint_groups,
+        llvm_passes,
+        attributes,
+        ..
+    } = registry;
+
+    sess.track_errors(|| {
+        let mut ls = sess.lint_store.borrow_mut();
+        for pass in early_lint_passes {
+            ls.register_early_pass(Some(sess), true, false, pass);
+        }
+        for pass in late_lint_passes {
+            ls.register_late_pass(Some(sess), true, pass);
+        }
+
+        for (name, (to, deprecated_name)) in lint_groups {
+            ls.register_group(Some(sess), true, name, deprecated_name, to);
+        }
+
+        *sess.plugin_llvm_passes.borrow_mut() = llvm_passes;
+        *sess.plugin_attributes.borrow_mut() = attributes.clone();
+    })?;
+
+    Ok((krate, PluginInfo {
+        syntax_exts,
+        attributes,
+    }))
+}
+
+fn configure_and_expand_inner<'a>(
+    sess: &'a Session,
+    cstore: &'a CStore,
+    mut krate: ast::Crate,
+    crate_name: &str,
+    resolver_arenas: &'a ResolverArenas<'a>,
+    crate_loader: &'a mut CrateLoader<'a>,
+    plugin_info: PluginInfo,
+) -> Result<(ast::Crate, Resolver<'a>)> {
+    let attributes = plugin_info.attributes;
+    time(sess, "pre ast expansion lint checks", || {
+        lint::check_ast_crate(
+            sess,
+            &krate,
+            true,
+            rustc_lint::BuiltinCombinedPreExpansionLintPass::new());
+    });
+
+    let mut resolver = Resolver::new(
+        sess,
+        cstore,
+        &krate,
+        crate_name,
+        crate_loader,
+        &resolver_arenas,
+    );
+    syntax_ext::register_builtins(&mut resolver, plugin_info.syntax_exts);
+
+    // Expand all macros
+    sess.profiler(|p| p.start_activity(ProfileCategory::Expansion));
+    krate = time(sess, "expansion", || {
+        // Windows dlls do not have rpaths, so they don't know how to find their
+        // dependencies. It's up to us to tell the system where to find all the
+        // dependent dlls. Note that this uses cfg!(windows) as opposed to
+        // targ_cfg because syntax extensions are always loaded for the host
+        // compiler, not for the target.
+        //
+        // This is somewhat of an inherently racy operation, however, as
+        // multiple threads calling this function could possibly continue
+        // extending PATH far beyond what it should. To solve this for now we
+        // just don't add any new elements to PATH which are already there
+        // within PATH. This is basically a targeted fix at #17360 for rustdoc
+        // which runs rustc in parallel but has been seen (#33844) to cause
+        // problems with PATH becoming too long.
+        let mut old_path = OsString::new();
+        if cfg!(windows) {
+            old_path = env::var_os("PATH").unwrap_or(old_path);
+            let mut new_path = sess.host_filesearch(PathKind::All).search_path_dirs();
+            for path in env::split_paths(&old_path) {
+                if !new_path.contains(&path) {
+                    new_path.push(path);
+                }
+            }
+            env::set_var(
+                "PATH",
+                &env::join_paths(
+                    new_path
+                        .iter()
+                        .filter(|p| env::join_paths(iter::once(p)).is_ok()),
+                ).unwrap(),
+            );
+        }
+
+        // Create the config for macro expansion
+        let features = sess.features_untracked();
+        let cfg = syntax::ext::expand::ExpansionConfig {
+            features: Some(&features),
+            recursion_limit: *sess.recursion_limit.get(),
+            trace_mac: sess.opts.debugging_opts.trace_macros,
+            should_test: sess.opts.test,
+            ..syntax::ext::expand::ExpansionConfig::default(crate_name.to_string())
+        };
+
+        let mut ecx = ExtCtxt::new(&sess.parse_sess, cfg, &mut resolver);
+
+        // Expand macros now!
+        let krate = time(sess, "expand crate", || {
+            ecx.monotonic_expander().expand_crate(krate)
+        });
+
+        // The rest is error reporting
+
+        time(sess, "check unused macros", || {
+            ecx.check_unused_macros();
+        });
+
+        let mut missing_fragment_specifiers: Vec<_> = ecx.parse_sess
+            .missing_fragment_specifiers
+            .borrow()
+            .iter()
+            .cloned()
+            .collect();
+        missing_fragment_specifiers.sort();
+
+        for span in missing_fragment_specifiers {
+            let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
+            let msg = "missing fragment specifier";
+            sess.buffer_lint(lint, ast::CRATE_NODE_ID, span, msg);
+        }
+        if cfg!(windows) {
|
||||||
|
env::set_var("PATH", &old_path);
|
||||||
|
}
|
||||||
|
krate
|
||||||
|
});
|
||||||
|
sess.profiler(|p| p.end_activity(ProfileCategory::Expansion));
|
||||||
|
|
||||||
|
time(sess, "maybe building test harness", || {
|
||||||
|
syntax::test::modify_for_testing(
|
||||||
|
&sess.parse_sess,
|
||||||
|
&mut resolver,
|
||||||
|
sess.opts.test,
|
||||||
|
&mut krate,
|
||||||
|
sess.diagnostic(),
|
||||||
|
&sess.features_untracked(),
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
|
// If we're actually rustdoc then there's no need to actually compile
|
||||||
|
// anything, so switch everything to just looping
|
||||||
|
if sess.opts.actually_rustdoc {
|
||||||
|
util::ReplaceBodyWithLoop::new(sess).visit_crate(&mut krate);
|
||||||
|
}
|
||||||
|
|
||||||
|
let (has_proc_macro_decls, has_global_allocator) = time(sess, "AST validation", || {
|
||||||
|
ast_validation::check_crate(sess, &krate)
|
||||||
|
});
|
||||||
|
|
||||||
|
// If we're in rustdoc we're always compiling as an rlib, but that'll trip a
|
||||||
|
// bunch of checks in the `modify` function below. For now just skip this
|
||||||
|
// step entirely if we're rustdoc as it's not too useful anyway.
|
||||||
|
if !sess.opts.actually_rustdoc {
|
||||||
|
krate = time(sess, "maybe creating a macro crate", || {
|
||||||
|
let crate_types = sess.crate_types.borrow();
|
||||||
|
let num_crate_types = crate_types.len();
|
||||||
|
let is_proc_macro_crate = crate_types.contains(&config::CrateType::ProcMacro);
|
||||||
|
let is_test_crate = sess.opts.test;
|
||||||
|
syntax_ext::proc_macro_decls::modify(
|
||||||
|
&sess.parse_sess,
|
||||||
|
&mut resolver,
|
||||||
|
krate,
|
||||||
|
is_proc_macro_crate,
|
||||||
|
has_proc_macro_decls,
|
||||||
|
is_test_crate,
|
||||||
|
num_crate_types,
|
||||||
|
sess.diagnostic(),
|
||||||
|
)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if has_global_allocator {
|
||||||
|
// Expand global allocators, which are treated as an in-tree proc macro
|
||||||
|
time(sess, "creating allocators", || {
|
||||||
|
allocator::expand::modify(
|
||||||
|
&sess.parse_sess,
|
||||||
|
&mut resolver,
|
||||||
|
&mut krate,
|
||||||
|
crate_name.to_string(),
|
||||||
|
sess.diagnostic(),
|
||||||
|
)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Done with macro expansion!
|
||||||
|
|
||||||
|
if sess.opts.debugging_opts.input_stats {
|
||||||
|
println!("Post-expansion node count: {}", count_nodes(&krate));
|
||||||
|
}
|
||||||
|
|
||||||
|
if sess.opts.debugging_opts.hir_stats {
|
||||||
|
hir_stats::print_ast_stats(&krate, "POST EXPANSION AST STATS");
|
||||||
|
}
|
||||||
|
|
||||||
|
if sess.opts.debugging_opts.ast_json {
|
||||||
|
println!("{}", json::as_json(&krate));
|
||||||
|
}
|
||||||
|
|
||||||
|
time(sess, "name resolution", || {
|
||||||
|
resolver.resolve_crate(&krate);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Needs to go *after* expansion to be able to check the results of macro expansion.
|
||||||
|
time(sess, "complete gated feature checking", || {
|
||||||
|
syntax::feature_gate::check_crate(
|
||||||
|
&krate,
|
||||||
|
&sess.parse_sess,
|
||||||
|
&sess.features_untracked(),
|
||||||
|
&attributes,
|
||||||
|
sess.opts.unstable_features,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add all buffered lints from the `ParseSess` to the `Session`.
|
||||||
|
sess.parse_sess.buffered_lints.with_lock(|buffered_lints| {
|
||||||
|
info!("{} parse sess buffered_lints", buffered_lints.len());
|
||||||
|
for BufferedEarlyLint{id, span, msg, lint_id} in buffered_lints.drain(..) {
|
||||||
|
let lint = lint::Lint::from_parser_lint_id(lint_id);
|
||||||
|
sess.buffer_lint(lint, id, span, &msg);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok((krate, resolver))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lower_to_hir(
|
||||||
|
sess: &Session,
|
||||||
|
cstore: &CStore,
|
||||||
|
resolver: &mut Resolver<'_>,
|
||||||
|
dep_graph: &DepGraph,
|
||||||
|
krate: &ast::Crate,
|
||||||
|
) -> Result<hir::map::Forest> {
|
||||||
|
// Lower ast -> hir
|
||||||
|
let hir_forest = time(sess, "lowering ast -> hir", || {
|
||||||
|
let hir_crate = lower_crate(sess, cstore, &dep_graph, &krate, resolver);
|
||||||
|
|
||||||
|
if sess.opts.debugging_opts.hir_stats {
|
||||||
|
hir_stats::print_hir_stats(&hir_crate);
|
||||||
|
}
|
||||||
|
|
||||||
|
hir::map::Forest::new(hir_crate, &dep_graph)
|
||||||
|
});
|
||||||
|
|
||||||
|
time(sess, "early lint checks", || {
|
||||||
|
lint::check_ast_crate(sess, &krate, false, rustc_lint::BuiltinCombinedEarlyLintPass::new())
|
||||||
|
});
|
||||||
|
|
||||||
|
// Discard hygiene data, which isn't required after lowering to HIR.
|
||||||
|
if !sess.opts.debugging_opts.keep_hygiene_data {
|
||||||
|
syntax::ext::hygiene::clear_markings();
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(hir_forest)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns all the paths that correspond to generated files.
|
||||||
|
fn generated_output_paths(
|
||||||
sess: &Session,
|
sess: &Session,
|
||||||
outputs: &OutputFilenames,
|
outputs: &OutputFilenames,
|
||||||
exact_name: bool,
|
exact_name: bool,
|
||||||
|
@ -106,7 +625,7 @@ where
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn output_contains_path(output_paths: &[PathBuf], input_path: &PathBuf) -> bool {
|
fn output_contains_path(output_paths: &[PathBuf], input_path: &PathBuf) -> bool {
|
||||||
let input_path = input_path.canonicalize().ok();
|
let input_path = input_path.canonicalize().ok();
|
||||||
if input_path.is_none() {
|
if input_path.is_none() {
|
||||||
return false;
|
return false;
|
||||||
|
@ -121,7 +640,7 @@ pub fn output_contains_path(output_paths: &[PathBuf], input_path: &PathBuf) -> b
|
||||||
check_output(output_paths, check).is_some()
|
check_output(output_paths, check).is_some()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn output_conflicts_with_dir(output_paths: &[PathBuf]) -> Option<PathBuf> {
|
fn output_conflicts_with_dir(output_paths: &[PathBuf]) -> Option<PathBuf> {
|
||||||
let check = |output_path: &PathBuf| {
|
let check = |output_path: &PathBuf| {
|
||||||
if output_path.is_dir() {
|
if output_path.is_dir() {
|
||||||
Some(output_path.clone())
|
Some(output_path.clone())
|
||||||
|
@ -138,7 +657,7 @@ fn escape_dep_filename(filename: &FileName) -> String {
|
||||||
filename.to_string().replace(" ", "\\ ")
|
filename.to_string().replace(" ", "\\ ")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write_out_deps(sess: &Session, outputs: &OutputFilenames, out_filenames: &[PathBuf]) {
|
fn write_out_deps(sess: &Session, outputs: &OutputFilenames, out_filenames: &[PathBuf]) {
|
||||||
// Write out dependency rules to the dep-info file if requested
|
// Write out dependency rules to the dep-info file if requested
|
||||||
if !sess.opts.output_types.contains_key(&OutputType::DepInfo) {
|
if !sess.opts.output_types.contains_key(&OutputType::DepInfo) {
|
||||||
return;
|
return;
|
||||||
|
@ -178,15 +697,192 @@ pub fn write_out_deps(sess: &Session, outputs: &OutputFilenames, out_filenames:
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn provide(providers: &mut ty::query::Providers) {
|
pub fn prepare_outputs(
|
||||||
providers.analysis = analysis;
|
sess: &Session,
|
||||||
proc_macro_decls::provide(providers);
|
compiler: &Compiler,
|
||||||
|
krate: &ast::Crate,
|
||||||
|
crate_name: &str
|
||||||
|
) -> Result<OutputFilenames> {
|
||||||
|
// FIXME: rustdoc passes &[] instead of &krate.attrs here
|
||||||
|
let outputs = util::build_output_filenames(
|
||||||
|
&compiler.input,
|
||||||
|
&compiler.output_dir,
|
||||||
|
&compiler.output_file,
|
||||||
|
&krate.attrs,
|
||||||
|
sess
|
||||||
|
);
|
||||||
|
|
||||||
|
let output_paths = generated_output_paths(
|
||||||
|
sess,
|
||||||
|
&outputs,
|
||||||
|
compiler.output_file.is_some(),
|
||||||
|
&crate_name,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Ensure the source file isn't accidentally overwritten during compilation.
|
||||||
|
if let Some(ref input_path) = compiler.input_path {
|
||||||
|
if sess.opts.will_create_output_file() {
|
||||||
|
if output_contains_path(&output_paths, input_path) {
|
||||||
|
sess.err(&format!(
|
||||||
|
"the input file \"{}\" would be overwritten by the generated \
|
||||||
|
executable",
|
||||||
|
input_path.display()
|
||||||
|
));
|
||||||
|
return Err(ErrorReported);
|
||||||
|
}
|
||||||
|
if let Some(dir_path) = output_conflicts_with_dir(&output_paths) {
|
||||||
|
sess.err(&format!(
|
||||||
|
"the generated executable for the input file \"{}\" conflicts with the \
|
||||||
|
existing directory \"{}\"",
|
||||||
|
input_path.display(),
|
||||||
|
dir_path.display()
|
||||||
|
));
|
||||||
|
return Err(ErrorReported);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
write_out_deps(sess, &outputs, &output_paths);
|
||||||
|
|
||||||
|
let only_dep_info = sess.opts.output_types.contains_key(&OutputType::DepInfo)
|
||||||
|
&& sess.opts.output_types.len() == 1;
|
||||||
|
|
||||||
|
if !only_dep_info {
|
||||||
|
if let Some(ref dir) = compiler.output_dir {
|
||||||
|
if fs::create_dir_all(dir).is_err() {
|
||||||
|
sess.err("failed to find or create the directory specified by --out-dir");
|
||||||
|
return Err(ErrorReported);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(outputs)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn default_provide(providers: &mut ty::query::Providers) {
|
||||||
|
providers.analysis = analysis;
|
||||||
|
proc_macro_decls::provide(providers);
|
||||||
|
plugin::build::provide(providers);
|
||||||
|
hir::provide(providers);
|
||||||
|
borrowck::provide(providers);
|
||||||
|
mir::provide(providers);
|
||||||
|
reachable::provide(providers);
|
||||||
|
resolve_lifetime::provide(providers);
|
||||||
|
rustc_privacy::provide(providers);
|
||||||
|
typeck::provide(providers);
|
||||||
|
ty::provide(providers);
|
||||||
|
traits::provide(providers);
|
||||||
|
stability::provide(providers);
|
||||||
|
middle::intrinsicck::provide(providers);
|
||||||
|
middle::liveness::provide(providers);
|
||||||
|
reachable::provide(providers);
|
||||||
|
rustc_passes::provide(providers);
|
||||||
|
rustc_traits::provide(providers);
|
||||||
|
middle::region::provide(providers);
|
||||||
|
middle::entry::provide(providers);
|
||||||
|
cstore::provide(providers);
|
||||||
|
lint::provide(providers);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn default_provide_extern(providers: &mut ty::query::Providers) {
|
||||||
|
cstore::provide_extern(providers);
|
||||||
|
}
|
||||||
|
|
||||||
|
declare_box_region_type!(
|
||||||
|
pub BoxedGlobalCtxt,
|
||||||
|
for('gcx),
|
||||||
|
(&'gcx GlobalCtxt<'gcx>) -> ((), ())
|
||||||
|
);
|
||||||
|
|
||||||
|
impl BoxedGlobalCtxt {
|
||||||
|
pub fn enter<F, R>(&mut self, f: F) -> R
|
||||||
|
where
|
||||||
|
F: for<'tcx> FnOnce(TyCtxt<'tcx, 'tcx, 'tcx>) -> R
|
||||||
|
{
|
||||||
|
self.access(|gcx| ty::tls::enter_global(gcx, |tcx| f(tcx)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create_global_ctxt(
|
||||||
|
compiler: &Compiler,
|
||||||
|
mut hir_forest: hir::map::Forest,
|
||||||
|
defs: hir::map::Definitions,
|
||||||
|
resolutions: Resolutions,
|
||||||
|
outputs: OutputFilenames,
|
||||||
|
tx: mpsc::Sender<Box<dyn Any + Send>>,
|
||||||
|
crate_name: &str
|
||||||
|
) -> BoxedGlobalCtxt {
|
||||||
|
let sess = compiler.session().clone();
|
||||||
|
let cstore = compiler.cstore.clone();
|
||||||
|
let codegen_backend = compiler.codegen_backend().clone();
|
||||||
|
let crate_name = crate_name.to_string();
|
||||||
|
|
||||||
|
let ((), result) = BoxedGlobalCtxt::new(static move || {
|
||||||
|
let sess = &*sess;
|
||||||
|
let cstore = &*cstore;
|
||||||
|
|
||||||
|
let global_ctxt: Option<GlobalCtxt<'_>>;
|
||||||
|
let arenas = AllArenas::new();
|
||||||
|
|
||||||
|
// Construct the HIR map
|
||||||
|
let hir_map = time(sess, "indexing hir", || {
|
||||||
|
hir::map::map_crate(sess, cstore, &mut hir_forest, &defs)
|
||||||
|
});
|
||||||
|
|
||||||
|
let query_result_on_disk_cache = time(sess, "load query result cache", || {
|
||||||
|
rustc_incremental::load_query_result_cache(sess)
|
||||||
|
});
|
||||||
|
|
||||||
|
let mut local_providers = ty::query::Providers::default();
|
||||||
|
default_provide(&mut local_providers);
|
||||||
|
codegen_backend.provide(&mut local_providers);
|
||||||
|
|
||||||
|
let mut extern_providers = local_providers;
|
||||||
|
default_provide_extern(&mut extern_providers);
|
||||||
|
codegen_backend.provide_extern(&mut extern_providers);
|
||||||
|
|
||||||
|
let gcx = TyCtxt::create_global_ctxt(
|
||||||
|
sess,
|
||||||
|
cstore,
|
||||||
|
local_providers,
|
||||||
|
extern_providers,
|
||||||
|
&arenas,
|
||||||
|
resolutions,
|
||||||
|
hir_map,
|
||||||
|
query_result_on_disk_cache,
|
||||||
|
&crate_name,
|
||||||
|
tx,
|
||||||
|
&outputs
|
||||||
|
);
|
||||||
|
|
||||||
|
global_ctxt = Some(gcx);
|
||||||
|
let gcx = global_ctxt.as_ref().unwrap();
|
||||||
|
|
||||||
|
ty::tls::enter_global(gcx, |tcx| {
|
||||||
|
// Do some initialization of the DepGraph that can only be done with the
|
||||||
|
// tcx available.
|
||||||
|
time(tcx.sess, "dep graph tcx init", || rustc_incremental::dep_graph_tcx_init(tcx));
|
||||||
|
});
|
||||||
|
|
||||||
|
yield BoxedGlobalCtxt::initial_yield(());
|
||||||
|
box_region_allow_access!(for('gcx), (&'gcx GlobalCtxt<'gcx>), (gcx));
|
||||||
|
|
||||||
|
gcx.queries.record_computed_queries(sess);
|
||||||
|
|
||||||
|
if sess.opts.debugging_opts.query_stats {
|
||||||
|
gcx.queries.print_stats();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Runs the resolution, type-checking, region checking and other
|
||||||
|
/// miscellaneous analysis passes on the crate.
|
||||||
fn analysis<'tcx>(
|
fn analysis<'tcx>(
|
||||||
tcx: TyCtxt<'_, 'tcx, 'tcx>,
|
tcx: TyCtxt<'_, 'tcx, 'tcx>,
|
||||||
cnum: CrateNum,
|
cnum: CrateNum,
|
||||||
) -> Result<(), ErrorReported> {
|
) -> Result<()> {
|
||||||
assert_eq!(cnum, LOCAL_CRATE);
|
assert_eq!(cnum, LOCAL_CRATE);
|
||||||
|
|
||||||
let sess = tcx.sess;
|
let sess = tcx.sess;
|
||||||
|
@ -249,9 +945,9 @@ fn analysis<'tcx>(
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
time(sess,
|
time(sess, "MIR borrow checking", || {
|
||||||
"MIR borrow checking",
|
tcx.par_body_owners(|def_id| tcx.ensure().mir_borrowck(def_id));
|
||||||
|| tcx.par_body_owners(|def_id| { tcx.ensure().mir_borrowck(def_id); }));
|
});
|
||||||
|
|
||||||
time(sess, "dumping chalk-like clauses", || {
|
time(sess, "dumping chalk-like clauses", || {
|
||||||
rustc_traits::lowering::dump_program_clauses(tcx);
|
rustc_traits::lowering::dump_program_clauses(tcx);
|
||||||
|
@ -304,3 +1000,39 @@ fn analysis<'tcx>(
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Runs the codegen backend, after which the AST and analysis can
|
||||||
|
/// be discarded.
|
||||||
|
pub fn start_codegen<'tcx>(
|
||||||
|
codegen_backend: &dyn CodegenBackend,
|
||||||
|
tcx: TyCtxt<'_, 'tcx, 'tcx>,
|
||||||
|
rx: mpsc::Receiver<Box<dyn Any + Send>>,
|
||||||
|
outputs: &OutputFilenames,
|
||||||
|
) -> Box<dyn Any> {
|
||||||
|
if log_enabled!(::log::Level::Info) {
|
||||||
|
println!("Pre-codegen");
|
||||||
|
tcx.print_debug_stats();
|
||||||
|
}
|
||||||
|
|
||||||
|
time(tcx.sess, "resolving dependency formats", || {
|
||||||
|
::rustc::middle::dependency_format::calculate(tcx)
|
||||||
|
});
|
||||||
|
|
||||||
|
tcx.sess.profiler(|p| p.start_activity(ProfileCategory::Codegen));
|
||||||
|
let codegen = time(tcx.sess, "codegen", move || codegen_backend.codegen_crate(tcx, rx));
|
||||||
|
tcx.sess.profiler(|p| p.end_activity(ProfileCategory::Codegen));
|
||||||
|
|
||||||
|
if log_enabled!(::log::Level::Info) {
|
||||||
|
println!("Post-codegen");
|
||||||
|
tcx.print_debug_stats();
|
||||||
|
}
|
||||||
|
|
||||||
|
if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
|
||||||
|
if let Err(e) = mir::transform::dump_mir::emit_mir(tcx, outputs) {
|
||||||
|
tcx.sess.err(&format!("could not emit MIR: {}", e));
|
||||||
|
tcx.sess.abort_if_errors();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
codegen
|
||||||
|
}
|
||||||
|
|
302
src/librustc_interface/queries.rs
Normal file
302
src/librustc_interface/queries.rs
Normal file
|
@ -0,0 +1,302 @@
|
||||||
|
use interface::{Compiler, Result};
|
||||||
|
use passes::{self, BoxedResolver, ExpansionResult, BoxedGlobalCtxt, PluginInfo};
|
||||||
|
use rustc_incremental::DepGraphFuture;
|
||||||
|
use rustc_data_structures::sync::Lrc;
|
||||||
|
use rustc::session::config::{Input, OutputFilenames, OutputType};
|
||||||
|
use rustc::session::Session;
|
||||||
|
use rustc::util::common::{time, ErrorReported};
|
||||||
|
use rustc::util::profiling::ProfileCategory;
|
||||||
|
use rustc::lint;
|
||||||
|
use rustc::hir;
|
||||||
|
use rustc::hir::def_id::LOCAL_CRATE;
|
||||||
|
use rustc::ty;
|
||||||
|
use rustc::ty::steal::Steal;
|
||||||
|
use rustc::dep_graph::DepGraph;
|
||||||
|
use rustc_passes::hir_stats;
|
||||||
|
use rustc_plugin::registry::Registry;
|
||||||
|
use serialize::json;
|
||||||
|
use std::cell::{Ref, RefMut, RefCell};
|
||||||
|
use std::ops::Deref;
|
||||||
|
use std::rc::Rc;
|
||||||
|
use std::sync::mpsc;
|
||||||
|
use std::any::Any;
|
||||||
|
use std::mem;
|
||||||
|
use syntax::parse::{self, PResult};
|
||||||
|
use syntax::util::node_count::NodeCounter;
|
||||||
|
use syntax::{self, ast, attr, diagnostics, visit};
|
||||||
|
use syntax_pos::hygiene;
|
||||||
|
|
||||||
|
/// Represent the result of a query.
|
||||||
|
/// This result can be stolen with the `take` method and returned with the `give` method.
|
||||||
|
pub struct Query<T> {
|
||||||
|
result: RefCell<Option<Result<T>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Query<T> {
|
||||||
|
fn compute<F: FnOnce() -> Result<T>>(&self, f: F) -> Result<&Query<T>> {
|
||||||
|
let mut result = self.result.borrow_mut();
|
||||||
|
if result.is_none() {
|
||||||
|
*result = Some(f());
|
||||||
|
}
|
||||||
|
result.as_ref().unwrap().as_ref().map(|_| self).map_err(|err| *err)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Takes ownership of the query result. Further attempts to take or peek the query
|
||||||
|
/// result will panic unless it is returned by calling the `give` method.
|
||||||
|
pub fn take(&self) -> T {
|
||||||
|
self.result
|
||||||
|
.borrow_mut()
|
||||||
|
.take()
|
||||||
|
.expect("missing query result")
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a stolen query result. Panics if there's already a result.
|
||||||
|
pub fn give(&self, value: T) {
|
||||||
|
let mut result = self.result.borrow_mut();
|
||||||
|
assert!(result.is_none(), "a result already exists");
|
||||||
|
*result = Some(Ok(value));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Borrows the query result using the RefCell. Panics if the result is stolen.
|
||||||
|
pub fn peek(&self) -> Ref<'_, T> {
|
||||||
|
Ref::map(self.result.borrow(), |r| {
|
||||||
|
r.as_ref().unwrap().as_ref().expect("missing query result")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mutably borrows the query result using the RefCell. Panics if the result is stolen.
|
||||||
|
pub fn peek_mut(&self) -> RefMut<'_, T> {
|
||||||
|
RefMut::map(self.result.borrow_mut(), |r| {
|
||||||
|
r.as_mut().unwrap().as_mut().expect("missing query result")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Default for Query<T> {
|
||||||
|
fn default() -> Self {
|
||||||
|
Query {
|
||||||
|
result: RefCell::new(None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
|
pub(crate) struct Queries {
|
||||||
|
dep_graph_future: Query<Option<DepGraphFuture>>,
|
||||||
|
parse: Query<ast::Crate>,
|
||||||
|
crate_name: Query<String>,
|
||||||
|
register_plugins: Query<(ast::Crate, PluginInfo)>,
|
||||||
|
expansion: Query<(ast::Crate, Rc<Option<RefCell<BoxedResolver>>>)>,
|
||||||
|
dep_graph: Query<DepGraph>,
|
||||||
|
lower_to_hir: Query<(Steal<hir::map::Forest>, ExpansionResult)>,
|
||||||
|
prepare_outputs: Query<OutputFilenames>,
|
||||||
|
codegen_channel: Query<(Steal<mpsc::Sender<Box<dyn Any + Send>>>,
|
||||||
|
Steal<mpsc::Receiver<Box<dyn Any + Send>>>)>,
|
||||||
|
global_ctxt: Query<BoxedGlobalCtxt>,
|
||||||
|
ongoing_codegen: Query<Box<dyn Any>>,
|
||||||
|
link: Query<()>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Compiler {
|
||||||
|
pub fn dep_graph_future(&self) -> Result<&Query<Option<DepGraphFuture>>> {
|
||||||
|
self.queries.dep_graph_future.compute(|| {
|
||||||
|
Ok(if self.session().opts.build_dep_graph() {
|
||||||
|
Some(rustc_incremental::load_dep_graph(self.session()))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse(&self) -> Result<&Query<ast::Crate>> {
|
||||||
|
self.queries.parse.compute(|| {
|
||||||
|
passes::parse(self.session(), &self.input).map_err(
|
||||||
|
|mut parse_error| {
|
||||||
|
parse_error.emit();
|
||||||
|
ErrorReported
|
||||||
|
},
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn register_plugins(&self) -> Result<&Query<(ast::Crate, PluginInfo)>> {
|
||||||
|
self.queries.register_plugins.compute(|| {
|
||||||
|
let crate_name = self.crate_name()?.peek().clone();
|
||||||
|
let krate = self.parse()?.take();
|
||||||
|
|
||||||
|
passes::register_plugins(
|
||||||
|
self,
|
||||||
|
self.session(),
|
||||||
|
self.cstore(),
|
||||||
|
krate,
|
||||||
|
&crate_name,
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn crate_name(&self) -> Result<&Query<String>> {
|
||||||
|
self.queries.crate_name.compute(|| {
|
||||||
|
let parse_result = self.parse()?;
|
||||||
|
let krate = parse_result.peek();
|
||||||
|
let result = match self.crate_name {
|
||||||
|
Some(ref crate_name) => crate_name.clone(),
|
||||||
|
None => rustc_codegen_utils::link::find_crate_name(
|
||||||
|
Some(self.session()),
|
||||||
|
&krate.attrs,
|
||||||
|
&self.input
|
||||||
|
),
|
||||||
|
};
|
||||||
|
Ok(result)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expansion(
|
||||||
|
&self
|
||||||
|
) -> Result<&Query<(ast::Crate, Rc<Option<RefCell<BoxedResolver>>>)>> {
|
||||||
|
self.queries.expansion.compute(|| {
|
||||||
|
let crate_name = self.crate_name()?.peek().clone();
|
||||||
|
let (krate, plugin_info) = self.register_plugins()?.take();
|
||||||
|
passes::configure_and_expand(
|
||||||
|
self.sess.clone(),
|
||||||
|
self.cstore().clone(),
|
||||||
|
krate,
|
||||||
|
&crate_name,
|
||||||
|
plugin_info,
|
||||||
|
).map(|(krate, resolver)| (krate, Rc::new(Some(RefCell::new(resolver)))))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn dep_graph(&self) -> Result<&Query<DepGraph>> {
|
||||||
|
self.queries.dep_graph.compute(|| {
|
||||||
|
Ok(match self.dep_graph_future()?.take() {
|
||||||
|
None => DepGraph::new_disabled(),
|
||||||
|
Some(future) => {
|
||||||
|
let (prev_graph, prev_work_products) =
|
||||||
|
time(self.session(), "blocked while dep-graph loading finishes", || {
|
||||||
|
future.open().unwrap_or_else(|e| rustc_incremental::LoadResult::Error {
|
||||||
|
message: format!("could not decode incremental cache: {:?}", e),
|
||||||
|
}).open(self.session())
|
||||||
|
});
|
||||||
|
DepGraph::new(prev_graph, prev_work_products)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn lower_to_hir(&self) -> Result<&Query<(Steal<hir::map::Forest>, ExpansionResult)>> {
|
||||||
|
self.queries.lower_to_hir.compute(|| {
|
||||||
|
let expansion_result = self.expansion()?;
|
||||||
|
let (krate, resolver) = expansion_result.take();
|
||||||
|
let resolver_ref = &*resolver;
|
||||||
|
let hir = Steal::new(resolver_ref.as_ref().unwrap().borrow_mut().access(|resolver| {
|
||||||
|
passes::lower_to_hir(
|
||||||
|
self.session(),
|
||||||
|
self.cstore(),
|
||||||
|
resolver,
|
||||||
|
&*self.dep_graph()?.peek(),
|
||||||
|
&krate
|
||||||
|
)
|
||||||
|
})?);
|
||||||
|
expansion_result.give((krate, Rc::new(None)));
|
||||||
|
Ok((hir, BoxedResolver::to_expansion_result(resolver)))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn prepare_outputs(&self) -> Result<&Query<OutputFilenames>> {
|
||||||
|
self.queries.prepare_outputs.compute(|| {
|
||||||
|
self.lower_to_hir()?;
|
||||||
|
let krate = self.expansion()?;
|
||||||
|
let krate = krate.peek();
|
||||||
|
let crate_name = self.crate_name()?;
|
||||||
|
let crate_name = crate_name.peek();
|
||||||
|
passes::prepare_outputs(self.session(), self, &krate.0, &*crate_name)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn codegen_channel(&self) -> Result<&Query<(Steal<mpsc::Sender<Box<dyn Any + Send>>>,
|
||||||
|
Steal<mpsc::Receiver<Box<dyn Any + Send>>>)>> {
|
||||||
|
self.queries.codegen_channel.compute(|| {
|
||||||
|
let (tx, rx) = mpsc::channel();
|
||||||
|
Ok((Steal::new(tx), Steal::new(rx)))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn global_ctxt(&self) -> Result<&Query<BoxedGlobalCtxt>> {
|
||||||
|
self.queries.global_ctxt.compute(|| {
|
||||||
|
let crate_name = self.crate_name()?.peek().clone();
|
||||||
|
let outputs = self.prepare_outputs()?.peek().clone();
|
||||||
|
let hir = self.lower_to_hir()?;
|
||||||
|
let hir = hir.peek();
|
||||||
|
let (ref hir_forest, ref expansion) = *hir;
|
||||||
|
let tx = self.codegen_channel()?.peek().0.steal();
|
||||||
|
Ok(passes::create_global_ctxt(
|
||||||
|
self,
|
||||||
|
hir_forest.steal(),
|
||||||
|
expansion.defs.steal(),
|
||||||
|
expansion.resolutions.steal(),
|
||||||
|
outputs,
|
||||||
|
tx,
|
||||||
|
&crate_name))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn ongoing_codegen(&self) -> Result<&Query<Box<dyn Any>>> {
|
||||||
|
self.queries.ongoing_codegen.compute(|| {
|
||||||
|
let rx = self.codegen_channel()?.peek().1.steal();
|
||||||
|
let outputs = self.prepare_outputs()?;
|
||||||
|
self.global_ctxt()?.peek_mut().enter(|tcx| {
|
||||||
|
tcx.analysis(LOCAL_CRATE).ok();
|
||||||
|
|
||||||
|
// Don't do code generation if there were any errors
|
||||||
|
self.session().compile_status()?;
|
||||||
|
|
||||||
|
Ok(passes::start_codegen(
|
||||||
|
&***self.codegen_backend(),
|
||||||
|
tcx,
|
||||||
|
rx,
|
||||||
|
&*outputs.peek()
|
||||||
|
))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn link(&self) -> Result<&Query<()>> {
|
||||||
|
self.queries.link.compute(|| {
|
||||||
|
let sess = self.session();
|
||||||
|
|
||||||
|
let ongoing_codegen = self.ongoing_codegen()?.take();
|
||||||
|
|
||||||
|
self.codegen_backend().join_codegen_and_link(
|
||||||
|
ongoing_codegen,
|
||||||
|
sess,
|
||||||
|
&*self.dep_graph()?.peek(),
|
||||||
|
&*self.prepare_outputs()?.peek(),
|
||||||
|
).map_err(|_| ErrorReported)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn compile(&self) -> Result<()> {
|
||||||
|
self.prepare_outputs()?;
|
||||||
|
|
||||||
|
if self.session().opts.output_types.contains_key(&OutputType::DepInfo)
|
||||||
|
&& self.session().opts.output_types.len() == 1
|
||||||
|
{
|
||||||
|
return Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
self.global_ctxt()?;
|
||||||
|
|
||||||
|
// Drop AST after creating GlobalCtxt to free memory
|
||||||
|
mem::drop(self.expansion()?.take());
|
||||||
|
|
||||||
|
self.ongoing_codegen()?;
|
||||||
|
|
||||||
|
// Drop GlobalCtxt after starting codegen to free memory
|
||||||
|
mem::drop(self.global_ctxt()?.take());
|
||||||
|
|
||||||
|
self.link().map(|_| ())
|
||||||
|
}
|
||||||
|
}
|
|
@ -4,6 +4,8 @@ use rustc::session::CrateDisambiguator;
|
||||||
use rustc::ty;
|
use rustc::ty;
|
||||||
use rustc::lint;
|
use rustc::lint;
|
||||||
use rustc_codegen_utils::codegen_backend::CodegenBackend;
|
use rustc_codegen_utils::codegen_backend::CodegenBackend;
|
||||||
|
#[cfg(parallel_compiler)]
|
||||||
|
use rustc_data_structures::jobserver;
|
||||||
use rustc_data_structures::sync::{Lock, Lrc};
|
use rustc_data_structures::sync::{Lock, Lrc};
|
||||||
use rustc_data_structures::stable_hasher::StableHasher;
|
use rustc_data_structures::stable_hasher::StableHasher;
|
||||||
use rustc_data_structures::fingerprint::Fingerprint;
|
use rustc_data_structures::fingerprint::Fingerprint;
|
||||||
|
@ -79,6 +81,161 @@ pub fn add_configuration(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn create_session(
|
||||||
|
sopts: config::Options,
|
||||||
|
cfg: FxHashSet<(String, Option<String>)>,
|
||||||
|
diagnostic_output: DiagnosticOutput,
|
||||||
|
file_loader: Option<Box<dyn FileLoader + Send + Sync + 'static>>,
|
||||||
|
input_path: Option<PathBuf>,
|
||||||
|
lint_caps: FxHashMap<lint::LintId, lint::Level>,
|
||||||
|
) -> (Lrc<Session>, Lrc<Box<dyn CodegenBackend>>, Lrc<SourceMap>) {
|
||||||
|
let descriptions = diagnostics_registry();
|
||||||
|
|
||||||
|
let loader = file_loader.unwrap_or(box RealFileLoader);
|
||||||
|
let source_map = Lrc::new(SourceMap::with_file_loader(
|
||||||
|
loader,
|
||||||
|
sopts.file_path_mapping(),
|
||||||
|
));
|
||||||
|
let mut sess = session::build_session_with_source_map(
|
||||||
|
sopts,
|
||||||
|
input_path,
|
||||||
|
descriptions,
|
||||||
|
source_map.clone(),
|
||||||
|
diagnostic_output,
|
||||||
|
lint_caps,
|
||||||
|
);
|
||||||
|
|
||||||
|
let codegen_backend = get_codegen_backend(&sess);
|
||||||
|
|
||||||
|
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
|
||||||
|
|
||||||
|
let mut cfg = config::build_configuration(&sess, config::to_crate_config(cfg));
|
||||||
|
add_configuration(&mut cfg, &sess, &*codegen_backend);
|
||||||
|
sess.parse_sess.config = cfg;
|
||||||
|
|
||||||
|
(Lrc::new(sess), Lrc::new(codegen_backend), source_map)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Temporarily have stack size set to 32MB to deal with various crates with long method
|
||||||
|
// chains or deep syntax trees.
|
||||||
|
// FIXME(oli-obk): get https://github.com/rust-lang/rust/pull/55617 the finish line
|
||||||
|
const STACK_SIZE: usize = 32 * 1024 * 1024; // 32MB
|
||||||
|
|
||||||
|
fn get_stack_size() -> Option<usize> {
|
||||||
|
// FIXME: Hacks on hacks. If the env is trying to override the stack size
|
||||||
|
// then *don't* set it explicitly.
|
||||||
|
if env::var_os("RUST_MIN_STACK").is_none() {
|
||||||
|
Some(STACK_SIZE)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct Sink(Arc<Mutex<Vec<u8>>>);
|
||||||
|
impl Write for Sink {
|
||||||
|
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
|
||||||
|
Write::write(&mut *self.0.lock().unwrap(), data)
|
||||||
|
}
|
||||||
|
fn flush(&mut self) -> io::Result<()> { Ok(()) }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(parallel_compiler))]
|
||||||
|
pub fn scoped_thread<F: FnOnce() -> R + Send, R: Send>(cfg: thread::Builder, f: F) -> R {
|
||||||
|
struct Ptr(*mut ());
|
||||||
|
unsafe impl Send for Ptr {}
|
||||||
|
unsafe impl Sync for Ptr {}
|
||||||
|
|
||||||
|
let mut f = Some(f);
|
||||||
|
let run = Ptr(&mut f as *mut _ as *mut ());
|
||||||
|
let mut result = None;
|
||||||
|
let result_ptr = Ptr(&mut result as *mut _ as *mut ());
|
||||||
|
|
||||||
|
let thread = cfg.spawn(move || {
|
||||||
|
let run = unsafe { (*(run.0 as *mut Option<F>)).take().unwrap() };
|
||||||
|
let result = unsafe { &mut *(result_ptr.0 as *mut Option<R>) };
|
||||||
|
*result = Some(run());
|
||||||
|
});
|
||||||
|
|
||||||
|
match thread.unwrap().join() {
|
||||||
|
Ok(()) => result.unwrap(),
|
||||||
|
Err(p) => panic::resume_unwind(p),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(parallel_compiler))]
|
||||||
|
pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
|
||||||
|
_threads: Option<usize>,
|
||||||
|
stderr: &Option<Arc<Mutex<Vec<u8>>>>,
|
||||||
|
f: F,
|
||||||
|
) -> R {
|
||||||
|
let mut cfg = thread::Builder::new().name("rustc".to_string());
|
||||||
|
|
||||||
|
if let Some(size) = get_stack_size() {
|
||||||
|
cfg = cfg.stack_size(size);
|
||||||
|
}
|
||||||
|
|
||||||
|
scoped_thread(cfg, || {
|
||||||
|
syntax::with_globals( || {
|
||||||
|
ty::tls::GCX_PTR.set(&Lock::new(0), || {
|
||||||
|
if let Some(stderr) = stderr {
|
||||||
|
io::set_panic(Some(box Sink(stderr.clone())));
|
||||||
|
}
|
||||||
|
ty::tls::with_thread_locals(|| f())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(parallel_compiler)]
|
||||||
|
pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
|
||||||
|
threads: Option<usize>,
|
||||||
|
stderr: &Option<Arc<Mutex<Vec<u8>>>>,
|
||||||
|
f: F,
|
||||||
|
) -> R {
|
||||||
|
use rayon::{ThreadPool, ThreadPoolBuilder};
|
||||||
|
use syntax;
|
||||||
|
use syntax_pos;
|
||||||
|
|
||||||
|
let gcx_ptr = &Lock::new(0);
|
||||||
|
|
||||||
|
let mut config = ThreadPoolBuilder::new()
|
||||||
|
.acquire_thread_handler(jobserver::acquire_thread)
|
||||||
|
.release_thread_handler(jobserver::release_thread)
|
||||||
|
.num_threads(Session::threads_from_count(threads))
|
||||||
|
.deadlock_handler(|| unsafe { ty::query::handle_deadlock() });
|
||||||
|
|
||||||
|
if let Some(size) = get_stack_size() {
|
||||||
|
config = config.stack_size(size);
|
||||||
|
}
|
||||||
|
|
||||||
|
let with_pool = move |pool: &ThreadPool| pool.install(move || f());
|
||||||
|
|
||||||
|
syntax::with_globals(|| {
|
||||||
|
syntax::GLOBALS.with(|syntax_globals| {
|
||||||
|
syntax_pos::GLOBALS.with(|syntax_pos_globals| {
|
||||||
|
// The main handler runs for each Rayon worker thread and sets up
|
||||||
|
// the thread local rustc uses. syntax_globals and syntax_pos_globals are
|
||||||
|
// captured and set on the new threads. ty::tls::with_thread_locals sets up
|
||||||
|
// thread local callbacks from libsyntax
|
||||||
|
let main_handler = move |worker: &mut dyn FnMut()| {
|
||||||
|
syntax::GLOBALS.set(syntax_globals, || {
|
||||||
|
syntax_pos::GLOBALS.set(syntax_pos_globals, || {
|
||||||
|
if let Some(stderr) = stderr {
|
||||||
|
io::set_panic(Some(box Sink(stderr.clone())));
|
||||||
|
}
|
||||||
|
ty::tls::with_thread_locals(|| {
|
||||||
|
ty::tls::GCX_PTR.set(gcx_ptr, || worker())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
};
|
||||||
|
|
||||||
|
ThreadPool::scoped_pool(config, main_handler, with_pool).unwrap()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
fn load_backend_from_dylib(path: &Path) -> fn() -> Box<dyn CodegenBackend> {
|
fn load_backend_from_dylib(path: &Path) -> fn() -> Box<dyn CodegenBackend> {
|
||||||
let lib = DynamicLibrary::open(Some(path)).unwrap_or_else(|err| {
|
let lib = DynamicLibrary::open(Some(path)).unwrap_or_else(|err| {
|
||||||
let err = format!("couldn't load codegen backend {:?}: {:?}", path, err);
|
let err = format!("couldn't load codegen backend {:?}: {:?}", path, err);
|
||||||
|
@ -297,7 +454,7 @@ pub fn get_codegen_sysroot(backend_name: &str) -> fn() -> Box<dyn CodegenBackend
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn compute_crate_disambiguator(session: &Session) -> CrateDisambiguator {
|
pub(crate) fn compute_crate_disambiguator(session: &Session) -> CrateDisambiguator {
|
||||||
use std::hash::Hasher;
|
use std::hash::Hasher;
|
||||||
|
|
||||||
// The crate_disambiguator is a 128 bit hash. The disambiguator is fed
|
// The crate_disambiguator is a 128 bit hash. The disambiguator is fed
|
||||||
|
|
|
@ -7,13 +7,13 @@ use self::def_ctor::{get_def_from_def_id, get_def_from_hir_id};
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
pub struct AutoTraitFinder<'a, 'tcx: 'a, 'rcx: 'a> {
|
pub struct AutoTraitFinder<'a, 'tcx> {
|
||||||
pub cx: &'a core::DocContext<'a, 'tcx, 'rcx>,
|
pub cx: &'a core::DocContext<'tcx>,
|
||||||
pub f: auto::AutoTraitFinder<'a, 'tcx>,
|
pub f: auto::AutoTraitFinder<'a, 'tcx>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> AutoTraitFinder<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
|
||||||
pub fn new(cx: &'a core::DocContext<'a, 'tcx, 'rcx>) -> Self {
|
pub fn new(cx: &'a core::DocContext<'tcx>) -> Self {
|
||||||
let f = auto::AutoTraitFinder::new(&cx.tcx);
|
let f = auto::AutoTraitFinder::new(&cx.tcx);
|
||||||
|
|
||||||
AutoTraitFinder { cx, f }
|
AutoTraitFinder { cx, f }
|
||||||
|
|
|
@ -11,12 +11,12 @@ use super::*;
|
||||||
|
|
||||||
use self::def_ctor::{get_def_from_def_id, get_def_from_hir_id};
|
use self::def_ctor::{get_def_from_def_id, get_def_from_hir_id};
|
||||||
|
|
||||||
pub struct BlanketImplFinder<'a, 'tcx: 'a, 'rcx: 'a> {
|
pub struct BlanketImplFinder<'a, 'tcx> {
|
||||||
pub cx: &'a core::DocContext<'a, 'tcx, 'rcx>,
|
pub cx: &'a core::DocContext<'tcx>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> BlanketImplFinder <'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> BlanketImplFinder<'a, 'tcx> {
|
||||||
pub fn new(cx: &'a core::DocContext<'a, 'tcx, 'rcx>) -> Self {
|
pub fn new(cx: &'a core::DocContext<'tcx>) -> Self {
|
||||||
BlanketImplFinder { cx }
|
BlanketImplFinder { cx }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -2,7 +2,7 @@ use crate::core::DocContext;
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
pub fn get_def_from_def_id<F>(cx: &DocContext<'_, '_, '_>,
|
pub fn get_def_from_def_id<F>(cx: &DocContext<'_>,
|
||||||
def_id: DefId,
|
def_id: DefId,
|
||||||
callback: &F,
|
callback: &F,
|
||||||
) -> Vec<Item>
|
) -> Vec<Item>
|
||||||
|
@ -38,7 +38,7 @@ where F: Fn(& dyn Fn(DefId) -> Def) -> Vec<Item> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_def_from_hir_id<F>(cx: &DocContext<'_, '_, '_>,
|
pub fn get_def_from_hir_id<F>(cx: &DocContext<'_>,
|
||||||
id: hir::HirId,
|
id: hir::HirId,
|
||||||
name: String,
|
name: String,
|
||||||
callback: &F,
|
callback: &F,
|
||||||
|
|
|
@ -36,7 +36,7 @@ use super::Clean;
|
||||||
/// The returned value is `None` if the definition could not be inlined,
|
/// The returned value is `None` if the definition could not be inlined,
|
||||||
/// and `Some` of a vector of items if it was successfully expanded.
|
/// and `Some` of a vector of items if it was successfully expanded.
|
||||||
pub fn try_inline(
|
pub fn try_inline(
|
||||||
cx: &DocContext<'_, '_, '_>,
|
cx: &DocContext<'_>,
|
||||||
def: Def,
|
def: Def,
|
||||||
name: ast::Name,
|
name: ast::Name,
|
||||||
visited: &mut FxHashSet<DefId>
|
visited: &mut FxHashSet<DefId>
|
||||||
|
@ -129,7 +129,7 @@ pub fn try_inline(
|
||||||
Some(ret)
|
Some(ret)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn try_inline_glob(cx: &DocContext<'_, '_, '_>, def: Def, visited: &mut FxHashSet<DefId>)
|
pub fn try_inline_glob(cx: &DocContext<'_>, def: Def, visited: &mut FxHashSet<DefId>)
|
||||||
-> Option<Vec<clean::Item>>
|
-> Option<Vec<clean::Item>>
|
||||||
{
|
{
|
||||||
if def == Def::Err { return None }
|
if def == Def::Err { return None }
|
||||||
|
@ -146,7 +146,7 @@ pub fn try_inline_glob(cx: &DocContext<'_, '_, '_>, def: Def, visited: &mut FxHa
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn load_attrs(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Attributes {
|
pub fn load_attrs(cx: &DocContext<'_>, did: DefId) -> clean::Attributes {
|
||||||
cx.tcx.get_attrs(did).clean(cx)
|
cx.tcx.get_attrs(did).clean(cx)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -154,7 +154,7 @@ pub fn load_attrs(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Attributes
|
||||||
///
|
///
|
||||||
/// These names are used later on by HTML rendering to generate things like
|
/// These names are used later on by HTML rendering to generate things like
|
||||||
/// source links back to the original item.
|
/// source links back to the original item.
|
||||||
pub fn record_extern_fqn(cx: &DocContext<'_, '_, '_>, did: DefId, kind: clean::TypeKind) {
|
pub fn record_extern_fqn(cx: &DocContext<'_>, did: DefId, kind: clean::TypeKind) {
|
||||||
let mut crate_name = cx.tcx.crate_name(did.krate).to_string();
|
let mut crate_name = cx.tcx.crate_name(did.krate).to_string();
|
||||||
if did.is_local() {
|
if did.is_local() {
|
||||||
crate_name = cx.crate_name.clone().unwrap_or(crate_name);
|
crate_name = cx.crate_name.clone().unwrap_or(crate_name);
|
||||||
|
@ -182,7 +182,7 @@ pub fn record_extern_fqn(cx: &DocContext<'_, '_, '_>, did: DefId, kind: clean::T
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn build_external_trait(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Trait {
|
pub fn build_external_trait(cx: &DocContext<'_>, did: DefId) -> clean::Trait {
|
||||||
let auto_trait = cx.tcx.trait_def(did).has_auto_impl;
|
let auto_trait = cx.tcx.trait_def(did).has_auto_impl;
|
||||||
let trait_items = cx.tcx.associated_items(did).map(|item| item.clean(cx)).collect();
|
let trait_items = cx.tcx.associated_items(did).map(|item| item.clean(cx)).collect();
|
||||||
let predicates = cx.tcx.predicates_of(did);
|
let predicates = cx.tcx.predicates_of(did);
|
||||||
|
@ -202,7 +202,7 @@ pub fn build_external_trait(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::T
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_external_function(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Function {
|
fn build_external_function(cx: &DocContext<'_>, did: DefId) -> clean::Function {
|
||||||
let sig = cx.tcx.fn_sig(did);
|
let sig = cx.tcx.fn_sig(did);
|
||||||
|
|
||||||
let constness = if cx.tcx.is_min_const_fn(did) {
|
let constness = if cx.tcx.is_min_const_fn(did) {
|
||||||
|
@ -224,7 +224,7 @@ fn build_external_function(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Fu
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_enum(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Enum {
|
fn build_enum(cx: &DocContext<'_>, did: DefId) -> clean::Enum {
|
||||||
let predicates = cx.tcx.predicates_of(did);
|
let predicates = cx.tcx.predicates_of(did);
|
||||||
|
|
||||||
clean::Enum {
|
clean::Enum {
|
||||||
|
@ -234,7 +234,7 @@ fn build_enum(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Enum {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_struct(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Struct {
|
fn build_struct(cx: &DocContext<'_>, did: DefId) -> clean::Struct {
|
||||||
let predicates = cx.tcx.predicates_of(did);
|
let predicates = cx.tcx.predicates_of(did);
|
||||||
let variant = cx.tcx.adt_def(did).non_enum_variant();
|
let variant = cx.tcx.adt_def(did).non_enum_variant();
|
||||||
|
|
||||||
|
@ -250,7 +250,7 @@ fn build_struct(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Struct {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_union(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Union {
|
fn build_union(cx: &DocContext<'_>, did: DefId) -> clean::Union {
|
||||||
let predicates = cx.tcx.predicates_of(did);
|
let predicates = cx.tcx.predicates_of(did);
|
||||||
let variant = cx.tcx.adt_def(did).non_enum_variant();
|
let variant = cx.tcx.adt_def(did).non_enum_variant();
|
||||||
|
|
||||||
|
@ -262,7 +262,7 @@ fn build_union(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Union {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_type_alias(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Typedef {
|
fn build_type_alias(cx: &DocContext<'_>, did: DefId) -> clean::Typedef {
|
||||||
let predicates = cx.tcx.predicates_of(did);
|
let predicates = cx.tcx.predicates_of(did);
|
||||||
|
|
||||||
clean::Typedef {
|
clean::Typedef {
|
||||||
|
@ -271,7 +271,7 @@ fn build_type_alias(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Typedef {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn build_impls(cx: &DocContext<'_, '_, '_>, did: DefId) -> Vec<clean::Item> {
|
pub fn build_impls(cx: &DocContext<'_>, did: DefId) -> Vec<clean::Item> {
|
||||||
let tcx = cx.tcx;
|
let tcx = cx.tcx;
|
||||||
let mut impls = Vec::new();
|
let mut impls = Vec::new();
|
||||||
|
|
||||||
|
@ -282,7 +282,7 @@ pub fn build_impls(cx: &DocContext<'_, '_, '_>, did: DefId) -> Vec<clean::Item>
|
||||||
impls
|
impls
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn build_impl(cx: &DocContext<'_, '_, '_>, did: DefId, ret: &mut Vec<clean::Item>) {
|
pub fn build_impl(cx: &DocContext<'_>, did: DefId, ret: &mut Vec<clean::Item>) {
|
||||||
if !cx.renderinfo.borrow_mut().inlined.insert(did) {
|
if !cx.renderinfo.borrow_mut().inlined.insert(did) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
@ -393,7 +393,7 @@ pub fn build_impl(cx: &DocContext<'_, '_, '_>, did: DefId, ret: &mut Vec<clean::
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_module(
|
fn build_module(
|
||||||
cx: &DocContext<'_, '_, '_>,
|
cx: &DocContext<'_>,
|
||||||
did: DefId,
|
did: DefId,
|
||||||
visited: &mut FxHashSet<DefId>
|
visited: &mut FxHashSet<DefId>
|
||||||
) -> clean::Module {
|
) -> clean::Module {
|
||||||
|
@ -404,7 +404,7 @@ fn build_module(
|
||||||
is_crate: false,
|
is_crate: false,
|
||||||
};
|
};
|
||||||
|
|
||||||
fn fill_in(cx: &DocContext<'_, '_, '_>, did: DefId, items: &mut Vec<clean::Item>,
|
fn fill_in(cx: &DocContext<'_>, did: DefId, items: &mut Vec<clean::Item>,
|
||||||
visited: &mut FxHashSet<DefId>) {
|
visited: &mut FxHashSet<DefId>) {
|
||||||
// If we're re-exporting a re-export it may actually re-export something in
|
// If we're re-exporting a re-export it may actually re-export something in
|
||||||
// two namespaces, so the target may be listed twice. Make sure we only
|
// two namespaces, so the target may be listed twice. Make sure we only
|
||||||
|
@ -421,7 +421,7 @@ fn build_module(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn print_inlined_const(cx: &DocContext<'_, '_, '_>, did: DefId) -> String {
|
pub fn print_inlined_const(cx: &DocContext<'_>, did: DefId) -> String {
|
||||||
if let Some(node_id) = cx.tcx.hir().as_local_hir_id(did) {
|
if let Some(node_id) = cx.tcx.hir().as_local_hir_id(did) {
|
||||||
cx.tcx.hir().hir_to_pretty_string(node_id)
|
cx.tcx.hir().hir_to_pretty_string(node_id)
|
||||||
} else {
|
} else {
|
||||||
|
@ -429,14 +429,14 @@ pub fn print_inlined_const(cx: &DocContext<'_, '_, '_>, did: DefId) -> String {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_const(cx: &DocContext<'_, '_, '_>, did: DefId) -> clean::Constant {
|
fn build_const(cx: &DocContext<'_>, did: DefId) -> clean::Constant {
|
||||||
clean::Constant {
|
clean::Constant {
|
||||||
type_: cx.tcx.type_of(did).clean(cx),
|
type_: cx.tcx.type_of(did).clean(cx),
|
||||||
expr: print_inlined_const(cx, did)
|
expr: print_inlined_const(cx, did)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_static(cx: &DocContext<'_, '_, '_>, did: DefId, mutable: bool) -> clean::Static {
|
fn build_static(cx: &DocContext<'_>, did: DefId, mutable: bool) -> clean::Static {
|
||||||
clean::Static {
|
clean::Static {
|
||||||
type_: cx.tcx.type_of(did).clean(cx),
|
type_: cx.tcx.type_of(did).clean(cx),
|
||||||
mutability: if mutable {clean::Mutable} else {clean::Immutable},
|
mutability: if mutable {clean::Mutable} else {clean::Immutable},
|
||||||
|
@ -444,7 +444,7 @@ fn build_static(cx: &DocContext<'_, '_, '_>, did: DefId, mutable: bool) -> clean
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_macro(cx: &DocContext<'_, '_, '_>, did: DefId, name: ast::Name) -> clean::ItemEnum {
|
fn build_macro(cx: &DocContext<'_>, did: DefId, name: ast::Name) -> clean::ItemEnum {
|
||||||
let imported_from = cx.tcx.original_crate_name(did.krate);
|
let imported_from = cx.tcx.original_crate_name(did.krate);
|
||||||
match cx.cstore.load_macro_untracked(did, cx.sess()) {
|
match cx.cstore.load_macro_untracked(did, cx.sess()) {
|
||||||
LoadedMacro::MacroDef(def) => {
|
LoadedMacro::MacroDef(def) => {
|
||||||
|
@ -546,7 +546,7 @@ fn separate_supertrait_bounds(mut g: clean::Generics)
|
||||||
(g, ty_bounds)
|
(g, ty_bounds)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn record_extern_trait(cx: &DocContext<'_, '_, '_>, did: DefId) {
|
pub fn record_extern_trait(cx: &DocContext<'_>, did: DefId) {
|
||||||
if did.is_local() {
|
if did.is_local() {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
|
@ -71,56 +71,56 @@ thread_local!(pub static MAX_DEF_ID: RefCell<FxHashMap<CrateNum, DefId>> = Defau
|
||||||
const FN_OUTPUT_NAME: &'static str = "Output";
|
const FN_OUTPUT_NAME: &'static str = "Output";
|
||||||
|
|
||||||
// extract the stability index for a node from tcx, if possible
|
// extract the stability index for a node from tcx, if possible
|
||||||
fn get_stability(cx: &DocContext<'_, '_, '_>, def_id: DefId) -> Option<Stability> {
|
fn get_stability(cx: &DocContext<'_>, def_id: DefId) -> Option<Stability> {
|
||||||
cx.tcx.lookup_stability(def_id).clean(cx)
|
cx.tcx.lookup_stability(def_id).clean(cx)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_deprecation(cx: &DocContext<'_, '_, '_>, def_id: DefId) -> Option<Deprecation> {
|
fn get_deprecation(cx: &DocContext<'_>, def_id: DefId) -> Option<Deprecation> {
|
||||||
cx.tcx.lookup_deprecation(def_id).clean(cx)
|
cx.tcx.lookup_deprecation(def_id).clean(cx)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait Clean<T> {
|
pub trait Clean<T> {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> T;
|
fn clean(&self, cx: &DocContext<'_>) -> T;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Clean<U>, U> Clean<Vec<U>> for [T] {
|
impl<T: Clean<U>, U> Clean<Vec<U>> for [T] {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Vec<U> {
|
fn clean(&self, cx: &DocContext<'_>) -> Vec<U> {
|
||||||
self.iter().map(|x| x.clean(cx)).collect()
|
self.iter().map(|x| x.clean(cx)).collect()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Clean<U>, U, V: Idx> Clean<IndexVec<V, U>> for IndexVec<V, T> {
|
impl<T: Clean<U>, U, V: Idx> Clean<IndexVec<V, U>> for IndexVec<V, T> {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> IndexVec<V, U> {
|
fn clean(&self, cx: &DocContext<'_>) -> IndexVec<V, U> {
|
||||||
self.iter().map(|x| x.clean(cx)).collect()
|
self.iter().map(|x| x.clean(cx)).collect()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Clean<U>, U> Clean<U> for P<T> {
|
impl<T: Clean<U>, U> Clean<U> for P<T> {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> U {
|
fn clean(&self, cx: &DocContext<'_>) -> U {
|
||||||
(**self).clean(cx)
|
(**self).clean(cx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Clean<U>, U> Clean<U> for Rc<T> {
|
impl<T: Clean<U>, U> Clean<U> for Rc<T> {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> U {
|
fn clean(&self, cx: &DocContext<'_>) -> U {
|
||||||
(**self).clean(cx)
|
(**self).clean(cx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Clean<U>, U> Clean<Option<U>> for Option<T> {
|
impl<T: Clean<U>, U> Clean<Option<U>> for Option<T> {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Option<U> {
|
fn clean(&self, cx: &DocContext<'_>) -> Option<U> {
|
||||||
self.as_ref().map(|v| v.clean(cx))
|
self.as_ref().map(|v| v.clean(cx))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T, U> Clean<U> for ty::Binder<T> where T: Clean<U> {
|
impl<T, U> Clean<U> for ty::Binder<T> where T: Clean<U> {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> U {
|
fn clean(&self, cx: &DocContext<'_>) -> U {
|
||||||
self.skip_binder().clean(cx)
|
self.skip_binder().clean(cx)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Clean<U>, U> Clean<Vec<U>> for P<[T]> {
|
impl<T: Clean<U>, U> Clean<Vec<U>> for P<[T]> {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Vec<U> {
|
fn clean(&self, cx: &DocContext<'_>) -> Vec<U> {
|
||||||
self.iter().map(|x| x.clean(cx)).collect()
|
self.iter().map(|x| x.clean(cx)).collect()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -139,8 +139,8 @@ pub struct Crate {
|
||||||
pub masked_crates: FxHashSet<CrateNum>,
|
pub masked_crates: FxHashSet<CrateNum>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> Clean<Crate> for visit_ast::RustdocVisitor<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> Clean<Crate> for visit_ast::RustdocVisitor<'a, 'tcx> {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Crate {
|
fn clean(&self, cx: &DocContext<'_>) -> Crate {
|
||||||
use crate::visit_lib::LibEmbargoVisitor;
|
use crate::visit_lib::LibEmbargoVisitor;
|
||||||
|
|
||||||
{
|
{
|
||||||
|
@ -234,7 +234,7 @@ pub struct ExternalCrate {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Clean<ExternalCrate> for CrateNum {
|
impl Clean<ExternalCrate> for CrateNum {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> ExternalCrate {
|
fn clean(&self, cx: &DocContext<'_>) -> ExternalCrate {
|
||||||
let root = DefId { krate: *self, index: CRATE_DEF_INDEX };
|
let root = DefId { krate: *self, index: CRATE_DEF_INDEX };
|
||||||
let krate_span = cx.tcx.def_span(root);
|
let krate_span = cx.tcx.def_span(root);
|
||||||
let krate_src = cx.sess().source_map().span_to_filename(krate_span);
|
let krate_src = cx.sess().source_map().span_to_filename(krate_span);
|
||||||
|
@ -582,7 +582,7 @@ pub struct Module {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Clean<Item> for doctree::Module {
|
impl Clean<Item> for doctree::Module {
|
||||||
fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
|
fn clean(&self, cx: &DocContext<'_>) -> Item {
|
||||||
let name = if self.name.is_some() {
|
let name = if self.name.is_some() {
|
||||||
self.name.expect("No name provided").clean(cx)
|
self.name.expect("No name provided").clean(cx)
|
||||||
} else {
|
} else {
|
||||||
|
@@ -1023,7 +1023,7 @@ impl AttributesExt for Attributes {
 }
 
 impl Clean<Attributes> for [ast::Attribute] {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Attributes {
+    fn clean(&self, cx: &DocContext<'_>) -> Attributes {
         Attributes::from_ast(cx.sess().diagnostic(), self)
     }
 }
@@ -1035,7 +1035,7 @@ pub enum GenericBound {
 }
 
 impl GenericBound {
-    fn maybe_sized(cx: &DocContext<'_, '_, '_>) -> GenericBound {
+    fn maybe_sized(cx: &DocContext<'_>) -> GenericBound {
         let did = cx.tcx.require_lang_item(lang_items::SizedTraitLangItem);
         let empty = cx.tcx.intern_substs(&[]);
         let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
@@ -1052,7 +1052,7 @@ impl GenericBound {
         }, hir::TraitBoundModifier::Maybe)
     }
 
-    fn is_sized_bound(&self, cx: &DocContext<'_, '_, '_>) -> bool {
+    fn is_sized_bound(&self, cx: &DocContext<'_>) -> bool {
         use rustc::hir::TraitBoundModifier as TBM;
         if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self {
             if trait_.def_id() == cx.tcx.lang_items().sized_trait() {
@@ -1078,7 +1078,7 @@ impl GenericBound {
 }
 
 impl Clean<GenericBound> for hir::GenericBound {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> GenericBound {
+    fn clean(&self, cx: &DocContext<'_>) -> GenericBound {
         match *self {
             hir::GenericBound::Outlives(lt) => GenericBound::Outlives(lt.clean(cx)),
             hir::GenericBound::Trait(ref t, modifier) => {
@@ -1088,7 +1088,7 @@ impl Clean<GenericBound> for hir::GenericBound {
     }
 }
 
-fn external_generic_args(cx: &DocContext<'_, '_, '_>, trait_did: Option<DefId>, has_self: bool,
+fn external_generic_args(cx: &DocContext<'_>, trait_did: Option<DefId>, has_self: bool,
                          bindings: Vec<TypeBinding>, substs: SubstsRef<'_>) -> GenericArgs {
     let lifetimes = substs.regions().filter_map(|v| v.clean(cx)).collect();
     let types = substs.types().skip(has_self as usize).collect::<Vec<_>>();
@@ -1130,7 +1130,7 @@ fn external_generic_args(cx: &DocContext<'_, '_, '_>, trait_did: Option<DefId>,
 
 // trait_did should be set to a trait's DefId if called on a TraitRef, in order to sugar
 // from Fn<(A, B,), C> to Fn(A, B) -> C
-fn external_path(cx: &DocContext<'_, '_, '_>, name: &str, trait_did: Option<DefId>, has_self: bool,
+fn external_path(cx: &DocContext<'_>, name: &str, trait_did: Option<DefId>, has_self: bool,
                  bindings: Vec<TypeBinding>, substs: SubstsRef<'_>) -> Path {
     Path {
         global: false,
@@ -1143,7 +1143,7 @@ fn external_path(cx: &DocContext<'_, '_, '_>, name: &str, trait_did: Option<DefI
 }
 
 impl<'a, 'tcx> Clean<GenericBound> for (&'a ty::TraitRef<'tcx>, Vec<TypeBinding>) {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> GenericBound {
+    fn clean(&self, cx: &DocContext<'_>) -> GenericBound {
         let (trait_ref, ref bounds) = *self;
         inline::record_extern_fqn(cx, trait_ref.def_id, TypeKind::Trait);
         let path = external_path(cx, &cx.tcx.item_name(trait_ref.def_id).as_str(),
@@ -1187,13 +1187,13 @@ impl<'a, 'tcx> Clean<GenericBound> for (&'a ty::TraitRef<'tcx>, Vec<TypeBinding>
 }
 
 impl<'tcx> Clean<GenericBound> for ty::TraitRef<'tcx> {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> GenericBound {
+    fn clean(&self, cx: &DocContext<'_>) -> GenericBound {
         (self, vec![]).clean(cx)
     }
 }
 
 impl<'tcx> Clean<Option<Vec<GenericBound>>> for InternalSubsts<'tcx> {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Option<Vec<GenericBound>> {
+    fn clean(&self, cx: &DocContext<'_>) -> Option<Vec<GenericBound>> {
         let mut v = Vec::new();
         v.extend(self.regions().filter_map(|r| r.clean(cx)).map(GenericBound::Outlives));
         v.extend(self.types().map(|t| GenericBound::TraitBound(PolyTrait {
@@ -1220,7 +1220,7 @@ impl Lifetime {
 }
 
 impl Clean<Lifetime> for hir::Lifetime {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Lifetime {
+    fn clean(&self, cx: &DocContext<'_>) -> Lifetime {
         if self.hir_id != hir::DUMMY_HIR_ID {
             let def = cx.tcx.named_region(self.hir_id);
             match def {
@@ -1239,7 +1239,7 @@ impl Clean<Lifetime> for hir::Lifetime {
 }
 
 impl Clean<Lifetime> for hir::GenericParam {
-    fn clean(&self, _: &DocContext<'_, '_, '_>) -> Lifetime {
+    fn clean(&self, _: &DocContext<'_>) -> Lifetime {
         match self.kind {
             hir::GenericParamKind::Lifetime { .. } => {
                 if self.bounds.len() > 0 {
@@ -1263,7 +1263,7 @@ impl Clean<Lifetime> for hir::GenericParam {
 }
 
 impl Clean<Constant> for hir::ConstArg {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Constant {
+    fn clean(&self, cx: &DocContext<'_>) -> Constant {
         Constant {
             type_: cx.tcx.type_of(cx.tcx.hir().body_owner_def_id(self.value.body)).clean(cx),
             expr: print_const_expr(cx, self.value.body),
@@ -1272,13 +1272,13 @@ impl Clean<Constant> for hir::ConstArg {
 }
 
 impl<'tcx> Clean<Lifetime> for ty::GenericParamDef {
-    fn clean(&self, _cx: &DocContext<'_, '_, '_>) -> Lifetime {
+    fn clean(&self, _cx: &DocContext<'_>) -> Lifetime {
         Lifetime(self.name.to_string())
     }
 }
 
 impl Clean<Option<Lifetime>> for ty::RegionKind {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Option<Lifetime> {
+    fn clean(&self, cx: &DocContext<'_>) -> Option<Lifetime> {
         match *self {
             ty::ReStatic => Some(Lifetime::statik()),
             ty::ReLateBound(_, ty::BrNamed(_, name)) => Some(Lifetime(name.to_string())),
@@ -1307,7 +1307,7 @@ pub enum WherePredicate {
 }
 
 impl Clean<WherePredicate> for hir::WherePredicate {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> WherePredicate {
+    fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
         match *self {
             hir::WherePredicate::BoundPredicate(ref wbp) => {
                 WherePredicate::BoundPredicate {
@@ -1334,7 +1334,7 @@ impl Clean<WherePredicate> for hir::WherePredicate {
 }
 
 impl<'a> Clean<Option<WherePredicate>> for ty::Predicate<'a> {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Option<WherePredicate> {
+    fn clean(&self, cx: &DocContext<'_>) -> Option<WherePredicate> {
         use rustc::ty::Predicate;
 
         match *self {
@@ -1353,7 +1353,7 @@ impl<'a> Clean<Option<WherePredicate>> for ty::Predicate<'a> {
 }
 
 impl<'a> Clean<WherePredicate> for ty::TraitPredicate<'a> {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> WherePredicate {
+    fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
         WherePredicate::BoundPredicate {
             ty: self.trait_ref.self_ty().clean(cx),
             bounds: vec![self.trait_ref.clean(cx)]
@@ -1362,7 +1362,7 @@ impl<'a> Clean<WherePredicate> for ty::TraitPredicate<'a> {
 }
 
 impl<'tcx> Clean<WherePredicate> for ty::SubtypePredicate<'tcx> {
-    fn clean(&self, _cx: &DocContext<'_, '_, '_>) -> WherePredicate {
+    fn clean(&self, _cx: &DocContext<'_>) -> WherePredicate {
         panic!("subtype predicates are an internal rustc artifact \
                 and should not be seen by rustdoc")
     }
@@ -1371,7 +1371,7 @@ impl<'tcx> Clean<WherePredicate> for ty::SubtypePredicate<'tcx> {
 impl<'tcx> Clean<Option<WherePredicate>> for
     ty::OutlivesPredicate<ty::Region<'tcx>,ty::Region<'tcx>> {
 
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Option<WherePredicate> {
+    fn clean(&self, cx: &DocContext<'_>) -> Option<WherePredicate> {
         let ty::OutlivesPredicate(ref a, ref b) = *self;
 
         match (a, b) {
@@ -1389,7 +1389,7 @@ impl<'tcx> Clean<Option<WherePredicate>> for
 }
 
 impl<'tcx> Clean<Option<WherePredicate>> for ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>> {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Option<WherePredicate> {
+    fn clean(&self, cx: &DocContext<'_>) -> Option<WherePredicate> {
         let ty::OutlivesPredicate(ref ty, ref lt) = *self;
 
         match lt {
@@ -1405,7 +1405,7 @@ impl<'tcx> Clean<Option<WherePredicate>> for ty::OutlivesPredicate<Ty<'tcx>, ty:
 }
 
 impl<'tcx> Clean<WherePredicate> for ty::ProjectionPredicate<'tcx> {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> WherePredicate {
+    fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
         WherePredicate::EqPredicate {
             lhs: self.projection_ty.clean(cx),
             rhs: self.ty.clean(cx)
@@ -1414,7 +1414,7 @@ impl<'tcx> Clean<WherePredicate> for ty::ProjectionPredicate<'tcx> {
 }
 
 impl<'tcx> Clean<Type> for ty::ProjectionTy<'tcx> {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Type {
+    fn clean(&self, cx: &DocContext<'_>) -> Type {
         let trait_ = match self.trait_ref(cx.tcx).clean(cx) {
             GenericBound::TraitBound(t, _) => t.trait_,
             GenericBound::Outlives(_) => panic!("cleaning a trait got a lifetime"),
@@ -1462,7 +1462,7 @@ impl GenericParamDef {
 }
 
 impl<'tcx> Clean<GenericParamDef> for ty::GenericParamDef {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> GenericParamDef {
+    fn clean(&self, cx: &DocContext<'_>) -> GenericParamDef {
         let (name, kind) = match self.kind {
             ty::GenericParamDefKind::Lifetime => {
                 (self.name.to_string(), GenericParamDefKind::Lifetime)
@@ -1495,7 +1495,7 @@ impl<'tcx> Clean<GenericParamDef> for ty::GenericParamDef {
 }
 
 impl Clean<GenericParamDef> for hir::GenericParam {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> GenericParamDef {
+    fn clean(&self, cx: &DocContext<'_>) -> GenericParamDef {
         let (name, kind) = match self.kind {
             hir::GenericParamKind::Lifetime { .. } => {
                 let name = if self.bounds.len() > 0 {
@@ -1545,7 +1545,7 @@ pub struct Generics {
 }
 
 impl Clean<Generics> for hir::Generics {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Generics {
+    fn clean(&self, cx: &DocContext<'_>) -> Generics {
         // Synthetic type-parameters are inserted after normal ones.
         // In order for normal parameters to be able to refer to synthetic ones,
         // scans them first.
@@ -1615,7 +1615,7 @@ impl Clean<Generics> for hir::Generics {
 
 impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics,
                                     &'a Lrc<ty::GenericPredicates<'tcx>>) {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Generics {
+    fn clean(&self, cx: &DocContext<'_>) -> Generics {
         use self::WherePredicate as WP;
 
         let (gens, preds) = *self;
@@ -1702,7 +1702,7 @@ pub struct Method {
 }
 
 impl<'a> Clean<Method> for (&'a hir::MethodSig, &'a hir::Generics, hir::BodyId) {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Method {
+    fn clean(&self, cx: &DocContext<'_>) -> Method {
         let (generics, decl) = enter_impl_trait(cx, || {
             (self.1.clean(cx), (&*self.0.decl, self.2).clean(cx))
         });
@@ -1729,7 +1729,7 @@ pub struct Function {
 }
 
 impl Clean<Item> for doctree::Function {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let (generics, decl) = enter_impl_trait(cx, || {
             (self.generics.clean(cx), (&self.decl, self.body).clean(cx))
         });
@@ -1800,7 +1800,7 @@ pub struct Arguments {
 }
 
 impl<'a> Clean<Arguments> for (&'a [hir::Ty], &'a [ast::Ident]) {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Arguments {
+    fn clean(&self, cx: &DocContext<'_>) -> Arguments {
         Arguments {
             values: self.0.iter().enumerate().map(|(i, ty)| {
                 let mut name = self.1.get(i).map(|ident| ident.to_string())
@@ -1818,7 +1818,7 @@ impl<'a> Clean<Arguments> for (&'a [hir::Ty], &'a [ast::Ident]) {
 }
 
 impl<'a> Clean<Arguments> for (&'a [hir::Ty], hir::BodyId) {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Arguments {
+    fn clean(&self, cx: &DocContext<'_>) -> Arguments {
         let body = cx.tcx.hir().body(self.1);
 
         Arguments {
@@ -1835,7 +1835,7 @@ impl<'a> Clean<Arguments> for (&'a [hir::Ty], hir::BodyId) {
 impl<'a, A: Copy> Clean<FnDecl> for (&'a hir::FnDecl, A)
     where (&'a [hir::Ty], A): Clean<Arguments>
 {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> FnDecl {
+    fn clean(&self, cx: &DocContext<'_>) -> FnDecl {
         FnDecl {
             inputs: (&self.0.inputs[..], self.1).clean(cx),
             output: self.0.output.clean(cx),
@@ -1845,7 +1845,7 @@ impl<'a, A: Copy> Clean<FnDecl> for (&'a hir::FnDecl, A)
 }
 
 impl<'a, 'tcx> Clean<FnDecl> for (DefId, ty::PolyFnSig<'tcx>) {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> FnDecl {
+    fn clean(&self, cx: &DocContext<'_>) -> FnDecl {
         let (did, sig) = *self;
         let mut names = if cx.tcx.hir().as_local_hir_id(did).is_some() {
             vec![].into_iter()
@@ -1905,7 +1905,7 @@ pub enum FunctionRetTy {
 }
 
 impl Clean<FunctionRetTy> for hir::FunctionRetTy {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> FunctionRetTy {
+    fn clean(&self, cx: &DocContext<'_>) -> FunctionRetTy {
         match *self {
             hir::Return(ref typ) => Return(typ.clean(cx)),
             hir::DefaultReturn(..) => DefaultReturn,
@@ -1934,7 +1934,7 @@ pub struct Trait {
 }
 
 impl Clean<Item> for doctree::Trait {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let attrs = self.attrs.clean(cx);
         let is_spotlight = attrs.has_doc_flag("spotlight");
         Item {
@@ -1965,7 +1965,7 @@ pub struct TraitAlias {
 }
 
 impl Clean<Item> for doctree::TraitAlias {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let attrs = self.attrs.clean(cx);
         Item {
             name: Some(self.name.clean(cx)),
@@ -1984,7 +1984,7 @@ impl Clean<Item> for doctree::TraitAlias {
 }
 
 impl Clean<bool> for hir::IsAuto {
-    fn clean(&self, _: &DocContext<'_, '_, '_>) -> bool {
+    fn clean(&self, _: &DocContext<'_>) -> bool {
         match *self {
             hir::IsAuto::Yes => true,
             hir::IsAuto::No => false,
@@ -1993,13 +1993,13 @@ impl Clean<bool> for hir::IsAuto {
 }
 
 impl Clean<Type> for hir::TraitRef {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Type {
+    fn clean(&self, cx: &DocContext<'_>) -> Type {
         resolve_type(cx, self.path.clean(cx), self.hir_ref_id)
     }
 }
 
 impl Clean<PolyTrait> for hir::PolyTraitRef {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> PolyTrait {
+    fn clean(&self, cx: &DocContext<'_>) -> PolyTrait {
         PolyTrait {
             trait_: self.trait_ref.clean(cx),
             generic_params: self.bound_generic_params.clean(cx)
@@ -2008,7 +2008,7 @@ impl Clean<PolyTrait> for hir::PolyTraitRef {
 }
 
 impl Clean<Item> for hir::TraitItem {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let inner = match self.node {
             hir::TraitItemKind::Const(ref ty, default) => {
                 AssociatedConstItem(ty.clean(cx),
@@ -2046,7 +2046,7 @@ impl Clean<Item> for hir::TraitItem {
 }
 
 impl Clean<Item> for hir::ImplItem {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let inner = match self.node {
             hir::ImplItemKind::Const(ref ty, expr) => {
                 AssociatedConstItem(ty.clean(cx),
@@ -2079,7 +2079,7 @@ impl Clean<Item> for hir::ImplItem {
 }
 
 impl<'tcx> Clean<Item> for ty::AssociatedItem {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let inner = match self.kind {
             ty::AssociatedKind::Const => {
                 let ty = cx.tcx.type_of(self.def_id);
@@ -2524,7 +2524,7 @@ impl From<ast::FloatTy> for PrimitiveType {
 }
 
 impl Clean<Type> for hir::Ty {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Type {
+    fn clean(&self, cx: &DocContext<'_>) -> Type {
         use rustc::hir::*;
 
         match self.node {
@@ -2726,7 +2726,7 @@ impl Clean<Type> for hir::Ty {
 }
 
 impl<'tcx> Clean<Type> for Ty<'tcx> {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Type {
+    fn clean(&self, cx: &DocContext<'_>) -> Type {
         match self.sty {
             ty::Never => Never,
             ty::Bool => Primitive(PrimitiveType::Bool),
@@ -2921,7 +2921,7 @@ impl<'tcx> Clean<Type> for Ty<'tcx> {
 }
 
 impl Clean<Item> for hir::StructField {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let local_did = cx.tcx.hir().local_def_id_from_hir_id(self.hir_id);
 
         Item {
@@ -2938,7 +2938,7 @@ impl Clean<Item> for hir::StructField {
 }
 
 impl<'tcx> Clean<Item> for ty::FieldDef {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.ident.name).clean(cx),
             attrs: cx.tcx.get_attrs(self.did).clean(cx),
@@ -2961,7 +2961,7 @@ pub enum Visibility {
 }
 
 impl Clean<Option<Visibility>> for hir::Visibility {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Option<Visibility> {
+    fn clean(&self, cx: &DocContext<'_>) -> Option<Visibility> {
         Some(match self.node {
             hir::VisibilityKind::Public => Visibility::Public,
             hir::VisibilityKind::Inherited => Visibility::Inherited,
@@ -2976,7 +2976,7 @@ impl Clean<Option<Visibility>> for hir::Visibility {
 }
 
 impl Clean<Option<Visibility>> for ty::Visibility {
-    fn clean(&self, _: &DocContext<'_, '_, '_>) -> Option<Visibility> {
+    fn clean(&self, _: &DocContext<'_>) -> Option<Visibility> {
         Some(if *self == ty::Visibility::Public { Public } else { Inherited })
     }
 }
@@ -2998,7 +2998,7 @@ pub struct Union {
 }
 
 impl Clean<Item> for doctree::Struct {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.name.clean(cx)),
             attrs: self.attrs.clean(cx),
@@ -3018,7 +3018,7 @@ impl Clean<Item> for doctree::Struct {
 }
 
 impl Clean<Item> for doctree::Union {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.name.clean(cx)),
             attrs: self.attrs.clean(cx),
@@ -3048,7 +3048,7 @@ pub struct VariantStruct {
 }
 
 impl Clean<VariantStruct> for ::rustc::hir::VariantData {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> VariantStruct {
+    fn clean(&self, cx: &DocContext<'_>) -> VariantStruct {
         VariantStruct {
             struct_type: doctree::struct_type_from_def(self),
             fields: self.fields().iter().map(|x| x.clean(cx)).collect(),
@@ -3065,7 +3065,7 @@ pub struct Enum {
 }
 
 impl Clean<Item> for doctree::Enum {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.name.clean(cx)),
             attrs: self.attrs.clean(cx),
@@ -3089,7 +3089,7 @@ pub struct Variant {
 }
 
 impl Clean<Item> for doctree::Variant {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.name.clean(cx)),
             attrs: self.attrs.clean(cx),
@@ -3106,7 +3106,7 @@ impl Clean<Item> for doctree::Variant {
 }
 
 impl<'tcx> Clean<Item> for ty::VariantDef {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let kind = match self.ctor_kind {
             CtorKind::Const => VariantKind::CLike,
             CtorKind::Fn => {
@@ -3154,7 +3154,7 @@ pub enum VariantKind {
 }
 
 impl Clean<VariantKind> for hir::VariantData {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> VariantKind {
+    fn clean(&self, cx: &DocContext<'_>) -> VariantKind {
         if self.is_struct() {
             VariantKind::Struct(self.clean(cx))
         } else if self.is_unit() {
@@ -3185,7 +3185,7 @@ impl Span {
 }
 
 impl Clean<Span> for syntax_pos::Span {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Span {
+    fn clean(&self, cx: &DocContext<'_>) -> Span {
         if self.is_dummy() {
             return Span::empty();
         }
@@ -3218,7 +3218,7 @@ impl Path {
 }
 
 impl Clean<Path> for hir::Path {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Path {
+    fn clean(&self, cx: &DocContext<'_>) -> Path {
         Path {
             global: self.is_global(),
             def: self.def,
@@ -3241,7 +3241,7 @@ pub enum GenericArgs {
 }
 
 impl Clean<GenericArgs> for hir::GenericArgs {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> GenericArgs {
+    fn clean(&self, cx: &DocContext<'_>) -> GenericArgs {
         if self.parenthesized {
             let output = self.bindings[0].ty.clean(cx);
             GenericArgs::Parenthesized {
@@ -3283,7 +3283,7 @@ pub struct PathSegment {
 }
 
 impl Clean<PathSegment> for hir::PathSegment {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> PathSegment {
+    fn clean(&self, cx: &DocContext<'_>) -> PathSegment {
         PathSegment {
             name: self.ident.name.clean(cx),
             args: self.with_generic_args(|generic_args| generic_args.clean(cx))
@@ -3355,21 +3355,21 @@ fn qpath_to_string(p: &hir::QPath) -> String {
 
 impl Clean<String> for Ident {
     #[inline]
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> String {
+    fn clean(&self, cx: &DocContext<'_>) -> String {
         self.name.clean(cx)
     }
 }
 
 impl Clean<String> for ast::Name {
     #[inline]
-    fn clean(&self, _: &DocContext<'_, '_, '_>) -> String {
+    fn clean(&self, _: &DocContext<'_>) -> String {
         self.to_string()
     }
 }
 
 impl Clean<String> for InternedString {
     #[inline]
-    fn clean(&self, _: &DocContext<'_, '_, '_>) -> String {
+    fn clean(&self, _: &DocContext<'_>) -> String {
         self.to_string()
     }
 }
@@ -3381,7 +3381,7 @@ pub struct Typedef {
 }
 
 impl Clean<Item> for doctree::Typedef {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.name.clean(cx)),
             attrs: self.attrs.clean(cx),
@@ -3405,7 +3405,7 @@ pub struct Existential {
 }
 
 impl Clean<Item> for doctree::Existential {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.name.clean(cx)),
             attrs: self.attrs.clean(cx),
@@ -3431,7 +3431,7 @@ pub struct BareFunctionDecl {
 }
 
 impl Clean<BareFunctionDecl> for hir::BareFnTy {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> BareFunctionDecl {
+    fn clean(&self, cx: &DocContext<'_>) -> BareFunctionDecl {
         let (generic_params, decl) = enter_impl_trait(cx, || {
             (self.generic_params.clean(cx), (&*self.decl, &self.arg_names[..]).clean(cx))
         });
@@ -3455,7 +3455,7 @@ pub struct Static {
 }
 
 impl Clean<Item> for doctree::Static {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         debug!("cleaning static {}: {:?}", self.name.clean(cx), self);
         Item {
             name: Some(self.name.clean(cx)),
@@ -3481,7 +3481,7 @@ pub struct Constant {
 }
 
 impl Clean<Item> for doctree::Constant {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.name.clean(cx)),
             attrs: self.attrs.clean(cx),
@@ -3505,7 +3505,7 @@ pub enum Mutability {
 }
 
 impl Clean<Mutability> for hir::Mutability {
-    fn clean(&self, _: &DocContext<'_, '_, '_>) -> Mutability {
+    fn clean(&self, _: &DocContext<'_>) -> Mutability {
         match self {
             &hir::MutMutable => Mutable,
             &hir::MutImmutable => Immutable,
@@ -3520,7 +3520,7 @@ pub enum ImplPolarity {
 }
 
 impl Clean<ImplPolarity> for hir::ImplPolarity {
-    fn clean(&self, _: &DocContext<'_, '_, '_>) -> ImplPolarity {
+    fn clean(&self, _: &DocContext<'_>) -> ImplPolarity {
         match self {
             &hir::ImplPolarity::Positive => ImplPolarity::Positive,
             &hir::ImplPolarity::Negative => ImplPolarity::Negative,
@@ -3542,7 +3542,7 @@ pub struct Impl {
 }
 
 pub fn get_auto_traits_with_hir_id(
-    cx: &DocContext<'_, '_, '_>,
+    cx: &DocContext<'_>,
     id: hir::HirId,
     name: String
 ) -> Vec<Item> {
@@ -3551,7 +3551,7 @@ pub fn get_auto_traits_with_hir_id(
 }
 
 pub fn get_auto_traits_with_def_id(
-    cx: &DocContext<'_, '_, '_>,
+    cx: &DocContext<'_>,
     id: DefId
 ) -> Vec<Item> {
     let finder = AutoTraitFinder::new(cx);
@@ -3560,7 +3560,7 @@ pub fn get_auto_traits_with_def_id(
 }
 
 pub fn get_blanket_impls_with_hir_id(
-    cx: &DocContext<'_, '_, '_>,
+    cx: &DocContext<'_>,
     id: hir::HirId,
     name: String
 ) -> Vec<Item> {
@@ -3569,7 +3569,7 @@ pub fn get_blanket_impls_with_hir_id(
 }
 
 pub fn get_blanket_impls_with_def_id(
-    cx: &DocContext<'_, '_, '_>,
+    cx: &DocContext<'_>,
     id: DefId
 ) -> Vec<Item> {
     let finder = BlanketImplFinder::new(cx);
@@ -3578,7 +3578,7 @@ pub fn get_blanket_impls_with_def_id(
 }
 
 impl Clean<Vec<Item>> for doctree::Impl {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Vec<Item> {
+    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
         let mut ret = Vec::new();
         let trait_ = self.trait_.clean(cx);
         let items = self.items.clean(cx);
@@ -3620,7 +3620,7 @@ impl Clean<Vec<Item>> for doctree::Impl {
     }
 }
 
-fn build_deref_target_impls(cx: &DocContext<'_, '_, '_>,
+fn build_deref_target_impls(cx: &DocContext<'_>,
                             items: &[Item],
                             ret: &mut Vec<Item>) {
     use self::PrimitiveType::*;
@@ -3679,7 +3679,7 @@ fn build_deref_target_impls(cx: &DocContext<'_, '_, '_>,
 }
 
 impl Clean<Vec<Item>> for doctree::ExternCrate {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Vec<Item> {
+    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
 
         let please_inline = self.vis.node.is_pub() && self.attrs.iter().any(|a| {
             a.name() == "doc" && match a.meta_item_list() {
@@ -3715,7 +3715,7 @@ impl Clean<Vec<Item>> for doctree::ExternCrate {
 }
 
 impl Clean<Vec<Item>> for doctree::Import {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Vec<Item> {
+    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
         // We consider inlining the documentation of `pub use` statements, but we
         // forcefully don't inline if this is not public or if the
         // #[doc(no_inline)] attribute is present.
@@ -3789,7 +3789,7 @@ pub struct ImportSource {
 }
 
 impl Clean<Vec<Item>> for hir::ForeignMod {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Vec<Item> {
+    fn clean(&self, cx: &DocContext<'_>) -> Vec<Item> {
         let mut items = self.items.clean(cx);
         for item in &mut items {
             if let ForeignFunctionItem(ref mut f) = item.inner {
@@ -3801,7 +3801,7 @@ impl Clean<Vec<Item>> for hir::ForeignMod {
 }
 
 impl Clean<Item> for hir::ForeignItem {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let inner = match self.node {
             hir::ForeignItemKind::Fn(ref decl, ref names, ref generics) => {
                 let (generics, decl) = enter_impl_trait(cx, || {
@@ -3848,11 +3848,11 @@ impl Clean<Item> for hir::ForeignItem {
 // Utilities
 
 pub trait ToSource {
-    fn to_src(&self, cx: &DocContext<'_, '_, '_>) -> String;
+    fn to_src(&self, cx: &DocContext<'_>) -> String;
 }
 
 impl ToSource for syntax_pos::Span {
-    fn to_src(&self, cx: &DocContext<'_, '_, '_>) -> String {
+    fn to_src(&self, cx: &DocContext<'_>) -> String {
         debug!("converting span {:?} to snippet", self.clean(cx));
         let sn = match cx.sess().source_map().span_to_snippet(*self) {
             Ok(x) => x,
@@ -3899,7 +3899,7 @@ fn name_from_pat(p: &hir::Pat) -> String {
     }
 }
 
-fn print_const(cx: &DocContext<'_, '_, '_>, n: ty::LazyConst<'_>) -> String {
+fn print_const(cx: &DocContext<'_>, n: ty::LazyConst<'_>) -> String {
     match n {
         ty::LazyConst::Unevaluated(def_id, _) => {
             if let Some(hir_id) = cx.tcx.hir().as_local_hir_id(def_id) {
@@ -3921,12 +3921,12 @@ fn print_const(cx: &DocContext<'_, '_, '_>, n: ty::LazyConst<'_>) -> String {
     }
 }
 
-fn print_const_expr(cx: &DocContext<'_, '_, '_>, body: hir::BodyId) -> String {
+fn print_const_expr(cx: &DocContext<'_>, body: hir::BodyId) -> String {
     cx.tcx.hir().hir_to_pretty_string(body.hir_id)
 }
 
 /// Given a type Path, resolve it to a Type using the TyCtxt
-fn resolve_type(cx: &DocContext<'_, '_, '_>,
+fn resolve_type(cx: &DocContext<'_>,
                 path: Path,
                 id: hir::HirId) -> Type {
     if id == hir::DUMMY_HIR_ID {
@@ -3957,7 +3957,7 @@ fn resolve_type(cx: &DocContext<'_, '_, '_>,
     ResolvedPath { path: path, typarams: None, did: did, is_generic: is_generic }
 }
 
-pub fn register_def(cx: &DocContext<'_, '_, '_>, def: Def) -> DefId {
+pub fn register_def(cx: &DocContext<'_>, def: Def) -> DefId {
     debug!("register_def({:?})", def);
 
     let (did, kind) = match def {
@@ -3992,7 +3992,7 @@ pub fn register_def(cx: &DocContext<'_, '_, '_>, def: Def) -> DefId {
     did
 }
 
-fn resolve_use_source(cx: &DocContext<'_, '_, '_>, path: Path) -> ImportSource {
+fn resolve_use_source(cx: &DocContext<'_>, path: Path) -> ImportSource {
     ImportSource {
         did: if path.def.opt_def_id().is_none() {
            None
@@ -4010,7 +4010,7 @@ pub struct Macro {
 }
 
 impl Clean<Item> for doctree::Macro {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         let name = self.name.clean(cx);
         Item {
             name: Some(name.clone()),
@@ -4039,7 +4039,7 @@ pub struct ProcMacro {
 }
 
 impl Clean<Item> for doctree::ProcMacro {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> Item {
+    fn clean(&self, cx: &DocContext<'_>) -> Item {
         Item {
             name: Some(self.name.clean(cx)),
             attrs: self.attrs.clean(cx),
@@ -4073,7 +4073,7 @@ pub struct Deprecation {
 }
 
 impl Clean<Stability> for attr::Stability {
-    fn clean(&self, _: &DocContext<'_, '_, '_>) -> Stability {
+    fn clean(&self, _: &DocContext<'_>) -> Stability {
         Stability {
             level: stability::StabilityLevel::from_attr_level(&self.level),
             feature: Some(self.feature.to_string()).filter(|f| !f.is_empty()),
@@ -4100,13 +4100,13 @@ impl Clean<Stability> for attr::Stability {
 }
 
 impl<'a> Clean<Stability> for &'a attr::Stability {
-    fn clean(&self, dc: &DocContext<'_, '_, '_>) -> Stability {
+    fn clean(&self, dc: &DocContext<'_>) -> Stability {
         (**self).clean(dc)
     }
 }
 
 impl Clean<Deprecation> for attr::Deprecation {
-    fn clean(&self, _: &DocContext<'_, '_, '_>) -> Deprecation {
+    fn clean(&self, _: &DocContext<'_>) -> Deprecation {
         Deprecation {
             since: self.since.map(|s| s.to_string()).filter(|s| !s.is_empty()),
             note: self.note.map(|n| n.to_string()).filter(|n| !n.is_empty()),
@@ -4122,7 +4122,7 @@ pub struct TypeBinding {
 }
 
 impl Clean<TypeBinding> for hir::TypeBinding {
-    fn clean(&self, cx: &DocContext<'_, '_, '_>) -> TypeBinding {
+    fn clean(&self, cx: &DocContext<'_>) -> TypeBinding {
         TypeBinding {
             name: self.ident.name.clean(cx),
             ty: self.ty.clean(cx)
@@ -4131,7 +4131,7 @@ impl Clean<TypeBinding> for hir::TypeBinding {
 }
 
 pub fn def_id_to_path(
-    cx: &DocContext<'_, '_, '_>,
+    cx: &DocContext<'_>,
     did: DefId,
     name: Option<String>
 ) -> Vec<String> {
@@ -4148,7 +4148,7 @@ pub fn def_id_to_path(
     once(crate_name).chain(relative).collect()
 }
 
-pub fn enter_impl_trait<F, R>(cx: &DocContext<'_, '_, '_>, f: F) -> R
+pub fn enter_impl_trait<F, R>(cx: &DocContext<'_>, f: F) -> R
 where
     F: FnOnce() -> R,
 {
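Every `Clean` implementation above changes the same way: the three lifetime parameters on `DocContext` collapse into a single one. A minimal sketch of the resulting shape (illustrative only; `MyItem` and this simplified `Clean` trait are assumptions for the sketch, not part of the diff):

    // The context now carries a single lifetime, threaded anonymously by callers.
    pub trait Clean<T> {
        fn clean(&self, cx: &DocContext<'_>) -> T;
    }

    struct MyItem;

    impl Clean<String> for MyItem {
        // Only one anonymous lifetime appears in every signature now.
        fn clean(&self, _cx: &DocContext<'_>) -> String {
            String::from("item")
        }
    }
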
@@ -22,7 +22,7 @@ use crate::clean::WherePredicate as WP;
 use crate::clean;
 use crate::core::DocContext;
 
-pub fn where_clauses(cx: &DocContext<'_, '_, '_>, clauses: Vec<WP>) -> Vec<WP> {
+pub fn where_clauses(cx: &DocContext<'_>, clauses: Vec<WP>) -> Vec<WP> {
     // First, partition the where clause into its separate components
     let mut params: BTreeMap<_, Vec<_>> = BTreeMap::new();
     let mut lifetimes = Vec::new();
@@ -141,7 +141,7 @@ fn ty_bounds(bounds: Vec<clean::GenericBound>) -> Vec<clean::GenericBound> {
     bounds
 }
 
-fn trait_is_same_or_supertrait(cx: &DocContext<'_, '_, '_>, child: DefId,
+fn trait_is_same_or_supertrait(cx: &DocContext<'_>, child: DefId,
                                trait_: DefId) -> bool {
     if child == trait_ {
         return true
@@ -203,7 +203,7 @@ pub struct RenderOptions {
 impl Options {
     /// Parses the given command-line for options. If an error message or other early-return has
     /// been printed, returns `Err` with the exit code.
-    pub fn from_matches(matches: &getopts::Matches) -> Result<Options, isize> {
+    pub fn from_matches(matches: &getopts::Matches) -> Result<Options, i32> {
         // Check for unstable options.
         nightly_options::check_nightly_options(&matches, &opts());
 
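The error type narrows from `isize` to `i32`, which is the width process exit codes actually use. A hedged sketch of how such a result can feed `std::process::exit` (the `parse` helper and `main` here are hypothetical stand-ins, not rustdoc code):

    use std::process;

    // Hypothetical stand-in for Options::from_matches: Err carries the exit code.
    fn parse() -> Result<(), i32> {
        Err(1)
    }

    fn main() {
        // process::exit takes an i32, so the code can be passed through without a cast.
        let code = match parse() {
            Ok(()) => 0,
            Err(code) => code,
        };
        process::exit(code);
    }
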
@@ -1,19 +1,18 @@
 use rustc_lint;
-use rustc_driver::{driver, abort_on_err};
 use rustc::session::{self, config};
 use rustc::hir::def_id::{DefId, DefIndex, DefIndexAddressSpace, CrateNum, LOCAL_CRATE};
 use rustc::hir::def::Def;
 use rustc::hir::{self, HirId, HirVec};
 use rustc::middle::cstore::CrateStore;
 use rustc::middle::privacy::AccessLevels;
-use rustc::ty::{self, TyCtxt, AllArenas};
-use rustc::hir::map as hir_map;
+use rustc::ty::{self, TyCtxt};
 use rustc::lint::{self, LintPass};
 use rustc::session::config::ErrorOutputType;
+use rustc::session::DiagnosticOutput;
 use rustc::util::nodemap::{FxHashMap, FxHashSet};
-use rustc_interface::util;
+use rustc_interface::interface;
+use rustc_driver::abort_on_err;
 use rustc_resolve as resolve;
-use rustc_metadata::creader::CrateLoader;
 use rustc_metadata::cstore::CStore;
 use rustc_target::spec::TargetTriple;
 
@@ -24,15 +23,15 @@ use syntax::json::JsonEmitter;
 use syntax::ptr::P;
 use syntax::symbol::keywords;
 use syntax_pos::DUMMY_SP;
-use errors::{self, FatalError};
+use errors;
 use errors::emitter::{Emitter, EmitterWriter};
 use parking_lot::ReentrantMutex;
 
 use std::cell::RefCell;
 use std::mem;
 use rustc_data_structures::sync::{self, Lrc};
-use std::rc::Rc;
 use std::sync::Arc;
+use std::rc::Rc;
 
 use crate::visit_ast::RustdocVisitor;
 use crate::config::{Options as RustdocOptions, RenderOptions};
@@ -47,12 +46,13 @@ pub use rustc::session::search_paths::SearchPath;
 
 pub type ExternalPaths = FxHashMap<DefId, (Vec<String>, clean::TypeKind)>;
 
-pub struct DocContext<'a, 'tcx: 'a, 'rcx: 'a> {
-    pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    pub resolver: &'a RefCell<resolve::Resolver<'rcx>>,
+pub struct DocContext<'tcx> {
+    pub tcx: TyCtxt<'tcx, 'tcx, 'tcx>,
+    pub resolver: Rc<Option<RefCell<interface::BoxedResolver>>>,
     /// The stack of module NodeIds up till this point
     pub crate_name: Option<String>,
-    pub cstore: Rc<CStore>,
+    pub cstore: Lrc<CStore>,
     /// Later on moved into `html::render::CACHE_KEY`
     pub renderinfo: RefCell<RenderInfo>,
     /// Later on moved through `clean::Crate` into `html::render::CACHE_KEY`
@@ -79,11 +79,18 @@ pub struct DocContext<'a, 'tcx: 'a, 'rcx: 'a> {
     pub all_traits: Vec<DefId>,
 }
 
-impl<'a, 'tcx, 'rcx> DocContext<'a, 'tcx, 'rcx> {
+impl<'tcx> DocContext<'tcx> {
     pub fn sess(&self) -> &session::Session {
         &self.tcx.sess
     }
 
+    pub fn enter_resolver<F, R>(&self, f: F) -> R
+    where F: FnOnce(&mut resolve::Resolver<'_>) -> R {
+        let resolver = &*self.resolver;
+        let resolver = resolver.as_ref().unwrap();
+        resolver.borrow_mut().access(f)
+    }
+
     /// Call the closure with the given parameters set as
     /// the substitutions for a type alias' RHS.
     pub fn enter_alias<F, R>(&self,
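With the resolver now stored behind `Rc<Option<RefCell<interface::BoxedResolver>>>`, callers go through `enter_resolver` instead of borrowing a `RefCell` field directly. A minimal usage sketch, assuming only the signature shown above (`lookup_in_scope` is a hypothetical helper, not part of this commit):

    // Hypothetical caller: the resolver is borrowed only for the duration of the
    // closure, so the shared Rc handle can keep being cloned and stored elsewhere.
    fn lookup_in_scope(cx: &DocContext<'_>) {
        cx.enter_resolver(|resolver| {
            // `resolver` is a &mut resolve::Resolver<'_> here; any name-resolution
            // work rustdoc needs happens inside this scope.
            let _ = resolver;
        });
    }
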
@ -368,19 +375,31 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt
|
||||||
|
|
||||||
whitelisted_lints.extend(lint_opts.iter().map(|(lint, _)| lint).cloned());
|
whitelisted_lints.extend(lint_opts.iter().map(|(lint, _)| lint).cloned());
|
||||||
|
|
||||||
let lints = lint::builtin::HardwiredLints.get_lints()
|
let lints = || {
|
||||||
.into_iter()
|
lint::builtin::HardwiredLints
|
||||||
.chain(rustc_lint::SoftLints.get_lints().into_iter())
|
.get_lints()
|
||||||
.filter_map(|lint| {
|
.into_iter()
|
||||||
if lint.name == warnings_lint_name ||
|
.chain(rustc_lint::SoftLints.get_lints().into_iter())
|
||||||
lint.name == intra_link_resolution_failure_name {
|
};
|
||||||
None
|
|
||||||
} else {
|
let lint_opts = lints().filter_map(|lint| {
|
||||||
Some((lint.name_lower(), lint::Allow))
|
if lint.name == warnings_lint_name ||
|
||||||
}
|
lint.name == intra_link_resolution_failure_name {
|
||||||
})
|
None
|
||||||
.chain(lint_opts.into_iter())
|
} else {
|
||||||
.collect::<Vec<_>>();
|
Some((lint.name_lower(), lint::Allow))
|
||||||
|
}
|
||||||
|
}).chain(lint_opts.into_iter()).collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let lint_caps = lints().filter_map(|lint| {
|
||||||
|
// We don't want to whitelist *all* lints so let's
|
||||||
|
// ignore those ones.
|
||||||
|
if whitelisted_lints.iter().any(|l| &lint.name == l) {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some((lint::LintId::of(lint), lint::Allow))
|
||||||
|
}
|
||||||
|
}).collect();
|
||||||
|
|
||||||
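
Note: because the chained lint iterator is now consumed twice, once for the string-keyed `lint_opts` and once for the `LintId`-keyed `lint_caps`, the hunk wraps it in a `lints()` closure that rebuilds the iterator on every call. A small runnable sketch of that rebuild-on-demand shape, with placeholder lint data rather than the real lint registries:

    #[derive(Clone, Copy)]
    struct Lint { name: &'static str }

    fn main() {
        let hardwired = [Lint { name: "warnings" }, Lint { name: "dead_code" }];
        let soft = [Lint { name: "unused_doc_comments" }];

        // The closure recreates the chained iterator each time it is called, so
        // the same source list can feed two independent collections.
        let lints = || hardwired.iter().chain(soft.iter()).cloned();

        // First consumer: lint names to allow (everything except `warnings`).
        let lint_opts: Vec<String> = lints()
            .filter(|l| l.name != "warnings")
            .map(|l| l.name.to_lowercase())
            .collect();

        // Second consumer: the full capped set, keyed however the caller likes.
        let lint_caps: Vec<&'static str> = lints().map(|l| l.name).collect();

        assert_eq!(lint_opts.len(), 2);
        assert_eq!(lint_caps.len(), 3);
    }
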
let host_triple = TargetTriple::from_triple(config::host_triple());
|
let host_triple = TargetTriple::from_triple(config::host_triple());
|
||||||
// plays with error output here!
|
// plays with error output here!
|
||||||
|
@ -389,7 +408,7 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt
|
||||||
search_paths: libs,
|
search_paths: libs,
|
||||||
crate_types: vec![config::CrateType::Rlib],
|
crate_types: vec![config::CrateType::Rlib],
|
||||||
lint_opts: if !display_warnings {
|
lint_opts: if !display_warnings {
|
||||||
lints
|
lint_opts
|
||||||
} else {
|
} else {
|
||||||
vec![]
|
vec![]
|
||||||
},
|
},
|
||||||
|
@ -406,116 +425,42 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt
|
||||||
describe_lints,
|
describe_lints,
|
||||||
..Options::default()
|
..Options::default()
|
||||||
};
|
};
|
||||||
driver::spawn_thread_pool(sessopts, move |sessopts| {
|
|
||||||
let source_map = Lrc::new(source_map::SourceMap::new(sessopts.file_path_mapping()));
|
|
||||||
let diagnostic_handler = new_handler(error_format,
|
|
||||||
Some(source_map.clone()),
|
|
||||||
debugging_options.treat_err_as_bug,
|
|
||||||
debugging_options.ui_testing);
|
|
||||||
|
|
||||||
let mut sess = session::build_session_(
|
let config = interface::Config {
|
||||||
sessopts, cpath, diagnostic_handler, source_map, Default::default(),
|
opts: sessopts,
|
||||||
);
|
crate_cfg: config::parse_cfgspecs(cfgs),
|
||||||
|
input,
|
||||||
|
input_path: cpath,
|
||||||
|
output_file: None,
|
||||||
|
output_dir: None,
|
||||||
|
file_loader: None,
|
||||||
|
diagnostic_output: DiagnosticOutput::Default,
|
||||||
|
stderr: None,
|
||||||
|
crate_name: crate_name.clone(),
|
||||||
|
lint_caps,
|
||||||
|
};
|
||||||
|
|
||||||
lint::builtin::HardwiredLints.get_lints()
|
interface::run_compiler_in_existing_thread_pool(config, |compiler| {
|
||||||
.into_iter()
|
let sess = compiler.session();
|
||||||
.chain(rustc_lint::SoftLints.get_lints().into_iter())
|
|
||||||
.filter_map(|lint| {
|
|
||||||
// We don't want to whitelist *all* lints so let's
|
|
||||||
// ignore those ones.
|
|
||||||
if whitelisted_lints.iter().any(|l| &lint.name == l) {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(lint)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.for_each(|l| {
|
|
||||||
sess.driver_lint_caps.insert(lint::LintId::of(l),
|
|
||||||
lint::Allow);
|
|
||||||
});
|
|
||||||
|
|
||||||
let codegen_backend = util::get_codegen_backend(&sess);
|
// We need to hold on to the complete resolver, so we cause everything to be
|
||||||
let cstore = Rc::new(CStore::new(codegen_backend.metadata_loader()));
|
// cloned for the analysis passes to use. Suboptimal, but necessary in the
|
||||||
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
|
|
||||||
|
|
||||||
let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs));
|
|
||||||
util::add_configuration(&mut cfg, &sess, &*codegen_backend);
|
|
||||||
sess.parse_sess.config = cfg;
|
|
||||||
|
|
||||||
let control = &driver::CompileController::basic();
|
|
||||||
|
|
||||||
let krate = match driver::phase_1_parse_input(control, &sess, &input) {
|
|
||||||
Ok(krate) => krate,
|
|
||||||
Err(mut e) => {
|
|
||||||
e.emit();
|
|
||||||
FatalError.raise();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let name = match crate_name {
|
|
||||||
Some(ref crate_name) => crate_name.clone(),
|
|
||||||
None => ::rustc_codegen_utils::link::find_crate_name(Some(&sess), &krate.attrs, &input),
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut crate_loader = CrateLoader::new(&sess, &cstore, &name);
|
|
||||||
|
|
||||||
let resolver_arenas = resolve::Resolver::arenas();
|
|
||||||
let result = driver::phase_2_configure_and_expand_inner(&sess,
|
|
||||||
&cstore,
|
|
||||||
krate,
|
|
||||||
None,
|
|
||||||
&name,
|
|
||||||
None,
|
|
||||||
&resolver_arenas,
|
|
||||||
&mut crate_loader,
|
|
||||||
|_| Ok(()));
|
|
||||||
let driver::InnerExpansionResult {
|
|
||||||
mut hir_forest,
|
|
||||||
resolver,
|
|
||||||
..
|
|
||||||
} = abort_on_err(result, &sess);
|
|
||||||
|
|
||||||
// We need to hold on to the complete resolver, so we clone everything
|
|
||||||
// for the analysis passes to use. Suboptimal, but necessary in the
|
|
||||||
// current architecture.
|
// current architecture.
|
||||||
let defs = resolver.definitions.clone();
|
let resolver = abort_on_err(compiler.expansion(), sess).peek().1.clone();
|
||||||
let resolutions = ty::Resolutions {
|
|
||||||
freevars: resolver.freevars.clone(),
|
|
||||||
export_map: resolver.export_map.clone(),
|
|
||||||
trait_map: resolver.trait_map.clone(),
|
|
||||||
glob_map: resolver.glob_map.clone(),
|
|
||||||
maybe_unused_trait_imports: resolver.maybe_unused_trait_imports.clone(),
|
|
||||||
maybe_unused_extern_crates: resolver.maybe_unused_extern_crates.clone(),
|
|
||||||
extern_prelude: resolver.extern_prelude.iter().map(|(ident, entry)| {
|
|
||||||
(ident.name, entry.introduced_by_item)
|
|
||||||
}).collect(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut arenas = AllArenas::new();
|
if sess.err_count() > 0 {
|
||||||
let hir_map = hir_map::map_crate(&sess, &*cstore, &mut hir_forest, &defs);
|
sess.fatal("Compilation failed, aborting rustdoc");
|
||||||
let output_filenames = util::build_output_filenames(&input,
|
}
|
||||||
&None,
|
|
||||||
&None,
|
|
||||||
&[],
|
|
||||||
&sess);
|
|
||||||
|
|
||||||
let resolver = RefCell::new(resolver);
|
let mut global_ctxt = abort_on_err(compiler.global_ctxt(), sess).take();
|
||||||
driver::phase_3_run_analysis_passes(&*codegen_backend,
|
|
||||||
control,
|
global_ctxt.enter(|tcx| {
|
||||||
&sess,
|
tcx.analysis(LOCAL_CRATE).ok();
|
||||||
&*cstore,
|
|
||||||
hir_map,
|
// Abort if there were any errors so far
|
||||||
resolutions,
|
sess.abort_if_errors();
|
||||||
&mut arenas,
|
|
||||||
&name,
|
|
||||||
&output_filenames,
|
|
||||||
|tcx, _, result| {
|
|
||||||
if result.is_err() {
|
|
||||||
sess.fatal("Compilation failed, aborting rustdoc");
|
|
||||||
}
|
|
||||||
|
|
||||||
let access_levels = tcx.privacy_access_levels(LOCAL_CRATE);
|
let access_levels = tcx.privacy_access_levels(LOCAL_CRATE);
|
||||||
|
|
||||||
// Convert from a NodeId set to a DefId set since we don't always have easy access
|
// Convert from a NodeId set to a DefId set since we don't always have easy access
|
||||||
// to the map from defid -> nodeid
|
// to the map from defid -> nodeid
|
||||||
let access_levels = AccessLevels {
|
let access_levels = AccessLevels {
|
||||||
|
@ -535,9 +480,9 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt
|
||||||
|
|
||||||
let ctxt = DocContext {
|
let ctxt = DocContext {
|
||||||
tcx,
|
tcx,
|
||||||
resolver: &resolver,
|
resolver,
|
||||||
crate_name,
|
crate_name,
|
||||||
cstore: cstore.clone(),
|
cstore: compiler.cstore().clone(),
|
||||||
external_traits: Default::default(),
|
external_traits: Default::default(),
|
||||||
active_extern_traits: Default::default(),
|
active_extern_traits: Default::default(),
|
||||||
renderinfo: RefCell::new(renderinfo),
|
renderinfo: RefCell::new(renderinfo),
|
||||||
|
|
|
@ -23,14 +23,13 @@ extern crate getopts;
|
||||||
extern crate env_logger;
|
extern crate env_logger;
|
||||||
extern crate rustc;
|
extern crate rustc;
|
||||||
extern crate rustc_data_structures;
|
extern crate rustc_data_structures;
|
||||||
extern crate rustc_codegen_utils;
|
|
||||||
extern crate rustc_driver;
|
extern crate rustc_driver;
|
||||||
extern crate rustc_resolve;
|
extern crate rustc_resolve;
|
||||||
extern crate rustc_lint;
|
extern crate rustc_lint;
|
||||||
|
extern crate rustc_interface;
|
||||||
extern crate rustc_metadata;
|
extern crate rustc_metadata;
|
||||||
extern crate rustc_target;
|
extern crate rustc_target;
|
||||||
extern crate rustc_typeck;
|
extern crate rustc_typeck;
|
||||||
extern crate rustc_interface;
|
|
||||||
extern crate serialize;
|
extern crate serialize;
|
||||||
extern crate syntax;
|
extern crate syntax;
|
||||||
extern crate syntax_pos;
|
extern crate syntax_pos;
|
||||||
|
@ -91,11 +90,11 @@ pub fn main() {
|
||||||
rustc_driver::set_sigpipe_handler();
|
rustc_driver::set_sigpipe_handler();
|
||||||
env_logger::init();
|
env_logger::init();
|
||||||
let res = std::thread::Builder::new().stack_size(thread_stack_size).spawn(move || {
|
let res = std::thread::Builder::new().stack_size(thread_stack_size).spawn(move || {
|
||||||
syntax::with_globals(move || {
|
rustc_interface::interface::default_thread_pool(move || {
|
||||||
get_args().map(|args| main_args(&args)).unwrap_or(1)
|
get_args().map(|args| main_args(&args)).unwrap_or(1)
|
||||||
})
|
})
|
||||||
}).unwrap().join().unwrap_or(rustc_driver::EXIT_FAILURE);
|
}).unwrap().join().unwrap_or(rustc_driver::EXIT_FAILURE);
|
||||||
process::exit(res as i32);
|
process::exit(res);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_args() -> Option<Vec<String>> {
|
fn get_args() -> Option<Vec<String>> {
|
||||||
|
@ -364,7 +363,7 @@ fn usage(argv0: &str) {
|
||||||
println!("{}", options.usage(&format!("{} [options] <input>", argv0)));
|
println!("{}", options.usage(&format!("{} [options] <input>", argv0)));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn main_args(args: &[String]) -> isize {
|
fn main_args(args: &[String]) -> i32 {
|
||||||
let mut options = getopts::Options::new();
|
let mut options = getopts::Options::new();
|
||||||
for option in opts() {
|
for option in opts() {
|
||||||
(option.apply)(&mut options);
|
(option.apply)(&mut options);
|
||||||
|
@ -441,7 +440,7 @@ where R: 'static + Send,
|
||||||
|
|
||||||
let (tx, rx) = channel();
|
let (tx, rx) = channel();
|
||||||
|
|
||||||
let result = rustc_driver::monitor(move || syntax::with_globals(move || {
|
let result = rustc_driver::report_ices_to_stderr_if_any(move || syntax::with_globals(move || {
|
||||||
let crate_name = options.crate_name.clone();
|
let crate_name = options.crate_name.clone();
|
||||||
let crate_version = options.crate_version.clone();
|
let crate_version = options.crate_version.clone();
|
||||||
let (mut krate, renderinfo, renderopts, passes) = core::run_core(options);
|
let (mut krate, renderinfo, renderopts, passes) = core::run_core(options);
|
||||||
|
|
|
@ -36,7 +36,7 @@ fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
|
||||||
|
|
||||||
/// Render `input` (e.g., "foo.md") into an HTML file in `output`
|
/// Render `input` (e.g., "foo.md") into an HTML file in `output`
|
||||||
/// (e.g., output = "bar" => "bar/foo.html").
|
/// (e.g., output = "bar" => "bar/foo.html").
|
||||||
pub fn render(input: PathBuf, options: RenderOptions, diag: &errors::Handler) -> isize {
|
pub fn render(input: PathBuf, options: RenderOptions, diag: &errors::Handler) -> i32 {
|
||||||
let mut output = options.output;
|
let mut output = options.output;
|
||||||
output.push(input.file_stem().unwrap());
|
output.push(input.file_stem().unwrap());
|
||||||
output.set_extension("html");
|
output.set_extension("html");
|
||||||
|
@ -126,7 +126,7 @@ pub fn render(input: PathBuf, options: RenderOptions, diag: &errors::Handler) ->
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Runs any tests/code examples in the markdown file `input`.
|
/// Runs any tests/code examples in the markdown file `input`.
|
||||||
pub fn test(mut options: Options, diag: &errors::Handler) -> isize {
|
pub fn test(mut options: Options, diag: &errors::Handler) -> i32 {
|
||||||
let input_str = match load_string(&options.input, diag) {
|
let input_str = match load_string(&options.input, diag) {
|
||||||
Ok(s) => s,
|
Ok(s) => s,
|
||||||
Err(LoadStringError::ReadFail) => return 1,
|
Err(LoadStringError::ReadFail) => return 1,
|
||||||
|
|
|
@ -15,7 +15,7 @@ pub const CALCULATE_DOC_COVERAGE: Pass = Pass {
|
||||||
description: "counts the number of items with and without documentation",
|
description: "counts the number of items with and without documentation",
|
||||||
};
|
};
|
||||||
|
|
||||||
fn calculate_doc_coverage(krate: clean::Crate, _: &DocContext<'_, '_, '_>) -> clean::Crate {
|
fn calculate_doc_coverage(krate: clean::Crate, _: &DocContext<'_>) -> clean::Crate {
|
||||||
let mut calc = CoverageCalculator::default();
|
let mut calc = CoverageCalculator::default();
|
||||||
let krate = calc.fold_crate(krate);
|
let krate = calc.fold_crate(krate);
|
||||||
|
|
||||||
|
|
|
@ -16,15 +16,15 @@ pub const CHECK_CODE_BLOCK_SYNTAX: Pass = Pass {
|
||||||
description: "validates syntax inside Rust code blocks",
|
description: "validates syntax inside Rust code blocks",
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn check_code_block_syntax(krate: clean::Crate, cx: &DocContext<'_, '_, '_>) -> clean::Crate {
|
pub fn check_code_block_syntax(krate: clean::Crate, cx: &DocContext<'_>) -> clean::Crate {
|
||||||
SyntaxChecker { cx }.fold_crate(krate)
|
SyntaxChecker { cx }.fold_crate(krate)
|
||||||
}
|
}
|
||||||
|
|
||||||
struct SyntaxChecker<'a, 'tcx: 'a, 'rcx: 'a> {
|
struct SyntaxChecker<'a, 'tcx: 'a> {
|
||||||
cx: &'a DocContext<'a, 'tcx, 'rcx>,
|
cx: &'a DocContext<'tcx>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> SyntaxChecker<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
|
||||||
fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeBlock) {
|
fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeBlock) {
|
||||||
let sess = ParseSess::new(FilePathMapping::empty());
|
let sess = ParseSess::new(FilePathMapping::empty());
|
||||||
let source_file = sess.source_map().new_source_file(
|
let source_file = sess.source_map().new_source_file(
|
||||||
|
@ -98,7 +98,7 @@ impl<'a, 'tcx, 'rcx> SyntaxChecker<'a, 'tcx, 'rcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> DocFolder for SyntaxChecker<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> DocFolder for SyntaxChecker<'a, 'tcx> {
|
||||||
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
|
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
|
||||||
if let Some(dox) = &item.attrs.collapsed_doc_value() {
|
if let Some(dox) = &item.attrs.collapsed_doc_value() {
|
||||||
for code_block in markdown::rust_code_blocks(&dox) {
|
for code_block in markdown::rust_code_blocks(&dox) {
|
||||||
|
|
|
@ -29,7 +29,7 @@ impl DocFragment {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn collapse_docs(krate: clean::Crate, _: &DocContext<'_, '_, '_>) -> clean::Crate {
|
pub fn collapse_docs(krate: clean::Crate, _: &DocContext<'_>) -> clean::Crate {
|
||||||
Collapser.fold_crate(krate)
|
Collapser.fold_crate(krate)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -25,7 +25,7 @@ pub const COLLECT_INTRA_DOC_LINKS: Pass = Pass {
|
||||||
description: "reads a crate's documentation to resolve intra-doc-links",
|
description: "reads a crate's documentation to resolve intra-doc-links",
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn collect_intra_doc_links(krate: Crate, cx: &DocContext<'_, '_, '_>) -> Crate {
|
pub fn collect_intra_doc_links(krate: Crate, cx: &DocContext<'_>) -> Crate {
|
||||||
if !UnstableFeatures::from_environment().is_nightly_build() {
|
if !UnstableFeatures::from_environment().is_nightly_build() {
|
||||||
krate
|
krate
|
||||||
} else {
|
} else {
|
||||||
|
@ -47,14 +47,14 @@ enum PathKind {
|
||||||
Type,
|
Type,
|
||||||
}
|
}
|
||||||
|
|
||||||
struct LinkCollector<'a, 'tcx: 'a, 'rcx: 'a> {
|
struct LinkCollector<'a, 'tcx> {
|
||||||
cx: &'a DocContext<'a, 'tcx, 'rcx>,
|
cx: &'a DocContext<'tcx>,
|
||||||
mod_ids: Vec<ast::NodeId>,
|
mod_ids: Vec<ast::NodeId>,
|
||||||
is_nightly_build: bool,
|
is_nightly_build: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> LinkCollector<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
|
||||||
fn new(cx: &'a DocContext<'a, 'tcx, 'rcx>) -> Self {
|
fn new(cx: &'a DocContext<'tcx>) -> Self {
|
||||||
LinkCollector {
|
LinkCollector {
|
||||||
cx,
|
cx,
|
||||||
mod_ids: Vec::new(),
|
mod_ids: Vec::new(),
|
||||||
|
@ -78,12 +78,11 @@ impl<'a, 'tcx, 'rcx> LinkCollector<'a, 'tcx, 'rcx> {
|
||||||
// path.
|
// path.
|
||||||
if let Some(id) = parent_id.or(self.mod_ids.last().cloned()) {
|
if let Some(id) = parent_id.or(self.mod_ids.last().cloned()) {
|
||||||
// FIXME: `with_scope` requires the `NodeId` of a module.
|
// FIXME: `with_scope` requires the `NodeId` of a module.
|
||||||
let result = cx.resolver.borrow_mut()
|
let result = cx.enter_resolver(|resolver| resolver.with_scope(id,
|
||||||
.with_scope(id,
|
|
||||||
|resolver| {
|
|resolver| {
|
||||||
resolver.resolve_str_path_error(DUMMY_SP,
|
resolver.resolve_str_path_error(DUMMY_SP,
|
||||||
&path_str, is_val)
|
&path_str, is_val)
|
||||||
});
|
}));
|
||||||
|
|
||||||
if let Ok(result) = result {
|
if let Ok(result) = result {
|
||||||
// In case this is a trait item, skip the
|
// In case this is a trait item, skip the
|
||||||
|
@ -142,11 +141,9 @@ impl<'a, 'tcx, 'rcx> LinkCollector<'a, 'tcx, 'rcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIXME: `with_scope` requires the `NodeId` of a module.
|
// FIXME: `with_scope` requires the `NodeId` of a module.
|
||||||
let ty = cx.resolver.borrow_mut()
|
let ty = cx.enter_resolver(|resolver| resolver.with_scope(id, |resolver| {
|
||||||
.with_scope(id,
|
|
||||||
|resolver| {
|
|
||||||
resolver.resolve_str_path_error(DUMMY_SP, &path, false)
|
resolver.resolve_str_path_error(DUMMY_SP, &path, false)
|
||||||
})?;
|
}))?;
|
||||||
match ty.def {
|
match ty.def {
|
||||||
Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => {
|
Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => {
|
||||||
let item = cx.tcx.inherent_impls(did)
|
let item = cx.tcx.inherent_impls(did)
|
||||||
|
@ -218,7 +215,7 @@ impl<'a, 'tcx, 'rcx> LinkCollector<'a, 'tcx, 'rcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> DocFolder for LinkCollector<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
|
||||||
fn fold_item(&mut self, mut item: Item) -> Option<Item> {
|
fn fold_item(&mut self, mut item: Item) -> Option<Item> {
|
||||||
let item_hir_id = if item.is_mod() {
|
let item_hir_id = if item.is_mod() {
|
||||||
if let Some(id) = self.cx.tcx.hir().as_local_hir_id(item.def_id) {
|
if let Some(id) = self.cx.tcx.hir().as_local_hir_id(item.def_id) {
|
||||||
|
@ -437,26 +434,27 @@ impl<'a, 'tcx, 'rcx> DocFolder for LinkCollector<'a, 'tcx, 'rcx> {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Resolves a string as a macro.
|
/// Resolves a string as a macro.
|
||||||
fn macro_resolve(cx: &DocContext<'_, '_, '_>, path_str: &str) -> Option<Def> {
|
fn macro_resolve(cx: &DocContext<'_>, path_str: &str) -> Option<Def> {
|
||||||
use syntax::ext::base::{MacroKind, SyntaxExtension};
|
use syntax::ext::base::{MacroKind, SyntaxExtension};
|
||||||
let segment = ast::PathSegment::from_ident(Ident::from_str(path_str));
|
let segment = ast::PathSegment::from_ident(Ident::from_str(path_str));
|
||||||
let path = ast::Path { segments: vec![segment], span: DUMMY_SP };
|
let path = ast::Path { segments: vec![segment], span: DUMMY_SP };
|
||||||
let mut resolver = cx.resolver.borrow_mut();
|
cx.enter_resolver(|resolver| {
|
||||||
let parent_scope = resolver.dummy_parent_scope();
|
let parent_scope = resolver.dummy_parent_scope();
|
||||||
if let Ok(def) = resolver.resolve_macro_to_def_inner(&path, MacroKind::Bang,
|
if let Ok(def) = resolver.resolve_macro_to_def_inner(&path, MacroKind::Bang,
|
||||||
&parent_scope, false, false) {
|
&parent_scope, false, false) {
|
||||||
if let Def::Macro(_, MacroKind::ProcMacroStub) = def {
|
if let Def::Macro(_, MacroKind::ProcMacroStub) = def {
|
||||||
// skip proc-macro stubs, they'll cause `get_macro` to crash
|
// skip proc-macro stubs, they'll cause `get_macro` to crash
|
||||||
} else {
|
} else {
|
||||||
if let SyntaxExtension::DeclMacro { .. } = *resolver.get_macro(def) {
|
if let SyntaxExtension::DeclMacro { .. } = *resolver.get_macro(def) {
|
||||||
return Some(def);
|
return Some(def);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
if let Some(def) = resolver.all_macros.get(&Symbol::intern(path_str)) {
|
||||||
if let Some(def) = resolver.all_macros.get(&Symbol::intern(path_str)) {
|
return Some(*def);
|
||||||
return Some(*def);
|
}
|
||||||
}
|
None
|
||||||
None
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Reports a resolution failure diagnostic.
|
/// Reports a resolution failure diagnostic.
|
||||||
|
@ -465,7 +463,7 @@ fn macro_resolve(cx: &DocContext<'_, '_, '_>, path_str: &str) -> Option<Def> {
|
||||||
/// documentation attributes themselves. This is a little heavy-handed, so we display the markdown
|
/// documentation attributes themselves. This is a little heavy-handed, so we display the markdown
|
||||||
/// line containing the failure as a note as well.
|
/// line containing the failure as a note as well.
|
||||||
fn resolution_failure(
|
fn resolution_failure(
|
||||||
cx: &DocContext<'_, '_, '_>,
|
cx: &DocContext<'_>,
|
||||||
attrs: &Attributes,
|
attrs: &Attributes,
|
||||||
path_str: &str,
|
path_str: &str,
|
||||||
dox: &str,
|
dox: &str,
|
||||||
|
@ -507,7 +505,7 @@ fn resolution_failure(
|
||||||
diag.emit();
|
diag.emit();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn ambiguity_error(cx: &DocContext<'_, '_, '_>, attrs: &Attributes,
|
fn ambiguity_error(cx: &DocContext<'_>, attrs: &Attributes,
|
||||||
path_str: &str,
|
path_str: &str,
|
||||||
article1: &str, kind1: &str, disambig1: &str,
|
article1: &str, kind1: &str, disambig1: &str,
|
||||||
article2: &str, kind2: &str, disambig2: &str) {
|
article2: &str, kind2: &str, disambig2: &str) {
|
||||||
|
@ -563,7 +561,7 @@ fn type_ns_kind(def: Def, path_str: &str) -> (&'static str, &'static str, String
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Given an enum variant's def, return the def of its enum and the associated fragment.
|
/// Given an enum variant's def, return the def of its enum and the associated fragment.
|
||||||
fn handle_variant(cx: &DocContext<'_, '_, '_>, def: Def) -> Result<(Def, Option<String>), ()> {
|
fn handle_variant(cx: &DocContext<'_>, def: Def) -> Result<(Def, Option<String>), ()> {
|
||||||
use rustc::ty::DefIdTree;
|
use rustc::ty::DefIdTree;
|
||||||
|
|
||||||
let parent = if let Some(parent) = cx.tcx.parent(def.def_id()) {
|
let parent = if let Some(parent) = cx.tcx.parent(def.def_id()) {
|
||||||
|
@ -604,7 +602,7 @@ fn is_primitive(path_str: &str, is_val: bool) -> Option<Def> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn primitive_impl(cx: &DocContext<'_, '_, '_>, path_str: &str) -> Option<DefId> {
|
fn primitive_impl(cx: &DocContext<'_>, path_str: &str) -> Option<DefId> {
|
||||||
let tcx = cx.tcx;
|
let tcx = cx.tcx;
|
||||||
match path_str {
|
match path_str {
|
||||||
"u8" => tcx.lang_items().u8_impl(),
|
"u8" => tcx.lang_items().u8_impl(),
|
||||||
|
|
|
@ -12,7 +12,7 @@ pub const COLLECT_TRAIT_IMPLS: Pass = Pass {
|
||||||
description: "retrieves trait impls for items in the crate",
|
description: "retrieves trait impls for items in the crate",
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn collect_trait_impls(krate: Crate, cx: &DocContext<'_, '_, '_>) -> Crate {
|
pub fn collect_trait_impls(krate: Crate, cx: &DocContext<'_>) -> Crate {
|
||||||
let mut synth = SyntheticImplCollector::new(cx);
|
let mut synth = SyntheticImplCollector::new(cx);
|
||||||
let mut krate = synth.fold_crate(krate);
|
let mut krate = synth.fold_crate(krate);
|
||||||
|
|
||||||
|
@ -138,13 +138,13 @@ pub fn collect_trait_impls(krate: Crate, cx: &DocContext<'_, '_, '_>) -> Crate {
|
||||||
krate
|
krate
|
||||||
}
|
}
|
||||||
|
|
||||||
struct SyntheticImplCollector<'a, 'tcx: 'a, 'rcx: 'a> {
|
struct SyntheticImplCollector<'a, 'tcx> {
|
||||||
cx: &'a DocContext<'a, 'tcx, 'rcx>,
|
cx: &'a DocContext<'tcx>,
|
||||||
impls: Vec<Item>,
|
impls: Vec<Item>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> SyntheticImplCollector<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> SyntheticImplCollector<'a, 'tcx> {
|
||||||
fn new(cx: &'a DocContext<'a, 'tcx, 'rcx>) -> Self {
|
fn new(cx: &'a DocContext<'tcx>) -> Self {
|
||||||
SyntheticImplCollector {
|
SyntheticImplCollector {
|
||||||
cx,
|
cx,
|
||||||
impls: Vec::new(),
|
impls: Vec::new(),
|
||||||
|
@ -152,7 +152,7 @@ impl<'a, 'tcx, 'rcx> SyntheticImplCollector<'a, 'tcx, 'rcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> DocFolder for SyntheticImplCollector<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> DocFolder for SyntheticImplCollector<'a, 'tcx> {
|
||||||
fn fold_item(&mut self, i: Item) -> Option<Item> {
|
fn fold_item(&mut self, i: Item) -> Option<Item> {
|
||||||
if i.is_struct() || i.is_enum() || i.is_union() {
|
if i.is_struct() || i.is_enum() || i.is_union() {
|
||||||
if let (Some(hir_id), Some(name)) =
|
if let (Some(hir_id), Some(name)) =
|
||||||
|
|
|
@ -54,7 +54,7 @@ pub use self::calculate_doc_coverage::CALCULATE_DOC_COVERAGE;
|
||||||
#[derive(Copy, Clone)]
|
#[derive(Copy, Clone)]
|
||||||
pub struct Pass {
|
pub struct Pass {
|
||||||
pub name: &'static str,
|
pub name: &'static str,
|
||||||
pub pass: fn(clean::Crate, &DocContext<'_, '_, '_>) -> clean::Crate,
|
pub pass: fn(clean::Crate, &DocContext<'_>) -> clean::Crate,
|
||||||
pub description: &'static str,
|
pub description: &'static str,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -308,8 +308,8 @@ impl DocFolder for ImportStripper {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn look_for_tests<'a, 'tcx: 'a, 'rcx: 'a>(
|
pub fn look_for_tests<'tcx>(
|
||||||
cx: &'a DocContext<'a, 'tcx, 'rcx>,
|
cx: &DocContext<'tcx>,
|
||||||
dox: &str,
|
dox: &str,
|
||||||
item: &Item,
|
item: &Item,
|
||||||
check_missing_code: bool,
|
check_missing_code: bool,
|
||||||
|
@ -370,7 +370,7 @@ crate fn span_of_attrs(attrs: &clean::Attributes) -> Span {
|
||||||
/// attributes are not all sugared doc comments. It's difficult to calculate the correct span in
|
/// attributes are not all sugared doc comments. It's difficult to calculate the correct span in
|
||||||
/// that case due to escaping and other source features.
|
/// that case due to escaping and other source features.
|
||||||
crate fn source_span_for_markdown_range(
|
crate fn source_span_for_markdown_range(
|
||||||
cx: &DocContext<'_, '_, '_>,
|
cx: &DocContext<'_>,
|
||||||
markdown: &str,
|
markdown: &str,
|
||||||
md_range: &Range<usize>,
|
md_range: &Range<usize>,
|
||||||
attrs: &clean::Attributes,
|
attrs: &clean::Attributes,
|
||||||
|
|
|
@ -9,25 +9,25 @@ pub const CHECK_PRIVATE_ITEMS_DOC_TESTS: Pass = Pass {
|
||||||
description: "check private items doc tests",
|
description: "check private items doc tests",
|
||||||
};
|
};
|
||||||
|
|
||||||
struct PrivateItemDocTestLinter<'a, 'tcx: 'a, 'rcx: 'a> {
|
struct PrivateItemDocTestLinter<'a, 'tcx> {
|
||||||
cx: &'a DocContext<'a, 'tcx, 'rcx>,
|
cx: &'a DocContext<'tcx>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> PrivateItemDocTestLinter<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> PrivateItemDocTestLinter<'a, 'tcx> {
|
||||||
fn new(cx: &'a DocContext<'a, 'tcx, 'rcx>) -> Self {
|
fn new(cx: &'a DocContext<'tcx>) -> Self {
|
||||||
PrivateItemDocTestLinter {
|
PrivateItemDocTestLinter {
|
||||||
cx,
|
cx,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn check_private_items_doc_tests(krate: Crate, cx: &DocContext<'_, '_, '_>) -> Crate {
|
pub fn check_private_items_doc_tests(krate: Crate, cx: &DocContext<'_>) -> Crate {
|
||||||
let mut coll = PrivateItemDocTestLinter::new(cx);
|
let mut coll = PrivateItemDocTestLinter::new(cx);
|
||||||
|
|
||||||
coll.fold_crate(krate)
|
coll.fold_crate(krate)
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> DocFolder for PrivateItemDocTestLinter<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> DocFolder for PrivateItemDocTestLinter<'a, 'tcx> {
|
||||||
fn fold_item(&mut self, item: Item) -> Option<Item> {
|
fn fold_item(&mut self, item: Item) -> Option<Item> {
|
||||||
let cx = self.cx;
|
let cx = self.cx;
|
||||||
let dox = item.attrs.collapsed_doc_value().unwrap_or_else(String::new);
|
let dox = item.attrs.collapsed_doc_value().unwrap_or_else(String::new);
|
||||||
|
|
|
@ -12,7 +12,7 @@ pub const PROPAGATE_DOC_CFG: Pass = Pass {
|
||||||
description: "propagates `#[doc(cfg(...))]` to child items",
|
description: "propagates `#[doc(cfg(...))]` to child items",
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn propagate_doc_cfg(cr: Crate, _: &DocContext<'_, '_, '_>) -> Crate {
|
pub fn propagate_doc_cfg(cr: Crate, _: &DocContext<'_>) -> Crate {
|
||||||
CfgPropagator { parent_cfg: None }.fold_crate(cr)
|
CfgPropagator { parent_cfg: None }.fold_crate(cr)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -14,7 +14,7 @@ pub const STRIP_HIDDEN: Pass = Pass {
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Strip items marked `#[doc(hidden)]`
|
/// Strip items marked `#[doc(hidden)]`
|
||||||
pub fn strip_hidden(krate: clean::Crate, _: &DocContext<'_, '_, '_>) -> clean::Crate {
|
pub fn strip_hidden(krate: clean::Crate, _: &DocContext<'_>) -> clean::Crate {
|
||||||
let mut retained = DefIdSet::default();
|
let mut retained = DefIdSet::default();
|
||||||
|
|
||||||
// strip all #[doc(hidden)] items
|
// strip all #[doc(hidden)] items
|
||||||
|
|
|
@ -9,6 +9,6 @@ pub const STRIP_PRIV_IMPORTS: Pass = Pass {
|
||||||
description: "strips all private import statements (`use`, `extern crate`) from a crate",
|
description: "strips all private import statements (`use`, `extern crate`) from a crate",
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn strip_priv_imports(krate: clean::Crate, _: &DocContext<'_, '_, '_>) -> clean::Crate {
|
pub fn strip_priv_imports(krate: clean::Crate, _: &DocContext<'_>) -> clean::Crate {
|
||||||
ImportStripper.fold_crate(krate)
|
ImportStripper.fold_crate(krate)
|
||||||
}
|
}
|
||||||
|
|
|
@ -14,7 +14,7 @@ pub const STRIP_PRIVATE: Pass = Pass {
|
||||||
|
|
||||||
/// Strip private items from the point of view of a crate or externally from a
|
/// Strip private items from the point of view of a crate or externally from a
|
||||||
/// crate, specified by the `xcrate` flag.
|
/// crate, specified by the `xcrate` flag.
|
||||||
pub fn strip_private(mut krate: clean::Crate, cx: &DocContext<'_, '_, '_>) -> clean::Crate {
|
pub fn strip_private(mut krate: clean::Crate, cx: &DocContext<'_>) -> clean::Crate {
|
||||||
// This stripper collects all *retained* nodes.
|
// This stripper collects all *retained* nodes.
|
||||||
let mut retained = DefIdSet::default();
|
let mut retained = DefIdSet::default();
|
||||||
let access_levels = cx.renderinfo.borrow().access_levels.clone();
|
let access_levels = cx.renderinfo.borrow().access_levels.clone();
|
||||||
|
|
|
@ -13,7 +13,7 @@ pub const UNINDENT_COMMENTS: Pass = Pass {
|
||||||
description: "removes excess indentation on comments in order for markdown to like it",
|
description: "removes excess indentation on comments in order for markdown to like it",
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn unindent_comments(krate: clean::Crate, _: &DocContext<'_, '_, '_>) -> clean::Crate {
|
pub fn unindent_comments(krate: clean::Crate, _: &DocContext<'_>) -> clean::Crate {
|
||||||
CommentCleaner.fold_crate(krate)
|
CommentCleaner.fold_crate(krate)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,21 +1,16 @@
|
||||||
use errors::{self, FatalError};
|
|
||||||
use errors::emitter::ColorConfig;
|
|
||||||
use rustc_data_structures::sync::Lrc;
|
use rustc_data_structures::sync::Lrc;
|
||||||
use rustc_lint;
|
use rustc_interface::interface;
|
||||||
use rustc_driver::{self, driver, Compilation};
|
|
||||||
use rustc_driver::driver::phase_2_configure_and_expand;
|
|
||||||
use rustc_metadata::cstore::CStore;
|
|
||||||
use rustc_interface::util;
|
|
||||||
use rustc::hir;
|
use rustc::hir;
|
||||||
use rustc::hir::intravisit;
|
use rustc::hir::intravisit;
|
||||||
use rustc::session::{self, CompileIncomplete, config};
|
use rustc::hir::def_id::LOCAL_CRATE;
|
||||||
|
use rustc::session::{self, config, DiagnosticOutput};
|
||||||
use rustc::session::config::{OutputType, OutputTypes, Externs, CodegenOptions};
|
use rustc::session::config::{OutputType, OutputTypes, Externs, CodegenOptions};
|
||||||
use rustc::session::search_paths::SearchPath;
|
use rustc::session::search_paths::SearchPath;
|
||||||
|
use rustc::util::common::ErrorReported;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::source_map::SourceMap;
|
use syntax::source_map::SourceMap;
|
||||||
use syntax::edition::Edition;
|
use syntax::edition::Edition;
|
||||||
use syntax::feature_gate::UnstableFeatures;
|
use syntax::feature_gate::UnstableFeatures;
|
||||||
use syntax::with_globals;
|
|
||||||
use syntax_pos::{BytePos, DUMMY_SP, Pos, Span, FileName};
|
use syntax_pos::{BytePos, DUMMY_SP, Pos, Span, FileName};
|
||||||
use tempfile::Builder as TempFileBuilder;
|
use tempfile::Builder as TempFileBuilder;
|
||||||
use testing;
|
use testing;
|
||||||
|
@ -23,8 +18,8 @@ use testing;
|
||||||
use std::env;
|
use std::env;
|
||||||
use std::io::prelude::*;
|
use std::io::prelude::*;
|
||||||
use std::io;
|
use std::io;
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::panic::{self, AssertUnwindSafe};
|
use std::panic::{self, AssertUnwindSafe};
|
||||||
|
use std::path::PathBuf;
|
||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
use std::str;
|
use std::str;
|
||||||
use std::sync::{Arc, Mutex};
|
use std::sync::{Arc, Mutex};
|
||||||
|
@ -44,7 +39,7 @@ pub struct TestOptions {
|
||||||
pub attrs: Vec<String>,
|
pub attrs: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn run(mut options: Options) -> isize {
|
pub fn run(options: Options) -> i32 {
|
||||||
let input = config::Input::File(options.input.clone());
|
let input = config::Input::File(options.input.clone());
|
||||||
|
|
||||||
let sessopts = config::Options {
|
let sessopts = config::Options {
|
||||||
|
@ -63,52 +58,31 @@ pub fn run(mut options: Options) -> isize {
|
||||||
edition: options.edition,
|
edition: options.edition,
|
||||||
..config::Options::default()
|
..config::Options::default()
|
||||||
};
|
};
|
||||||
driver::spawn_thread_pool(sessopts, |sessopts| {
|
|
||||||
let source_map = Lrc::new(SourceMap::new(sessopts.file_path_mapping()));
|
|
||||||
let handler =
|
|
||||||
errors::Handler::with_tty_emitter(ColorConfig::Auto,
|
|
||||||
true, None,
|
|
||||||
Some(source_map.clone()));
|
|
||||||
|
|
||||||
let mut sess = session::build_session_(
|
let config = interface::Config {
|
||||||
sessopts, Some(options.input), handler, source_map.clone(), Default::default(),
|
opts: sessopts,
|
||||||
);
|
crate_cfg: config::parse_cfgspecs(options.cfgs.clone()),
|
||||||
let codegen_backend = util::get_codegen_backend(&sess);
|
input,
|
||||||
let cstore = CStore::new(codegen_backend.metadata_loader());
|
input_path: None,
|
||||||
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
|
output_file: None,
|
||||||
|
output_dir: None,
|
||||||
|
file_loader: None,
|
||||||
|
diagnostic_output: DiagnosticOutput::Default,
|
||||||
|
stderr: None,
|
||||||
|
crate_name: options.crate_name.clone(),
|
||||||
|
lint_caps: Default::default(),
|
||||||
|
};
|
||||||
|
|
||||||
let mut cfg = config::build_configuration(&sess,
|
let mut test_args = options.test_args.clone();
|
||||||
config::parse_cfgspecs(options.cfgs.clone()));
|
let display_warnings = options.display_warnings;
|
||||||
util::add_configuration(&mut cfg, &sess, &*codegen_backend);
|
|
||||||
sess.parse_sess.config = cfg;
|
|
||||||
|
|
||||||
let krate =
|
let tests = interface::run_compiler(config, |compiler| -> Result<_, ErrorReported> {
|
||||||
match driver::phase_1_parse_input(&driver::CompileController::basic(), &sess, &input) {
|
let lower_to_hir = compiler.lower_to_hir()?;
|
||||||
Ok(krate) => krate,
|
|
||||||
Err(mut e) => {
|
|
||||||
e.emit();
|
|
||||||
FatalError.raise();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let driver::ExpansionResult { defs, mut hir_forest, .. } = {
|
|
||||||
phase_2_configure_and_expand(
|
|
||||||
&sess,
|
|
||||||
&cstore,
|
|
||||||
krate,
|
|
||||||
None,
|
|
||||||
"rustdoc-test",
|
|
||||||
None,
|
|
||||||
|_| Ok(()),
|
|
||||||
).expect("phase_2_configure_and_expand aborted in rustdoc!")
|
|
||||||
};
|
|
||||||
|
|
||||||
let crate_name = options.crate_name.unwrap_or_else(|| {
|
let mut opts = scrape_test_config(lower_to_hir.peek().0.borrow().krate());
|
||||||
::rustc_codegen_utils::link::find_crate_name(None, &hir_forest.krate().attrs, &input)
|
|
||||||
});
|
|
||||||
let mut opts = scrape_test_config(hir_forest.krate());
|
|
||||||
opts.display_warnings |= options.display_warnings;
|
opts.display_warnings |= options.display_warnings;
|
||||||
let mut collector = Collector::new(
|
let mut collector = Collector::new(
|
||||||
crate_name,
|
compiler.crate_name()?.peek().to_string(),
|
||||||
options.cfgs,
|
options.cfgs,
|
||||||
options.libs,
|
options.libs,
|
||||||
options.codegen_options,
|
options.codegen_options,
|
||||||
|
@ -116,34 +90,40 @@ pub fn run(mut options: Options) -> isize {
|
||||||
false,
|
false,
|
||||||
opts,
|
opts,
|
||||||
options.maybe_sysroot,
|
options.maybe_sysroot,
|
||||||
Some(source_map),
|
Some(compiler.source_map().clone()),
|
||||||
None,
|
None,
|
||||||
options.linker,
|
options.linker,
|
||||||
options.edition,
|
options.edition,
|
||||||
options.persist_doctests,
|
options.persist_doctests,
|
||||||
);
|
);
|
||||||
|
|
||||||
{
|
let mut global_ctxt = compiler.global_ctxt()?.take();
|
||||||
let map = hir::map::map_crate(&sess, &cstore, &mut hir_forest, &defs);
|
global_ctxt.enter(|tcx| {
|
||||||
let krate = map.krate();
|
let krate = tcx.hir().krate();
|
||||||
let mut hir_collector = HirCollector {
|
let mut hir_collector = HirCollector {
|
||||||
sess: &sess,
|
sess: compiler.session(),
|
||||||
collector: &mut collector,
|
collector: &mut collector,
|
||||||
map: &map,
|
map: tcx.hir(),
|
||||||
codes: ErrorCodes::from(sess.opts.unstable_features.is_nightly_build()),
|
codes: ErrorCodes::from(compiler.session().opts
|
||||||
|
.unstable_features.is_nightly_build()),
|
||||||
};
|
};
|
||||||
hir_collector.visit_testable("".to_string(), &krate.attrs, |this| {
|
hir_collector.visit_testable("".to_string(), &krate.attrs, |this| {
|
||||||
intravisit::walk_crate(this, krate);
|
intravisit::walk_crate(this, krate);
|
||||||
});
|
});
|
||||||
}
|
});
|
||||||
|
|
||||||
options.test_args.insert(0, "rustdoctest".to_string());
|
Ok(collector.tests)
|
||||||
|
}).expect("compiler aborted in rustdoc!");
|
||||||
|
|
||||||
testing::test_main(&options.test_args,
|
test_args.insert(0, "rustdoctest".to_string());
|
||||||
collector.tests.into_iter().collect(),
|
|
||||||
testing::Options::new().display_output(options.display_warnings));
|
testing::test_main(
|
||||||
0
|
&test_args,
|
||||||
})
|
tests,
|
||||||
|
testing::Options::new().display_output(display_warnings)
|
||||||
|
);
|
||||||
|
|
||||||
|
0
|
||||||
}
|
}
|
||||||
|
|
||||||
// Look for `#![doc(test(no_crate_inject))]`, used by crates in the std facade.
|
// Look for `#![doc(test(no_crate_inject))]`, used by crates in the std facade.
|
||||||
|
@ -239,16 +219,18 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
|
||||||
}
|
}
|
||||||
fn flush(&mut self) -> io::Result<()> { Ok(()) }
|
fn flush(&mut self) -> io::Result<()> { Ok(()) }
|
||||||
}
|
}
|
||||||
struct Bomb(Arc<Mutex<Vec<u8>>>, Box<dyn Write+Send>);
|
struct Bomb(Arc<Mutex<Vec<u8>>>, Option<Box<dyn Write+Send>>);
|
||||||
impl Drop for Bomb {
|
impl Drop for Bomb {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
let _ = self.1.write_all(&self.0.lock().unwrap());
|
let mut old = self.1.take().unwrap();
|
||||||
|
let _ = old.write_all(&self.0.lock().unwrap());
|
||||||
|
io::set_panic(Some(old));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let data = Arc::new(Mutex::new(Vec::new()));
|
let data = Arc::new(Mutex::new(Vec::new()));
|
||||||
|
|
||||||
let old = io::set_panic(Some(box Sink(data.clone())));
|
let old = io::set_panic(Some(box Sink(data.clone())));
|
||||||
let _bomb = Bomb(data.clone(), old.unwrap_or(box io::stdout()));
|
let _bomb = Bomb(data.clone(), Some(old.unwrap_or(box io::stdout())));
|
||||||
|
|
||||||
enum DirState {
|
enum DirState {
|
||||||
Temp(tempfile::TempDir),
|
Temp(tempfile::TempDir),
|
||||||
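
Note: the `Bomb` guard in the hunk above now owns the saved writer as an `Option<Box<dyn Write + Send>>`, so its `Drop` impl can move the writer out with `Option::take`, flush the captured output into it, and hand it back to `io::set_panic`. A runnable sketch of the take-on-drop part; `set_panic` is unstable, so the restore step is only noted in a comment.

    use std::io::{self, Write};
    use std::sync::{Arc, Mutex};

    // Captured output plus the writer to restore, held in an Option so Drop can
    // move the writer back out with Option::take.
    struct Bomb(Arc<Mutex<Vec<u8>>>, Option<Box<dyn Write + Send>>);

    impl Drop for Bomb {
        fn drop(&mut self) {
            let mut old = self.1.take().unwrap();
            let _ = old.write_all(&self.0.lock().unwrap());
            // The real code then restores `old` via io::set_panic(Some(old)).
        }
    }

    fn main() {
        let data = Arc::new(Mutex::new(b"captured doctest output\n".to_vec()));
        let _bomb = Bomb(data, Some(Box::new(io::stdout())));
        // When _bomb is dropped at the end of main, the buffer is written out.
    }
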
|
@ -264,91 +246,67 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let (outdir, compile_result) = driver::spawn_thread_pool(sessopts, |sessopts| {
|
let outdir = if let Some(mut path) = persist_doctests {
|
||||||
let source_map = Lrc::new(SourceMap::new(sessopts.file_path_mapping()));
|
path.push(format!("{}_{}",
|
||||||
let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()),
|
filename
|
||||||
Some(source_map.clone()),
|
.to_string()
|
||||||
false,
|
.rsplit('/')
|
||||||
false);
|
.next()
|
||||||
|
.unwrap()
|
||||||
// Compile the code
|
.replace(".", "_"),
|
||||||
let diagnostic_handler = errors::Handler::with_emitter(true, None, box emitter);
|
line)
|
||||||
|
|
||||||
let mut sess = session::build_session_(
|
|
||||||
sessopts, None, diagnostic_handler, source_map, Default::default(),
|
|
||||||
);
|
);
|
||||||
let codegen_backend = util::get_codegen_backend(&sess);
|
std::fs::create_dir_all(&path)
|
||||||
let cstore = CStore::new(codegen_backend.metadata_loader());
|
.expect("Couldn't create directory for doctest executables");
|
||||||
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
|
|
||||||
|
|
||||||
let outdir = Mutex::new(
|
DirState::Perm(path)
|
||||||
if let Some(mut path) = persist_doctests {
|
} else {
|
||||||
path.push(format!("{}_{}",
|
DirState::Temp(TempFileBuilder::new()
|
||||||
filename
|
.prefix("rustdoctest")
|
||||||
.to_string()
|
.tempdir()
|
||||||
.rsplit('/')
|
.expect("rustdoc needs a tempdir"))
|
||||||
.next()
|
};
|
||||||
.unwrap()
|
let output_file = outdir.path().join("rust_out");
|
||||||
.replace(".", "_"),
|
|
||||||
line)
|
|
||||||
);
|
|
||||||
std::fs::create_dir_all(&path)
|
|
||||||
.expect("Couldn't create directory for doctest executables");
|
|
||||||
|
|
||||||
DirState::Perm(path)
|
let config = interface::Config {
|
||||||
|
opts: sessopts,
|
||||||
|
crate_cfg: config::parse_cfgspecs(cfgs),
|
||||||
|
input,
|
||||||
|
input_path: None,
|
||||||
|
output_file: Some(output_file.clone()),
|
||||||
|
output_dir: None,
|
||||||
|
file_loader: None,
|
||||||
|
diagnostic_output: DiagnosticOutput::Raw(box Sink(data.clone())),
|
||||||
|
stderr: Some(data.clone()),
|
||||||
|
crate_name: None,
|
||||||
|
lint_caps: Default::default(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let compile_result = panic::catch_unwind(AssertUnwindSafe(|| {
|
||||||
|
interface::run_compiler(config, |compiler| {
|
||||||
|
if no_run {
|
||||||
|
compiler.global_ctxt().and_then(|global_ctxt| global_ctxt.take().enter(|tcx| {
|
||||||
|
tcx.analysis(LOCAL_CRATE)
|
||||||
|
})).ok();
|
||||||
} else {
|
} else {
|
||||||
DirState::Temp(TempFileBuilder::new()
|
compiler.compile().ok();
|
||||||
.prefix("rustdoctest")
|
};
|
||||||
.tempdir()
|
compiler.session().compile_status()
|
||||||
.expect("rustdoc needs a tempdir"))
|
})
|
||||||
}
|
})).map_err(|_| ()).and_then(|s| s.map_err(|_| ()));
|
||||||
);
|
|
||||||
let mut control = driver::CompileController::basic();
|
|
||||||
|
|
||||||
let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone()));
|
|
||||||
util::add_configuration(&mut cfg, &sess, &*codegen_backend);
|
|
||||||
sess.parse_sess.config = cfg;
|
|
||||||
|
|
||||||
let out = Some(outdir.lock().unwrap().path().join("rust_out"));
|
|
||||||
|
|
||||||
if no_run {
|
|
||||||
control.after_analysis.stop = Compilation::Stop;
|
|
||||||
}
|
|
||||||
|
|
||||||
let res = panic::catch_unwind(AssertUnwindSafe(|| {
|
|
||||||
driver::compile_input(
|
|
||||||
codegen_backend,
|
|
||||||
&sess,
|
|
||||||
&cstore,
|
|
||||||
&None,
|
|
||||||
&input,
|
|
||||||
&None,
|
|
||||||
&out,
|
|
||||||
None,
|
|
||||||
&control
|
|
||||||
)
|
|
||||||
}));
|
|
||||||
|
|
||||||
let compile_result = match res {
|
|
||||||
Ok(Ok(())) | Ok(Err(CompileIncomplete::Stopped)) => Ok(()),
|
|
||||||
Err(_) | Ok(Err(CompileIncomplete::Errored(_))) => Err(())
|
|
||||||
};
|
|
||||||
|
|
||||||
(outdir, compile_result)
|
|
||||||
});
|
|
||||||
|
|
||||||
match (compile_result, compile_fail) {
|
match (compile_result, compile_fail) {
|
||||||
(Ok(()), true) => {
|
(Ok(()), true) => {
|
||||||
panic!("test compiled while it wasn't supposed to")
|
panic!("test compiled while it wasn't supposed to")
|
||||||
}
|
}
|
||||||
(Ok(()), false) => {}
|
(Ok(()), false) => {}
|
||||||
(Err(()), true) => {
|
(Err(_), true) => {
|
||||||
if error_codes.len() > 0 {
|
if error_codes.len() > 0 {
|
||||||
let out = String::from_utf8(data.lock().unwrap().to_vec()).unwrap();
|
let out = String::from_utf8(data.lock().unwrap().to_vec()).unwrap();
|
||||||
error_codes.retain(|err| !out.contains(err));
|
error_codes.retain(|err| !out.contains(err));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
(Err(()), false) => {
|
(Err(_), false) => {
|
||||||
panic!("couldn't compile the test")
|
panic!("couldn't compile the test")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
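
Note: `run_test` now derives its pass/fail answer from two layers, `panic::catch_unwind` around the whole `run_compiler` call and `compiler.session().compile_status()` inside it, flattened with `map_err`/`and_then` into one `Result`. The same flattening in miniature, with a string standing in for the reported error:

    use std::panic::{self, AssertUnwindSafe};

    // Outer Err: the compiler panicked. Inner Err: it ran but reported errors.
    // Both collapse into Err(()) exactly as in the new run_test.
    fn compile_once(source_ok: bool) -> Result<(), ()> {
        panic::catch_unwind(AssertUnwindSafe(|| -> Result<(), String> {
            if source_ok { Ok(()) } else { Err("error reported".to_string()) }
        }))
        .map_err(|_| ())
        .and_then(|status| status.map_err(|_| ()))
    }

    fn main() {
        assert_eq!(compile_once(true), Ok(()));
        assert_eq!(compile_once(false), Err(()));
    }
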
@ -360,7 +318,8 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
|
||||||
if no_run { return }
|
if no_run { return }
|
||||||
|
|
||||||
// Run the code!
|
// Run the code!
|
||||||
let mut cmd = Command::new(&outdir.lock().unwrap().path().join("rust_out"));
|
let mut cmd = Command::new(output_file);
|
||||||
|
|
||||||
match cmd.output() {
|
match cmd.output() {
|
||||||
Err(e) => panic!("couldn't run the test: {}{}", e,
|
Err(e) => panic!("couldn't run the test: {}{}", e,
|
||||||
if e.kind() == io::ErrorKind::PermissionDenied {
|
if e.kind() == io::ErrorKind::PermissionDenied {
|
||||||
|
@ -735,35 +694,26 @@ impl Tester for Collector {
|
||||||
allow_fail: config.allow_fail,
|
allow_fail: config.allow_fail,
|
||||||
},
|
},
|
||||||
testfn: testing::DynTestFn(box move || {
|
testfn: testing::DynTestFn(box move || {
|
||||||
let panic = io::set_panic(None);
|
run_test(
|
||||||
let print = io::set_print(None);
|
&test,
|
||||||
match {
|
&cratename,
|
||||||
rustc_driver::in_named_rustc_thread(name, move || with_globals(move || {
|
&filename,
|
||||||
io::set_panic(panic);
|
line,
|
||||||
io::set_print(print);
|
cfgs,
|
||||||
run_test(&test,
|
libs,
|
||||||
&cratename,
|
cg,
|
||||||
&filename,
|
externs,
|
||||||
line,
|
config.should_panic,
|
||||||
cfgs,
|
config.no_run,
|
||||||
libs,
|
config.test_harness,
|
||||||
cg,
|
config.compile_fail,
|
||||||
externs,
|
config.error_codes,
|
||||||
config.should_panic,
|
&opts,
|
||||||
config.no_run,
|
maybe_sysroot,
|
||||||
config.test_harness,
|
linker,
|
||||||
config.compile_fail,
|
edition,
|
||||||
config.error_codes,
|
persist_doctests
|
||||||
&opts,
|
)
|
||||||
maybe_sysroot,
|
|
||||||
linker,
|
|
||||||
edition,
|
|
||||||
persist_doctests)
|
|
||||||
}))
|
|
||||||
} {
|
|
||||||
Ok(()) => (),
|
|
||||||
Err(err) => panic::resume_unwind(err),
|
|
||||||
}
|
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
@ -27,10 +27,10 @@ use crate::doctree::*;
|
||||||
// Also, is there some reason that this doesn't use the 'visit'
|
// Also, is there some reason that this doesn't use the 'visit'
|
||||||
// framework from syntax?.
|
// framework from syntax?.
|
||||||
|
|
||||||
pub struct RustdocVisitor<'a, 'tcx: 'a, 'rcx: 'a> {
|
pub struct RustdocVisitor<'a, 'tcx> {
|
||||||
pub module: Module,
|
pub module: Module,
|
||||||
pub attrs: hir::HirVec<ast::Attribute>,
|
pub attrs: hir::HirVec<ast::Attribute>,
|
||||||
pub cx: &'a core::DocContext<'a, 'tcx, 'rcx>,
|
pub cx: &'a core::DocContext<'tcx>,
|
||||||
view_item_stack: FxHashSet<ast::NodeId>,
|
view_item_stack: FxHashSet<ast::NodeId>,
|
||||||
inlining: bool,
|
inlining: bool,
|
||||||
/// Are the current module and all of its parents public?
|
/// Are the current module and all of its parents public?
|
||||||
|
@ -38,10 +38,10 @@ pub struct RustdocVisitor<'a, 'tcx: 'a, 'rcx: 'a> {
|
||||||
exact_paths: Option<FxHashMap<DefId, Vec<String>>>,
|
exact_paths: Option<FxHashMap<DefId, Vec<String>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
cx: &'a core::DocContext<'a, 'tcx, 'rcx>
|
cx: &'a core::DocContext<'tcx>
|
||||||
) -> RustdocVisitor<'a, 'tcx, 'rcx> {
|
) -> RustdocVisitor<'a, 'tcx> {
|
||||||
// If the root is re-exported, terminate all recursion.
|
// If the root is re-exported, terminate all recursion.
|
||||||
let mut stack = FxHashSet::default();
|
let mut stack = FxHashSet::default();
|
||||||
stack.insert(ast::CRATE_NODE_ID);
|
stack.insert(ast::CRATE_NODE_ID);
|
||||||
|
@ -269,7 +269,7 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> {
|
||||||
om: &mut Module,
|
om: &mut Module,
|
||||||
please_inline: bool) -> bool {
|
please_inline: bool) -> bool {
|
||||||
|
|
||||||
fn inherits_doc_hidden(cx: &core::DocContext<'_, '_, '_>, mut node: ast::NodeId) -> bool {
|
fn inherits_doc_hidden(cx: &core::DocContext<'_>, mut node: ast::NodeId) -> bool {
|
||||||
while let Some(id) = cx.tcx.hir().get_enclosing_scope(node) {
|
while let Some(id) = cx.tcx.hir().get_enclosing_scope(node) {
|
||||||
node = id;
|
node = id;
|
||||||
if cx.tcx.hir().attrs(node).lists("doc").has_word("hidden") {
|
if cx.tcx.hir().attrs(node).lists("doc").has_word("hidden") {
|
||||||
|
|
|
@ -12,8 +12,8 @@ use crate::clean::{AttributesExt, NestedAttributesExt};
|
||||||
|
|
||||||
/// Similar to `librustc_privacy::EmbargoVisitor`, but also takes
|
/// Similar to `librustc_privacy::EmbargoVisitor`, but also takes
|
||||||
/// specific rustdoc annotations into account (i.e., `doc(hidden)`)
|
/// specific rustdoc annotations into account (i.e., `doc(hidden)`)
|
||||||
pub struct LibEmbargoVisitor<'a, 'tcx: 'a, 'rcx: 'a> {
|
pub struct LibEmbargoVisitor<'a, 'tcx> {
|
||||||
cx: &'a crate::core::DocContext<'a, 'tcx, 'rcx>,
|
cx: &'a crate::core::DocContext<'tcx>,
|
||||||
// Accessibility levels for reachable nodes
|
// Accessibility levels for reachable nodes
|
||||||
access_levels: RefMut<'a, AccessLevels<DefId>>,
|
access_levels: RefMut<'a, AccessLevels<DefId>>,
|
||||||
// Previous accessibility level, None means unreachable
|
// Previous accessibility level, None means unreachable
|
||||||
|
@ -22,10 +22,10 @@ pub struct LibEmbargoVisitor<'a, 'tcx: 'a, 'rcx: 'a> {
|
||||||
visited_mods: FxHashSet<DefId>,
|
visited_mods: FxHashSet<DefId>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'tcx, 'rcx> LibEmbargoVisitor<'a, 'tcx, 'rcx> {
|
impl<'a, 'tcx> LibEmbargoVisitor<'a, 'tcx> {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
cx: &'a crate::core::DocContext<'a, 'tcx, 'rcx>
|
cx: &'a crate::core::DocContext<'tcx>
|
||||||
) -> LibEmbargoVisitor<'a, 'tcx, 'rcx> {
|
) -> LibEmbargoVisitor<'a, 'tcx> {
|
||||||
LibEmbargoVisitor {
|
LibEmbargoVisitor {
|
||||||
cx,
|
cx,
|
||||||
access_levels: RefMut::map(cx.renderinfo.borrow_mut(), |ri| &mut ri.access_levels),
|
access_levels: RefMut::map(cx.renderinfo.borrow_mut(), |ri| &mut ri.access_levels),
|
||||||
|
|
|
@ -1,14 +1,3 @@
|
||||||
#![feature(link_args)]
|
|
||||||
|
|
||||||
// Set the stack size at link time on Windows. See rustc_driver::in_rustc_thread
|
|
||||||
// for the rationale.
|
|
||||||
#[allow(unused_attributes)]
|
|
||||||
#[cfg_attr(all(windows, target_env = "msvc"), link_args = "/STACK:16777216")]
|
|
||||||
// We only build for msvc and gnu now, but we use a exhaustive condition here
|
|
||||||
// so we can expect either the stack size to be set or the build fails.
|
|
||||||
#[cfg_attr(all(windows, not(target_env = "msvc")), link_args = "-Wl,--stack,16777216")]
|
|
||||||
// Also, don't forget to set this for rustdoc.
|
|
||||||
extern {}
|
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
// Pull in jemalloc when enabled.
|
// Pull in jemalloc when enabled.
|
||||||
|
|
|
@ -7,12 +7,13 @@ extern crate rustc_codegen_utils;
|
||||||
use std::any::Any;
|
use std::any::Any;
|
||||||
use std::sync::mpsc;
|
use std::sync::mpsc;
|
||||||
use syntax::symbol::Symbol;
|
use syntax::symbol::Symbol;
|
||||||
use rustc::session::{Session, CompileIncomplete};
|
use rustc::session::Session;
|
||||||
use rustc::session::config::OutputFilenames;
|
use rustc::session::config::OutputFilenames;
|
||||||
use rustc::ty::TyCtxt;
|
use rustc::ty::TyCtxt;
|
||||||
use rustc::ty::query::Providers;
|
use rustc::ty::query::Providers;
|
||||||
use rustc::middle::cstore::MetadataLoader;
|
use rustc::middle::cstore::MetadataLoader;
|
||||||
use rustc::dep_graph::DepGraph;
|
use rustc::dep_graph::DepGraph;
|
||||||
|
use rustc::util::common::ErrorReported;
|
||||||
use rustc_codegen_utils::codegen_backend::{CodegenBackend, MetadataOnlyCodegenBackend};
|
use rustc_codegen_utils::codegen_backend::{CodegenBackend, MetadataOnlyCodegenBackend};
|
||||||
|
|
||||||
struct TheBackend(Box<CodegenBackend>);
|
struct TheBackend(Box<CodegenBackend>);
|
||||||
|
@ -46,7 +47,7 @@ impl CodegenBackend for TheBackend {
|
||||||
sess: &Session,
|
sess: &Session,
|
||||||
_dep_graph: &DepGraph,
|
_dep_graph: &DepGraph,
|
||||||
outputs: &OutputFilenames,
|
outputs: &OutputFilenames,
|
||||||
) -> Result<(), CompileIncomplete> {
|
) -> Result<(), ErrorReported> {
|
||||||
use std::io::Write;
|
use std::io::Write;
|
||||||
use rustc::session::config::CrateType;
|
use rustc::session::config::CrateType;
|
||||||
use rustc_codegen_utils::link::out_filename;
|
use rustc_codegen_utils::link::out_filename;
|
||||||
|
|
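
Note: throughout the commit, `CompileIncomplete` gives way to the plain `ErrorReported` marker, as the changed `join_codegen_and_link` signature in this test shows: by the time it is returned, the diagnostics have already been emitted, so the error carries no payload. A stand-in illustrating how callers simply propagate the marker with `?`:

    // Stand-in for rustc::util::common::ErrorReported: a zero-sized marker.
    #[derive(Debug, PartialEq)]
    struct ErrorReported;

    fn join_codegen_and_link(fail: bool) -> Result<(), ErrorReported> {
        if fail {
            // Diagnostics would already have been emitted through the session;
            // the Err value is just a marker that they happened.
            return Err(ErrorReported);
        }
        Ok(())
    }

    fn compile(fail: bool) -> Result<(), ErrorReported> {
        join_codegen_and_link(fail)?; // callers only propagate the marker
        Ok(())
    }

    fn main() {
        assert_eq!(compile(false), Ok(()));
        assert_eq!(compile(true), Err(ErrorReported));
    }
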
|
@@ -1,26 +1,16 @@
 #![feature(rustc_private)]
 
 extern crate rustc;
-extern crate rustc_driver;
-extern crate rustc_lint;
-extern crate rustc_metadata;
-extern crate rustc_errors;
-extern crate rustc_codegen_utils;
 extern crate rustc_interface;
 extern crate syntax;
 
-use rustc::session::{build_session, Session};
+use rustc::session::DiagnosticOutput;
 use rustc::session::config::{Input, Options,
                              OutputType, OutputTypes};
-use rustc_driver::driver::{self, compile_input, CompileController};
-use rustc_metadata::cstore::CStore;
-use rustc_errors::registry::Registry;
-use rustc_interface::util;
+use rustc_interface::interface;
 use syntax::source_map::FileName;
-use rustc_codegen_utils::codegen_backend::CodegenBackend;
 
 use std::path::PathBuf;
-use std::rc::Rc;
 
 fn main() {
     let src = r#"
@@ -44,39 +34,33 @@ fn main() {
     compile(src.to_string(), tmpdir.join("out"), sysroot.clone());
 }
 
-fn basic_sess(opts: Options) -> (Session, Rc<CStore>, Box<CodegenBackend>) {
-    let descriptions = Registry::new(&rustc::DIAGNOSTICS);
-    let sess = build_session(opts, None, descriptions);
-    let codegen_backend = util::get_codegen_backend(&sess);
-    let cstore = Rc::new(CStore::new(codegen_backend.metadata_loader()));
-    rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
-    (sess, cstore, codegen_backend)
-}
-
 fn compile(code: String, output: PathBuf, sysroot: PathBuf) {
-    syntax::with_globals(|| {
-        let mut opts = Options::default();
-        opts.output_types = OutputTypes::new(&[(OutputType::Exe, None)]);
-        opts.maybe_sysroot = Some(sysroot);
+    let mut opts = Options::default();
+    opts.output_types = OutputTypes::new(&[(OutputType::Exe, None)]);
+    opts.maybe_sysroot = Some(sysroot);
     if let Ok(linker) = std::env::var("RUSTC_LINKER") {
         opts.cg.linker = Some(linker.into());
     }
-        driver::spawn_thread_pool(opts, |opts| {
-            let (sess, cstore, codegen_backend) = basic_sess(opts);
-            let control = CompileController::basic();
-            let name = FileName::anon_source_code(&code);
-            let input = Input::Str { name, input: code };
-            let _ = compile_input(
-                codegen_backend,
-                &sess,
-                &cstore,
-                &None,
-                &input,
-                &None,
-                &Some(output),
-                None,
-                &control
-            );
-        });
+    let name = FileName::anon_source_code(&code);
+    let input = Input::Str { name, input: code };
+    let config = interface::Config {
+        opts,
+        crate_cfg: Default::default(),
+        input,
+        input_path: None,
+        output_file: Some(output),
+        output_dir: None,
+        file_loader: None,
+        diagnostic_output: DiagnosticOutput::Default,
+        stderr: None,
+        crate_name: None,
+        lint_caps: Default::default(),
+    };
 
+    interface::run_compiler(config, |compiler| {
+        compiler.compile().ok();
     });
 }
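
Stripped of diff markup, the core of the new API exercised by this test is a single up-front interface::Config paired with a closure-scoped compiler. A sketch built only from the added lines above; the comments are added here, and the field values are exactly those from the hunk:

let config = interface::Config {
    opts,                                         // the Options value built earlier in compile()
    crate_cfg: Default::default(),                // no extra --cfg values for this test
    input,
    input_path: None,
    output_file: Some(output),
    output_dir: None,
    file_loader: None,
    diagnostic_output: DiagnosticOutput::Default, // diagnostics go to stderr as usual
    stderr: None,
    crate_name: None,
    lint_caps: Default::default(),
};

interface::run_compiler(config, |compiler| {
    // compile() drives the whole pipeline on demand; any errors were already
    // emitted through the session, so the Result is simply acknowledged.
    compiler.compile().ok();
});
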
@@ -1,91 +1,30 @@
-// Test that the CompilerCalls interface to the compiler works.
+// Test that the Callbacks interface to the compiler works.
 
 // ignore-cross-compile
 // ignore-stage1
 
 #![feature(rustc_private)]
 
-extern crate getopts;
-extern crate rustc;
 extern crate rustc_driver;
-extern crate rustc_codegen_utils;
-extern crate syntax;
-extern crate rustc_errors as errors;
-extern crate rustc_metadata;
+extern crate rustc_interface;
 
-use rustc::session::Session;
-use rustc::session::config::{self, Input};
-use rustc_driver::{driver, CompilerCalls, Compilation};
-use rustc_codegen_utils::codegen_backend::CodegenBackend;
-use rustc_metadata::cstore::CStore;
-use syntax::ast;
-
-use std::path::PathBuf;
+use rustc_interface::interface;
 
 struct TestCalls<'a> {
     count: &'a mut u32
 }
 
-impl<'a> CompilerCalls<'a> for TestCalls<'a> {
-    fn early_callback(&mut self,
-                      _: &getopts::Matches,
-                      _: &config::Options,
-                      _: &ast::CrateConfig,
-                      _: &errors::registry::Registry,
-                      _: config::ErrorOutputType)
-                      -> Compilation {
+impl rustc_driver::Callbacks for TestCalls<'_> {
+    fn config(&mut self, _config: &mut interface::Config) {
         *self.count *= 2;
-        Compilation::Continue
-    }
-
-    fn late_callback(&mut self,
-                     _: &CodegenBackend,
-                     _: &getopts::Matches,
-                     _: &Session,
-                     _: &CStore,
-                     _: &Input,
-                     _: &Option<PathBuf>,
-                     _: &Option<PathBuf>)
-                     -> Compilation {
-        *self.count *= 3;
-        Compilation::Stop
-    }
-
-    fn some_input(&mut self, input: Input, input_path: Option<PathBuf>)
-                  -> (Input, Option<PathBuf>) {
-        *self.count *= 5;
-        (input, input_path)
-    }
-
-    fn no_input(&mut self,
-                _: &getopts::Matches,
-                _: &config::Options,
-                _: &ast::CrateConfig,
-                _: &Option<PathBuf>,
-                _: &Option<PathBuf>,
-                _: &errors::registry::Registry)
-                -> Option<(Input, Option<PathBuf>)> {
-        panic!("This shouldn't happen");
-    }
-
-    fn build_controller(self: Box<Self>,
-                        _: &Session,
-                        _: &getopts::Matches)
-                        -> driver::CompileController<'a> {
-        panic!("This shouldn't be called");
     }
 }
 
 fn main() {
     let mut count = 1;
-    {
-        let tc = TestCalls { count: &mut count };
-        // we should never get use this filename, but lets make sure they are valid args.
-        let args = vec!["compiler-calls".to_string(), "foo.rs".to_string()];
-        syntax::with_globals(|| {
-            rustc_driver::run_compiler(&args, Box::new(tc), None, None);
-        });
-    }
-    assert_eq!(count, 30);
+    let args = vec!["compiler-calls".to_string(), "foo.rs".to_string()];
+    rustc_driver::report_ices_to_stderr_if_any(|| {
+        rustc_driver::run_compiler(&args, &mut TestCalls { count: &mut count }, None, None).ok();
+    }).ok();
+    assert_eq!(count, 2);
 }
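
Pulled out of the diff, the minimal driver wrapper now looks roughly like this (a sketch using only the calls visible in the hunk; the Counter and run names and the comments are added here for illustration):

struct Counter<'a> { count: &'a mut u32 }   // same shape as the test's TestCalls

impl rustc_driver::Callbacks for Counter<'_> {
    // config is invoked before the compiler session is created and may adjust
    // the interface::Config in place; the test only bumps a counter to prove it ran.
    fn config(&mut self, _config: &mut interface::Config) {
        *self.count *= 2;
    }
}

fn run(args: &[String], count: &mut u32) {
    // report_ices_to_stderr_if_any turns compiler panics into an ICE report on stderr;
    // run_compiler now takes the callbacks by mutable reference instead of a boxed
    // CompilerCalls trait object.
    rustc_driver::report_ices_to_stderr_if_any(|| {
        rustc_driver::run_compiler(args, &mut Counter { count }, None, None).ok();
    }).ok();
}
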
@@ -11,3 +11,5 @@ LL | #![deny(intra_doc_link_resolution_failure)]
 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 = help: to escape `[` and `]` characters, just add '/' before them like `/[` or `/]`
 
+error: aborting due to previous error
+

@@ -18,5 +18,5 @@ error: missing documentation for a struct
 LL | pub struct Foo; //~ ERROR
 | ^^^^^^^^^^^^^^^
 
-error: Compilation failed, aborting rustdoc
+error: aborting due to 2 previous errors
 

@@ -10,5 +10,5 @@ note: lint level defined here
 LL | #![deny(missing_docs)]
 | ^^^^^^^^^^^^
 
-error: Compilation failed, aborting rustdoc
+error: aborting due to previous error
 

@@ -24,3 +24,5 @@ error: Missing code example in this documentation
 LL | /// Or maybe not because she saved herself!
 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
+error: aborting due to 4 previous errors
+

@@ -12,7 +12,10 @@ error[E0425]: cannot find value `no` in this scope
 3 | no
 | ^^ not found in this scope
 
-thread '$DIR/failed-doctest-output.rs - OtherStruct (line 17)' panicked at 'couldn't compile the test', src/librustdoc/test.rs:352:13
+error: aborting due to previous error
 
+For more information about this error, try `rustc --explain E0425`.
+
+thread '$DIR/failed-doctest-output.rs - OtherStruct (line 17)' panicked at 'couldn't compile the test', src/librustdoc/test.rs:310:13
 note: Run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
 
 ---- $DIR/failed-doctest-output.rs - SomeStruct (line 11) stdout ----

@@ -21,7 +24,7 @@ thread '$DIR/failed-doctest-output.rs - SomeStruct (line 11)' panicked at 'test
 thread 'main' panicked at 'oh no', $DIR/failed-doctest-output.rs:3:1
 note: Run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
 
-', src/librustdoc/test.rs:373:17
+', src/librustdoc/test.rs:332:17
 
 
 failures:

@@ -11,3 +11,5 @@ LL | #![deny(intra_doc_link_resolution_failure)]
 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 = help: to escape `[` and `]` characters, just add '/' before them like `/[` or `/]`
 
+error: aborting due to previous error
+

@@ -11,3 +11,5 @@ LL | #![deny(intra_doc_link_resolution_failure)]
 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 = help: to escape `[` and `]` characters, just add '/' before them like `/[` or `/]`
 
+error: aborting due to previous error
+

@@ -42,3 +42,5 @@ LL | #![deny(rustdoc)]
 | ^^^^^^^
 = note: #[deny(missing_doc_code_examples)] implied by #[deny(rustdoc)]
 
+error: aborting due to 3 previous errors
+

@@ -14,3 +14,5 @@ note: lint level defined here
 LL | #![deny(private_doc_tests)]
 | ^^^^^^^^^^^^^^^^^
 
+error: aborting due to previous error
+
@@ -1,15 +1,3 @@
 #![deny(rust_2018_idioms)]
 
-#![feature(link_args)]
-
-#[allow(unused_attributes)]
-// Set the stack size at link time on Windows. See rustc_driver::in_rustc_thread
-// for the rationale.
-#[cfg_attr(all(windows, target_env = "msvc"), link_args = "/STACK:16777216")]
-// We only build for msvc and gnu now, but we use a exhaustive condition here
-// so we can expect either the stack size to be set or the build fails.
-#[cfg_attr(all(windows, not(target_env = "msvc")), link_args = "-Wl,--stack,16777216")]
-// See src/rustc/rustc.rs for the corresponding rustc settings.
-extern {}
-
 fn main() { rustdoc::main() }