Auto merge of #105220 - oli-obk:feeding, r=cjgillot
feed resolver_for_lowering instead of storing it in a field

r? `@cjgillot`

Opening this as:

* a discussion of `no_hash` + `feedable` queries. I think we'll want those, but I don't quite understand why they were rejected, beyond the double check of the stable hashes for situations where a query is fed but also read from the incremental cache.
* a discussion of removing all untracked fields from `TyCtxt` and setting things up so that they are fed queries instead.
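For orientation, here is the shape of the change in miniature, assembled from the hunks below (a condensed sketch, not the complete patch): the `resolver_for_lowering` query is marked `feedable` (it keeps `no_hash`), and `create_global_ctxt` feeds the value into the query system once, instead of `TyCtxt` serving it from an untracked field.

// Query declaration in `rustc_queries!`: `feedable` replaces `eval_always`.
query resolver_for_lowering(_: ()) -> &'tcx Steal<ty::ResolverAstLowering> {
    feedable
    no_hash
    desc { "getting the resolver for lowering" }
}

// In `create_global_ctxt`: feed the value once, right after the global
// context is built, instead of storing it in a `GlobalCtxt` field.
let mut qcx = QueryContext { gcx };
qcx.enter(|tcx| {
    tcx.feed_unit_query()
        .resolver_for_lowering(tcx.arena.alloc(Steal::new(untracked_resolver_for_lowering)))
});
qcx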
Commit ed61c139c2

11 changed files with 86 additions and 57 deletions
compiler/rustc_interface/src/passes.rs

@@ -12,6 +12,7 @@ use rustc_ast::{self as ast, visit};
 use rustc_borrowck as mir_borrowck;
 use rustc_codegen_ssa::traits::CodegenBackend;
 use rustc_data_structures::parallel;
+use rustc_data_structures::steal::Steal;
 use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
 use rustc_errors::{ErrorGuaranteed, PResult};
 use rustc_expand::base::{ExtCtxt, LintStoreExpand, ResolverExpand};
@@ -801,6 +802,12 @@ pub fn create_global_ctxt<'tcx>(
         TcxQueries::new(local_providers, extern_providers, query_result_on_disk_cache)
     });

+    let ty::ResolverOutputs {
+        definitions,
+        global_ctxt: untracked_resolutions,
+        ast_lowering: untracked_resolver_for_lowering,
+    } = resolver_outputs;
+
     let gcx = sess.time("setup_global_ctxt", || {
         global_ctxt.get_or_init(move || {
             TyCtxt::create_global_ctxt(
@@ -808,7 +815,8 @@ pub fn create_global_ctxt<'tcx>(
                 lint_store,
                 arena,
                 hir_arena,
-                resolver_outputs,
+                definitions,
+                untracked_resolutions,
                 krate,
                 dep_graph,
                 queries.on_disk_cache.as_ref().map(OnDiskCache::as_dyn),
@@ -820,7 +828,12 @@ pub fn create_global_ctxt<'tcx>(
         })
     });

-    QueryContext { gcx }
+    let mut qcx = QueryContext { gcx };
+    qcx.enter(|tcx| {
+        tcx.feed_unit_query()
+            .resolver_for_lowering(tcx.arena.alloc(Steal::new(untracked_resolver_for_lowering)))
+    });
+    qcx
 }

 /// Runs the resolution, type-checking, region checking and other
@@ -965,12 +978,10 @@ fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
 pub fn start_codegen<'tcx>(
     codegen_backend: &dyn CodegenBackend,
     tcx: TyCtxt<'tcx>,
-    outputs: &OutputFilenames,
 ) -> Box<dyn Any> {
     info!("Pre-codegen\n{:?}", tcx.debug_stats());

-    let (metadata, need_metadata_module) =
-        rustc_metadata::fs::encode_and_write_metadata(tcx, outputs);
+    let (metadata, need_metadata_module) = rustc_metadata::fs::encode_and_write_metadata(tcx);

     let codegen = tcx.sess.time("codegen_crate", move || {
         codegen_backend.codegen_crate(tcx, metadata, need_metadata_module)
@@ -986,7 +997,7 @@ pub fn start_codegen<'tcx>(
     info!("Post-codegen\n{:?}", tcx.debug_stats());

     if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
-        if let Err(error) = rustc_mir_transform::dump_mir::emit_mir(tcx, outputs) {
+        if let Err(error) = rustc_mir_transform::dump_mir::emit_mir(tcx) {
             tcx.sess.emit_err(CantEmitMIR { error });
             tcx.sess.abort_if_errors();
         }
compiler/rustc_interface/src/queries.rs

@@ -20,6 +20,7 @@ use rustc_span::symbol::sym;
 use std::any::Any;
 use std::cell::{Ref, RefCell, RefMut};
 use std::rc::Rc;
+use std::sync::Arc;

 /// Represent the result of a query.
 ///
@@ -214,7 +215,7 @@ impl<'tcx> Queries<'tcx> {
     pub fn global_ctxt(&'tcx self) -> Result<&Query<QueryContext<'tcx>>> {
         self.global_ctxt.compute(|| {
             let crate_name = self.crate_name()?.peek().clone();
-            let outputs = self.prepare_outputs()?.peek().clone();
+            let outputs = self.prepare_outputs()?.take();
             let dep_graph = self.dep_graph()?.peek().clone();
             let (krate, resolver, lint_store) = self.expansion()?.take();
             Ok(passes::create_global_ctxt(
@@ -235,7 +236,6 @@ impl<'tcx> Queries<'tcx> {

     pub fn ongoing_codegen(&'tcx self) -> Result<&Query<Box<dyn Any>>> {
         self.ongoing_codegen.compute(|| {
-            let outputs = self.prepare_outputs()?;
             self.global_ctxt()?.peek_mut().enter(|tcx| {
                 tcx.analysis(()).ok();

@@ -249,7 +249,7 @@ impl<'tcx> Queries<'tcx> {
                 // Hook for UI tests.
                 Self::check_for_rustc_errors_attr(tcx);

-                Ok(passes::start_codegen(&***self.codegen_backend(), tcx, &*outputs.peek()))
+                Ok(passes::start_codegen(&***self.codegen_backend(), tcx))
             })
         })
     }
@@ -293,8 +293,10 @@ impl<'tcx> Queries<'tcx> {
         let codegen_backend = self.codegen_backend().clone();

         let dep_graph = self.dep_graph()?.peek().clone();
-        let prepare_outputs = self.prepare_outputs()?.take();
-        let crate_hash = self.global_ctxt()?.peek_mut().enter(|tcx| tcx.crate_hash(LOCAL_CRATE));
+        let (crate_hash, prepare_outputs) = self
+            .global_ctxt()?
+            .peek_mut()
+            .enter(|tcx| (tcx.crate_hash(LOCAL_CRATE), tcx.output_filenames(()).clone()));
         let ongoing_codegen = self.ongoing_codegen()?.take();

         Ok(Linker {
@@ -316,7 +318,7 @@ pub struct Linker {

     // compilation outputs
     dep_graph: DepGraph,
-    prepare_outputs: OutputFilenames,
+    prepare_outputs: Arc<OutputFilenames>,
     crate_hash: Svh,
     ongoing_codegen: Box<dyn Any>,
 }
compiler/rustc_macros/src/query.rs

@@ -364,10 +364,6 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
                 modifiers.eval_always.is_none(),
                 "Query {name} cannot be both `feedable` and `eval_always`."
             );
-            assert!(
-                modifiers.no_hash.is_none(),
-                "Query {name} cannot be both `feedable` and `no_hash`."
-            );
             feedable_queries.extend(quote! {
                 #(#doc_comments)*
                 [#attribute_stream] fn #name(#arg) #result,
compiler/rustc_metadata/src/fs.rs

@@ -6,7 +6,7 @@ use crate::{encode_metadata, EncodedMetadata};
 use rustc_data_structures::temp_dir::MaybeTempDir;
 use rustc_hir::def_id::LOCAL_CRATE;
 use rustc_middle::ty::TyCtxt;
-use rustc_session::config::{CrateType, OutputFilenames, OutputType};
+use rustc_session::config::{CrateType, OutputType};
 use rustc_session::output::filename_for_metadata;
 use rustc_session::Session;
 use tempfile::Builder as TempFileBuilder;
@@ -38,10 +38,7 @@ pub fn emit_wrapper_file(
     out_filename
 }

-pub fn encode_and_write_metadata(
-    tcx: TyCtxt<'_>,
-    outputs: &OutputFilenames,
-) -> (EncodedMetadata, bool) {
+pub fn encode_and_write_metadata(tcx: TyCtxt<'_>) -> (EncodedMetadata, bool) {
     #[derive(PartialEq, Eq, PartialOrd, Ord)]
     enum MetadataKind {
         None,
@@ -64,7 +61,8 @@ pub fn encode_and_write_metadata(
         .unwrap_or(MetadataKind::None);

     let crate_name = tcx.crate_name(LOCAL_CRATE);
-    let out_filename = filename_for_metadata(tcx.sess, crate_name.as_str(), outputs);
+    let out_filename =
+        filename_for_metadata(tcx.sess, crate_name.as_str(), tcx.output_filenames(()));
     // To avoid races with another rustc process scanning the output directory,
     // we need to write the file somewhere else and atomically move it to its
     // final destination, with an `fs::rename` call. In order for the rename to
compiler/rustc_middle/src/arena.rs

@@ -28,6 +28,7 @@ macro_rules! arena_types {
            [decode] typeck_results: rustc_middle::ty::TypeckResults<'tcx>,
            [decode] borrowck_result:
                rustc_middle::mir::BorrowCheckResult<'tcx>,
+           [] resolver: rustc_data_structures::steal::Steal<rustc_middle::ty::ResolverAstLowering>,
            [decode] unsafety_check_result: rustc_middle::mir::UnsafetyCheckResult,
            [decode] code_region: rustc_middle::mir::coverage::CodeRegion,
            [] const_allocs: rustc_middle::mir::interpret::Allocation,
compiler/rustc_middle/src/query/mod.rs

@@ -33,7 +33,7 @@ rustc_queries! {
     }

     query resolver_for_lowering(_: ()) -> &'tcx Steal<ty::ResolverAstLowering> {
-        eval_always
+        feedable
         no_hash
         desc { "getting the resolver for lowering" }
     }
compiler/rustc_middle/src/ty/context.rs

@@ -81,7 +81,7 @@ use std::mem;
 use std::ops::{Bound, Deref};
 use std::sync::Arc;

-use super::{ImplPolarity, ResolverOutputs, RvalueScopes};
+use super::{ImplPolarity, RvalueScopes};

 pub trait OnDiskCache<'tcx>: rustc_data_structures::sync::Sync {
     /// Creates a new `OnDiskCache` instance from the serialized data in `data`.
@@ -1034,16 +1034,29 @@ pub struct FreeRegionInfo {

 /// This struct should only be created by `create_def`.
 #[derive(Copy, Clone)]
-pub struct TyCtxtFeed<'tcx> {
+pub struct TyCtxtFeed<'tcx, KEY: Copy> {
     pub tcx: TyCtxt<'tcx>,
     // Do not allow direct access, as downstream code must not mutate this field.
-    def_id: LocalDefId,
+    key: KEY,
 }

-impl<'tcx> TyCtxtFeed<'tcx> {
+impl<'tcx> TyCtxt<'tcx> {
+    pub fn feed_unit_query(self) -> TyCtxtFeed<'tcx, ()> {
+        TyCtxtFeed { tcx: self, key: () }
+    }
+}
+
+impl<'tcx, KEY: Copy> TyCtxtFeed<'tcx, KEY> {
+    #[inline(always)]
+    pub fn key(&self) -> KEY {
+        self.key
+    }
+}
+
+impl<'tcx> TyCtxtFeed<'tcx, LocalDefId> {
     #[inline(always)]
     pub fn def_id(&self) -> LocalDefId {
-        self.def_id
+        self.key
     }
 }

@@ -1099,7 +1112,6 @@ pub struct GlobalCtxt<'tcx> {

     /// Output of the resolver.
     pub(crate) untracked_resolutions: ty::ResolverGlobalCtxt,
-    untracked_resolver_for_lowering: Steal<ty::ResolverAstLowering>,
     /// The entire crate as AST. This field serves as the input for the hir_crate query,
     /// which lowers it from AST to HIR. It must not be read or used by anything else.
     pub untracked_crate: Steal<Lrc<ast::Crate>>,
@@ -1262,7 +1274,8 @@ impl<'tcx> TyCtxt<'tcx> {
         lint_store: Lrc<dyn Any + sync::Send + sync::Sync>,
         arena: &'tcx WorkerLocal<Arena<'tcx>>,
         hir_arena: &'tcx WorkerLocal<hir::Arena<'tcx>>,
-        resolver_outputs: ResolverOutputs,
+        definitions: Definitions,
+        untracked_resolutions: ty::ResolverGlobalCtxt,
         krate: Lrc<ast::Crate>,
         dep_graph: DepGraph,
         on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>,
@@ -1271,11 +1284,6 @@ impl<'tcx> TyCtxt<'tcx> {
         crate_name: &str,
         output_filenames: OutputFilenames,
     ) -> GlobalCtxt<'tcx> {
-        let ResolverOutputs {
-            definitions,
-            global_ctxt: untracked_resolutions,
-            ast_lowering: untracked_resolver_for_lowering,
-        } = resolver_outputs;
         let data_layout = s.target.parse_data_layout().unwrap_or_else(|err| {
             s.emit_fatal(err);
         });
@@ -1304,7 +1312,6 @@ impl<'tcx> TyCtxt<'tcx> {
             lifetimes: common_lifetimes,
             consts: common_consts,
             untracked_resolutions,
-            untracked_resolver_for_lowering: Steal::new(untracked_resolver_for_lowering),
             untracked_crate: Steal::new(krate),
             on_disk_cache,
             queries,
@@ -1515,7 +1522,7 @@ impl<'tcx> TyCtxtAt<'tcx> {
         self,
         parent: LocalDefId,
         data: hir::definitions::DefPathData,
-    ) -> TyCtxtFeed<'tcx> {
+    ) -> TyCtxtFeed<'tcx, LocalDefId> {
         // This function modifies `self.definitions` using a side-effect.
         // We need to ensure that these side effects are re-run by the incr. comp. engine.
         // Depending on the forever-red node will tell the graph that the calling query
@@ -1536,9 +1543,9 @@ impl<'tcx> TyCtxtAt<'tcx> {
         // This is fine because:
         // - those queries are `eval_always` so we won't miss their result changing;
         // - this write will have happened before these queries are called.
-        let def_id = self.definitions.write().create_def(parent, data);
+        let key = self.definitions.write().create_def(parent, data);

-        let feed = TyCtxtFeed { tcx: self.tcx, def_id };
+        let feed = TyCtxtFeed { tcx: self.tcx, key };
         feed.def_span(self.span);
         feed
     }
@@ -3107,7 +3114,6 @@ fn ptr_eq<T, U>(t: *const T, u: *const U) -> bool {

 pub fn provide(providers: &mut ty::query::Providers) {
     providers.resolutions = |tcx, ()| &tcx.untracked_resolutions;
-    providers.resolver_for_lowering = |tcx, ()| &tcx.untracked_resolver_for_lowering;
     providers.module_reexports =
         |tcx, id| tcx.resolutions(()).reexport_map.get(&id).map(|v| &v[..]);
     providers.crate_name = |tcx, id| {
compiler/rustc_middle/src/ty/mod.rs

@@ -82,8 +82,8 @@ pub use self::consts::{
 pub use self::context::{
     tls, CanonicalUserType, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations,
     CtxtInterners, DeducedParamAttrs, FreeRegionInfo, GeneratorDiagnosticData,
-    GeneratorInteriorTypeCause, GlobalCtxt, Lift, OnDiskCache, TyCtxt, TypeckResults, UserType,
-    UserTypeAnnotationIndex,
+    GeneratorInteriorTypeCause, GlobalCtxt, Lift, OnDiskCache, TyCtxt, TyCtxtFeed, TypeckResults,
+    UserType, UserTypeAnnotationIndex,
 };
 pub use self::instance::{Instance, InstanceDef, ShortInstance};
 pub use self::list::List;
compiler/rustc_middle/src/ty/query.rs

@@ -328,13 +328,25 @@ macro_rules! define_callbacks {
     };
 }

+macro_rules! hash_result {
+    ([]) => {{
+        Some(dep_graph::hash_result)
+    }};
+    ([(no_hash) $($rest:tt)*]) => {{
+        None
+    }};
+    ([$other:tt $($modifiers:tt)*]) => {
+        hash_result!([$($modifiers)*])
+    };
+}
+
 macro_rules! define_feedable {
     ($($(#[$attr:meta])* [$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty,)*) => {
-        impl<'tcx> TyCtxtFeed<'tcx> {
-            $($(#[$attr])*
+        $(impl<'tcx, K: IntoQueryParam<$($K)*> + Copy> TyCtxtFeed<'tcx, K> {
+            $(#[$attr])*
             #[inline(always)]
             pub fn $name(self, value: $V) -> query_stored::$name<'tcx> {
-                let key = self.def_id().into_query_param();
+                let key = self.key().into_query_param();
                 opt_remap_env_constness!([$($modifiers)*][key]);

                 let tcx = self.tcx;
@@ -358,11 +370,11 @@ macro_rules! define_feedable {
                     tcx,
                     key,
                     &value,
-                    dep_graph::hash_result,
+                    hash_result!([$($modifiers)*]),
                 );
                 cache.complete(key, value, dep_node_index)
-            })*
             }
+        })*
     }
 }

compiler/rustc_mir_transform/src/dump_mir.rs

@@ -7,7 +7,7 @@ use crate::MirPass;
 use rustc_middle::mir::write_mir_pretty;
 use rustc_middle::mir::Body;
 use rustc_middle::ty::TyCtxt;
-use rustc_session::config::{OutputFilenames, OutputType};
+use rustc_session::config::OutputType;

 pub struct Marker(pub &'static str);

@@ -19,8 +19,8 @@ impl<'tcx> MirPass<'tcx> for Marker {
     fn run_pass(&self, _tcx: TyCtxt<'tcx>, _body: &mut Body<'tcx>) {}
 }

-pub fn emit_mir(tcx: TyCtxt<'_>, outputs: &OutputFilenames) -> io::Result<()> {
-    let path = outputs.path(OutputType::Mir);
+pub fn emit_mir(tcx: TyCtxt<'_>) -> io::Result<()> {
+    let path = tcx.output_filenames(()).path(OutputType::Mir);
     let mut f = io::BufWriter::new(File::create(&path)?);
     write_mir_pretty(tcx, None, &mut f)?;
     Ok(())
compiler/rustc_query_system/src/dep_graph/graph.rs

@@ -510,7 +510,7 @@ impl<K: DepKind> DepGraph<K> {
         cx: Ctxt,
         key: A,
         result: &R,
-        hash_result: fn(&mut StableHashingContext<'_>, &R) -> Fingerprint,
+        hash_result: Option<fn(&mut StableHashingContext<'_>, &R) -> Fingerprint>,
     ) -> DepNodeIndex {
         if let Some(data) = self.data.as_ref() {
             // The caller query has more dependencies than the node we are creating. We may
@@ -521,10 +521,12 @@ impl<K: DepKind> DepGraph<K> {
             // For sanity, we still check that the loaded stable hash and the new one match.
             if let Some(dep_node_index) = self.dep_node_index_of_opt(&node) {
                 let _current_fingerprint =
-                    crate::query::incremental_verify_ich(cx, result, &node, Some(hash_result));
+                    crate::query::incremental_verify_ich(cx, result, &node, hash_result);

                 #[cfg(debug_assertions)]
+                if hash_result.is_some() {
                     data.current.record_edge(dep_node_index, node, _current_fingerprint);
+                }

                 return dep_node_index;
             }
@@ -539,8 +541,9 @@ impl<K: DepKind> DepGraph<K> {
             });

             let hashing_timer = cx.profiler().incr_result_hashing();
-            let current_fingerprint =
-                cx.with_stable_hashing_context(|mut hcx| hash_result(&mut hcx, result));
+            let current_fingerprint = hash_result.map(|hash_result| {
+                cx.with_stable_hashing_context(|mut hcx| hash_result(&mut hcx, result))
+            });

             let print_status = cfg!(debug_assertions) && cx.sess().opts.unstable_opts.dep_tasks;

@@ -550,7 +553,7 @@ impl<K: DepKind> DepGraph<K> {
                 &data.previous,
                 node,
                 edges,
-                Some(current_fingerprint),
+                current_fingerprint,
                 print_status,
             );
