Auto merge of #32016 - nikomatsakis:incr-comp-save, r=mw

Save/load incremental compilation dep graph

Contains the code to serialize/deserialize the dep graph to disk between executions. We also hash the item contents and compare to the new hashes. Also includes a unit test harness. There are definitely some known limitations, such as https://github.com/rust-lang/rust/issues/32014 and https://github.com/rust-lang/rust/issues/32015, but I am leaving those for follow-up work.

Note that this PR builds on https://github.com/rust-lang/rust/pull/32007, so the overlapping commits can be excluded from review.

r? @michaelwoerister
This commit is contained in:
bors 2016-04-07 10:55:37 -07:00
commit 7979dd6089
56 changed files with 2053 additions and 712 deletions

View file

@ -58,7 +58,7 @@ RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_
rustc_trans rustc_back rustc_llvm rustc_privacy rustc_lint \
rustc_data_structures rustc_platform_intrinsics \
rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \
rustc_const_eval rustc_const_math
rustc_const_eval rustc_const_math rustc_incremental
HOST_CRATES := syntax syntax_ext $(RUSTC_CRATES) rustdoc fmt_macros \
flate arena graphviz rbml log serialize
TOOLS := compiletest rustdoc rustc rustbook error_index_generator
@ -105,7 +105,8 @@ DEPS_rustc_data_structures := std log serialize
DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \
rustc_typeck rustc_mir rustc_resolve log syntax serialize rustc_llvm \
rustc_trans rustc_privacy rustc_lint rustc_plugin \
rustc_metadata syntax_ext rustc_passes rustc_save_analysis rustc_const_eval
rustc_metadata syntax_ext rustc_passes rustc_save_analysis rustc_const_eval \
rustc_incremental
DEPS_rustc_lint := rustc log syntax rustc_const_eval
DEPS_rustc_llvm := native:rustllvm libc std rustc_bitflags
DEPS_rustc_metadata := rustc syntax rbml rustc_const_math
@ -117,7 +118,8 @@ DEPS_rustc_plugin := rustc rustc_metadata syntax rustc_mir
DEPS_rustc_privacy := rustc log syntax
DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back rustc_mir \
log syntax serialize rustc_llvm rustc_platform_intrinsics \
rustc_const_math rustc_const_eval
rustc_const_math rustc_const_eval rustc_incremental
DEPS_rustc_incremental := rbml rustc serialize rustc_data_structures
DEPS_rustc_save_analysis := rustc log syntax
DEPS_rustc_typeck := rustc syntax rustc_platform_intrinsics rustc_const_math \
rustc_const_eval

View file

@ -305,6 +305,7 @@ check-stage$(1)-T-$(2)-H-$(3)-exec: \
check-stage$(1)-T-$(2)-H-$(3)-doc-crates-exec \
check-stage$(1)-T-$(2)-H-$(3)-debuginfo-gdb-exec \
check-stage$(1)-T-$(2)-H-$(3)-debuginfo-lldb-exec \
check-stage$(1)-T-$(2)-H-$(3)-incremental-exec \
check-stage$(1)-T-$(2)-H-$(3)-doc-exec \
check-stage$(1)-T-$(2)-H-$(3)-pretty-exec
@ -481,6 +482,7 @@ DEBUGINFO_LLDB_RS := $(call rwildcard,$(S)src/test/debuginfo/,*.rs)
CODEGEN_RS := $(call rwildcard,$(S)src/test/codegen/,*.rs)
CODEGEN_CC := $(call rwildcard,$(S)src/test/codegen/,*.cc)
CODEGEN_UNITS_RS := $(call rwildcard,$(S)src/test/codegen-units/,*.rs)
INCREMENTAL_RS := $(call rwildcard,$(S)src/test/incremental/,*.rs)
RUSTDOCCK_RS := $(call rwildcard,$(S)src/test/rustdoc/,*.rs)
RPASS_TESTS := $(RPASS_RS)
@ -496,6 +498,7 @@ DEBUGINFO_GDB_TESTS := $(DEBUGINFO_GDB_RS)
DEBUGINFO_LLDB_TESTS := $(DEBUGINFO_LLDB_RS)
CODEGEN_TESTS := $(CODEGEN_RS) $(CODEGEN_CC)
CODEGEN_UNITS_TESTS := $(CODEGEN_UNITS_RS)
INCREMENTAL_TESTS := $(INCREMENTAL_RS)
RUSTDOCCK_TESTS := $(RUSTDOCCK_RS)
CTEST_SRC_BASE_rpass = run-pass
@ -558,6 +561,11 @@ CTEST_BUILD_BASE_codegen-units = codegen-units
CTEST_MODE_codegen-units = codegen-units
CTEST_RUNTOOL_codegen-units = $(CTEST_RUNTOOL)
CTEST_SRC_BASE_incremental = incremental
CTEST_BUILD_BASE_incremental = incremental
CTEST_MODE_incremental = incremental
CTEST_RUNTOOL_incremental = $(CTEST_RUNTOOL)
CTEST_SRC_BASE_rustdocck = rustdoc
CTEST_BUILD_BASE_rustdocck = rustdoc
CTEST_MODE_rustdocck = rustdoc
@ -681,6 +689,7 @@ CTEST_DEPS_debuginfo-lldb_$(1)-T-$(2)-H-$(3) = $$(DEBUGINFO_LLDB_TESTS) \
$(S)src/etc/lldb_rust_formatters.py
CTEST_DEPS_codegen_$(1)-T-$(2)-H-$(3) = $$(CODEGEN_TESTS)
CTEST_DEPS_codegen-units_$(1)-T-$(2)-H-$(3) = $$(CODEGEN_UNITS_TESTS)
CTEST_DEPS_incremental_$(1)-T-$(2)-H-$(3) = $$(INCREMENTAL_TESTS)
CTEST_DEPS_rustdocck_$(1)-T-$(2)-H-$(3) = $$(RUSTDOCCK_TESTS) \
$$(HBIN$(1)_H_$(3))/rustdoc$$(X_$(3)) \
$(S)src/etc/htmldocck.py
@ -747,7 +756,7 @@ endif
endef
CTEST_NAMES = rpass rpass-valgrind rpass-full rfail-full cfail-full rfail cfail pfail \
debuginfo-gdb debuginfo-lldb codegen codegen-units rustdocck
debuginfo-gdb debuginfo-lldb codegen codegen-units rustdocck incremental
$(foreach host,$(CFG_HOST), \
$(eval $(foreach target,$(CFG_TARGET), \
@ -945,6 +954,7 @@ TEST_GROUPS = \
debuginfo-lldb \
codegen \
codegen-units \
incremental \
doc \
$(foreach docname,$(DOC_NAMES),doc-$(docname)) \
pretty \

View file

@ -25,7 +25,8 @@ pub enum Mode {
DebugInfoLldb,
Codegen,
Rustdoc,
CodegenUnits
CodegenUnits,
Incremental,
}
impl FromStr for Mode {
@ -43,6 +44,7 @@ impl FromStr for Mode {
"codegen" => Ok(Codegen),
"rustdoc" => Ok(Rustdoc),
"codegen-units" => Ok(CodegenUnits),
"incremental" => Ok(Incremental),
_ => Err(()),
}
}
@ -62,6 +64,7 @@ impl fmt::Display for Mode {
Codegen => "codegen",
Rustdoc => "rustdoc",
CodegenUnits => "codegen-units",
Incremental => "incremental",
}, f)
}
}

View file

@ -71,7 +71,8 @@ pub fn parse_config(args: Vec<String> ) -> Config {
reqopt("", "aux-base", "directory to find auxiliary test files", "PATH"),
reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET"),
reqopt("", "mode", "which sort of compile tests to run",
"(compile-fail|parse-fail|run-fail|run-pass|run-pass-valgrind|pretty|debug-info)"),
"(compile-fail|parse-fail|run-fail|run-pass|\
run-pass-valgrind|pretty|debug-info|incremental)"),
optflag("", "ignored", "run tests marked as ignored"),
optopt("", "runtool", "supervisor program to run tests under \
(eg. emulator, valgrind)", "PROGRAM"),

View file

@ -11,6 +11,7 @@
use common::Config;
use common::{CompileFail, ParseFail, Pretty, RunFail, RunPass, RunPassValgrind};
use common::{Codegen, DebugInfoLldb, DebugInfoGdb, Rustdoc, CodegenUnits};
use common::{Incremental};
use errors::{self, ErrorKind};
use header::TestProps;
use header;
@ -59,6 +60,7 @@ pub fn run(config: Config, testpaths: &TestPaths) {
Codegen => run_codegen_test(&config, &props, &testpaths),
Rustdoc => run_rustdoc_test(&config, &props, &testpaths),
CodegenUnits => run_codegen_units_test(&config, &props, &testpaths),
Incremental => run_incremental_test(&config, &props, &testpaths),
}
}
@ -1966,3 +1968,67 @@ fn run_codegen_units_test(config: &Config, props: &TestProps, testpaths: &TestPa
panic!();
}
}
/// Drives an "incremental" mode test: the same source file is compiled
/// once per revision listed in the test's header, all revisions sharing a
/// single incremental-compilation directory, so later revisions exercise
/// the dep-graph state saved by earlier ones.
fn run_incremental_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
// Basic plan for a test incremental/foo/bar.rs:
// - load list of revisions pass1, fail2, pass3
// - each should begin with `rpass`, `rfail`, or `cfail`
// - if `rpass`, expect compile and execution to succeed
// - if `cfail`, expect compilation to fail
// - if `rfail`, expect execution to fail
// - create a directory build/foo/bar.incremental
// - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C pass1
// - because name of revision starts with "pass", expect success
// - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C fail2
// - because name of revision starts with "fail", expect an error
// - load expected errors as usual, but filter for those that end in `[fail2]`
// - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C pass3
// - because name of revision starts with "pass", expect success
// - execute build/foo/bar.exe and save output
//
// FIXME -- use non-incremental mode as an oracle? That doesn't apply
// to #[rustc_dirty] and clean tests I guess
assert!(!props.revisions.is_empty(), "incremental tests require a list of revisions");
let output_base_name = output_base_name(config, testpaths);
// Create the incremental workproduct directory.
// Any existing directory from a previous run is wiped first so each test
// run starts from a clean (empty) incremental cache.
let incremental_dir = output_base_name.with_extension("incremental");
if incremental_dir.exists() {
fs::remove_dir_all(&incremental_dir).unwrap();
}
fs::create_dir_all(&incremental_dir).unwrap();
if config.verbose {
print!("incremental_dir={}", incremental_dir.display());
}
for revision in &props.revisions {
// Each revision starts from a fresh clone of the base test props and
// then layers on the revision-specific header directives (e.g.
// `//[pass2]~ ERROR ...`) for this revision name only.
let mut revision_props = props.clone();
header::load_props_into(&mut revision_props, &testpaths.file, Some(&revision));
// `--cfg <revision>` lets the test source use #[cfg(...)] to vary
// between revisions; `-Z incremental=` points every compile at the
// shared cache directory created above.
revision_props.compile_flags.extend(vec![
format!("-Z"),
format!("incremental={}", incremental_dir.display()),
format!("--cfg"),
format!("{}", revision),
]);
if config.verbose {
print!("revision={:?} revision_props={:#?}", revision, revision_props);
}
// The revision-name prefix encodes the expected outcome, mirroring the
// run-pass / run-fail / compile-fail test modes.
if revision.starts_with("rpass") {
run_rpass_test_revision(config, &revision_props, testpaths, Some(&revision));
} else if revision.starts_with("rfail") {
run_rfail_test_revision(config, &revision_props, testpaths, Some(&revision));
} else if revision.starts_with("cfail") {
run_cfail_test_revision(config, &revision_props, testpaths, Some(&revision));
} else {
fatal(
Some(revision),
"revision name must begin with rpass, rfail, or cfail");
}
}
}

View file

@ -0,0 +1,207 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt::Debug;
/// A node in the dependency graph. Each node identifies either a piece of
/// input data (e.g. an HIR item), a compiler pass, or a computed result
/// stored in a tcx table. The graph's edges record which nodes were read
/// while computing which other nodes.
///
/// The node is generic over `D` so the same enum can be used both in
/// memory (with `DefId`) and in the serialized on-disk form (with
/// `DefPath`); see `map_def` for the conversion.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum DepNode<D: Clone + Debug> {
// The `D` type is "how definitions are identified".
// During compilation, it is always `DefId`, but when serializing
// it is mapped to `DefPath`.
// Represents the `Krate` as a whole (the `hir::Krate` value) (as
// distinct from the krate module). This is basically a hash of
// the entire krate, so if you read from `Krate` (e.g., by calling
// `tcx.map.krate()`), we will have to assume that any change
// means that you need to be recompiled. This is because the
// `Krate` value gives you access to all other items. To avoid
// this fate, do not call `tcx.map.krate()`; instead, prefer
// wrappers like `tcx.visit_all_items_in_krate()`. If there is no
// suitable wrapper, you can use `tcx.dep_graph.ignore()` to gain
// access to the krate, but you must remember to add suitable
// edges yourself for the individual items that you read.
Krate,
// Represents the HIR node with the given node-id
Hir(D),
// Represents different phases in the compiler.
// Variants without a `D` payload are whole-crate passes; variants
// carrying a `D` are per-definition passes.
CrateReader,
CollectLanguageItems,
CheckStaticRecursion,
ResolveLifetimes,
RegionResolveCrate,
CheckLoops,
PluginRegistrar,
StabilityIndex,
CollectItem(D),
Coherence,
EffectCheck,
Liveness,
Resolve,
EntryPoint,
CheckEntryFn,
CoherenceCheckImpl(D),
CoherenceOverlapCheck(D),
CoherenceOverlapCheckSpecial(D),
CoherenceOverlapInherentCheck(D),
CoherenceOrphanCheck(D),
Variance,
WfCheck(D),
TypeckItemType(D),
TypeckItemBody(D),
Dropck,
DropckImpl(D),
CheckConst(D),
Privacy,
IntrinsicCheck(D),
MatchCheck(D),
MirMapConstruction(D),
MirTypeck(D),
BorrowCheck(D),
RvalueCheck(D),
Reachability,
DeadCheck,
StabilityCheck,
LateLintCheck,
IntrinsicUseCheck,
TransCrate,
TransCrateItem(D),
TransInlinedItem(D),
TransWriteMetadata,
// Nodes representing bits of computed IR in the tcx. Each shared
// table in the tcx (or elsewhere) maps to one of these
// nodes. Often we map multiple tables to the same node if there
// is no point in distinguishing them (e.g., both the type and
// predicates for an item wind up in `ItemSignature`). Other
// times, such as `ImplItems` vs `TraitItemDefIds`, tables which
// might be mergable are kept distinct because the sets of def-ids
// to which they apply are disjoint, and hence we might as well
// have distinct labels for easier debugging.
ImplOrTraitItems(D),
ItemSignature(D),
FieldTy(D),
TraitItemDefIds(D),
InherentImpls(D),
ImplItems(D),
// The set of impls for a given trait. Ultimately, it would be
// nice to get more fine-grained here (e.g., to include a
// simplified type), but we can't do that until we restructure the
// HIR to distinguish the *header* of an impl from its body. This
// is because changes to the header may change the self-type of
// the impl and hence would require us to be more conservative
// than changes in the impl body.
TraitImpls(D),
// Nodes representing caches. To properly handle a true cache, we
// don't use a DepTrackingMap, but rather we push a task node.
// Otherwise the write into the map would be incorrectly
// attributed to the first task that happened to fill the cache,
// which would yield an overly conservative dep-graph.
TraitItems(D),
ReprHints(D),
TraitSelect(D),
}
impl<D: Clone + Debug> DepNode<D> {
/// Used in testing: parses a label string such as `"TypeckItemBody"`
/// (as written in `#[rustc_dirty]`/`#[rustc_clean]` attributes) into
/// the corresponding `DepNode` variant, attaching `data` as its
/// payload. Returns `Err(())` for labels that are not in the list
/// below — note that only data-carrying variants are accepted here.
pub fn from_label_string(label: &str, data: D) -> Result<DepNode<D>, ()> {
// The macro expands the variant list into a `match` on `label`,
// comparing against each variant's stringified name.
macro_rules! check {
($($name:ident,)*) => {
match label {
$(stringify!($name) => Ok(DepNode::$name(data)),)*
_ => Err(())
}
}
}
check! {
CollectItem,
BorrowCheck,
TransCrateItem,
TypeckItemType,
TypeckItemBody,
ImplOrTraitItems,
ItemSignature,
FieldTy,
TraitItemDefIds,
InherentImpls,
ImplItems,
TraitImpls,
ReprHints,
}
}
/// Converts a `DepNode<D>` into a `DepNode<E>` by applying `op` to the
/// payload (e.g. mapping `DefId` to `DefPath` for serialization, or
/// back again on load). Payload-less variants map to themselves;
/// returns `None` as soon as `op` fails on a payload, signalling that
/// the node cannot be translated (e.g. the def no longer exists).
pub fn map_def<E, OP>(&self, mut op: OP) -> Option<DepNode<E>>
where OP: FnMut(&D) -> Option<E>, E: Clone + Debug
{
use self::DepNode::*;
match *self {
Krate => Some(Krate),
CrateReader => Some(CrateReader),
CollectLanguageItems => Some(CollectLanguageItems),
CheckStaticRecursion => Some(CheckStaticRecursion),
ResolveLifetimes => Some(ResolveLifetimes),
RegionResolveCrate => Some(RegionResolveCrate),
CheckLoops => Some(CheckLoops),
PluginRegistrar => Some(PluginRegistrar),
StabilityIndex => Some(StabilityIndex),
Coherence => Some(Coherence),
EffectCheck => Some(EffectCheck),
Liveness => Some(Liveness),
Resolve => Some(Resolve),
EntryPoint => Some(EntryPoint),
CheckEntryFn => Some(CheckEntryFn),
Variance => Some(Variance),
Dropck => Some(Dropck),
Privacy => Some(Privacy),
Reachability => Some(Reachability),
DeadCheck => Some(DeadCheck),
StabilityCheck => Some(StabilityCheck),
LateLintCheck => Some(LateLintCheck),
IntrinsicUseCheck => Some(IntrinsicUseCheck),
TransCrate => Some(TransCrate),
TransWriteMetadata => Some(TransWriteMetadata),
Hir(ref d) => op(d).map(Hir),
CollectItem(ref d) => op(d).map(CollectItem),
CoherenceCheckImpl(ref d) => op(d).map(CoherenceCheckImpl),
CoherenceOverlapCheck(ref d) => op(d).map(CoherenceOverlapCheck),
CoherenceOverlapCheckSpecial(ref d) => op(d).map(CoherenceOverlapCheckSpecial),
CoherenceOverlapInherentCheck(ref d) => op(d).map(CoherenceOverlapInherentCheck),
CoherenceOrphanCheck(ref d) => op(d).map(CoherenceOrphanCheck),
WfCheck(ref d) => op(d).map(WfCheck),
TypeckItemType(ref d) => op(d).map(TypeckItemType),
TypeckItemBody(ref d) => op(d).map(TypeckItemBody),
DropckImpl(ref d) => op(d).map(DropckImpl),
CheckConst(ref d) => op(d).map(CheckConst),
IntrinsicCheck(ref d) => op(d).map(IntrinsicCheck),
MatchCheck(ref d) => op(d).map(MatchCheck),
MirMapConstruction(ref d) => op(d).map(MirMapConstruction),
MirTypeck(ref d) => op(d).map(MirTypeck),
BorrowCheck(ref d) => op(d).map(BorrowCheck),
RvalueCheck(ref d) => op(d).map(RvalueCheck),
TransCrateItem(ref d) => op(d).map(TransCrateItem),
TransInlinedItem(ref d) => op(d).map(TransInlinedItem),
ImplOrTraitItems(ref d) => op(d).map(ImplOrTraitItems),
ItemSignature(ref d) => op(d).map(ItemSignature),
FieldTy(ref d) => op(d).map(FieldTy),
TraitItemDefIds(ref d) => op(d).map(TraitItemDefIds),
InherentImpls(ref d) => op(d).map(InherentImpls),
ImplItems(ref d) => op(d).map(ImplItems),
TraitImpls(ref d) => op(d).map(TraitImpls),
TraitItems(ref d) => op(d).map(TraitItems),
ReprHints(ref d) => op(d).map(ReprHints),
TraitSelect(ref d) => op(d).map(TraitSelect),
}
}
}

View file

@ -8,6 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use hir::def_id::DefId;
use rustc_data_structures::fnv::FnvHashMap;
use std::cell::RefCell;
use std::ops::Index;
@ -29,7 +30,7 @@ pub struct DepTrackingMap<M: DepTrackingMapConfig> {
pub trait DepTrackingMapConfig {
type Key: Eq + Hash + Clone;
type Value: Clone;
fn to_dep_node(key: &Self::Key) -> DepNode;
fn to_dep_node(key: &Self::Key) -> DepNode<DefId>;
}
impl<M: DepTrackingMapConfig> DepTrackingMap<M> {

View file

@ -9,11 +9,13 @@
// except according to those terms.
use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet};
use std::fmt::Debug;
use std::hash::Hash;
use super::{DepGraphQuery, DepNode};
pub struct DepGraphEdges {
nodes: Vec<DepNode>,
indices: FnvHashMap<DepNode, IdIndex>,
pub struct DepGraphEdges<D: Clone + Debug + Eq + Hash> {
nodes: Vec<DepNode<D>>,
indices: FnvHashMap<DepNode<D>, IdIndex>,
edges: FnvHashSet<(IdIndex, IdIndex)>,
open_nodes: Vec<OpenNode>,
}
@ -40,8 +42,8 @@ enum OpenNode {
Ignore,
}
impl DepGraphEdges {
pub fn new() -> DepGraphEdges {
impl<D: Clone + Debug + Eq + Hash> DepGraphEdges<D> {
pub fn new() -> DepGraphEdges<D> {
DepGraphEdges {
nodes: vec![],
indices: FnvHashMap(),
@ -50,12 +52,12 @@ impl DepGraphEdges {
}
}
fn id(&self, index: IdIndex) -> DepNode {
self.nodes[index.index()]
fn id(&self, index: IdIndex) -> DepNode<D> {
self.nodes[index.index()].clone()
}
/// Creates a node for `id` in the graph.
fn make_node(&mut self, id: DepNode) -> IdIndex {
fn make_node(&mut self, id: DepNode<D>) -> IdIndex {
if let Some(&i) = self.indices.get(&id) {
return i;
}
@ -80,7 +82,7 @@ impl DepGraphEdges {
assert_eq!(popped_node, OpenNode::Ignore);
}
pub fn push_task(&mut self, key: DepNode) {
pub fn push_task(&mut self, key: DepNode<D>) {
let top_node = self.current_node();
let new_node = self.make_node(key);
@ -93,7 +95,7 @@ impl DepGraphEdges {
}
}
pub fn pop_task(&mut self, key: DepNode) {
pub fn pop_task(&mut self, key: DepNode<D>) {
let popped_node = self.open_nodes.pop().unwrap();
assert_eq!(OpenNode::Node(self.indices[&key]), popped_node);
}
@ -101,7 +103,7 @@ impl DepGraphEdges {
/// Indicates that the current task `C` reads `v` by adding an
/// edge from `v` to `C`. If there is no current task, panics. If
/// you want to suppress this edge, use `ignore`.
pub fn read(&mut self, v: DepNode) {
pub fn read(&mut self, v: DepNode<D>) {
let source = self.make_node(v);
self.add_edge_from_current_node(|current| (source, current))
}
@ -109,7 +111,7 @@ impl DepGraphEdges {
/// Indicates that the current task `C` writes `v` by adding an
/// edge from `C` to `v`. If there is no current task, panics. If
/// you want to suppress this edge, use `ignore`.
pub fn write(&mut self, v: DepNode) {
pub fn write(&mut self, v: DepNode<D>) {
let target = self.make_node(v);
self.add_edge_from_current_node(|current| (current, target))
}
@ -153,7 +155,7 @@ impl DepGraphEdges {
}
}
pub fn query(&self) -> DepGraphQuery {
pub fn query(&self) -> DepGraphQuery<D> {
let edges: Vec<_> = self.edges.iter()
.map(|&(i, j)| (self.id(i), self.id(j)))
.collect();

View file

@ -0,0 +1,71 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use hir::def_id::DefId;
use std::rc::Rc;
use super::dep_node::DepNode;
use super::query::DepGraphQuery;
use super::raii;
use super::thread::{DepGraphThreadData, DepMessage};
/// Handle to the dependency graph under construction. Cloning is cheap
/// (`Rc` bump) and all clones share the same underlying graph thread.
#[derive(Clone)]
pub struct DepGraph {
// Shared handle to the background thread that owns the actual graph
// state; all operations are enqueued as messages to it.
data: Rc<DepGraphThreadData>
}
impl DepGraph {
/// Creates a new dep-graph handle. When `enabled` is false, every
/// operation on the returned graph is a no-op.
pub fn new(enabled: bool) -> DepGraph {
DepGraph {
data: Rc::new(DepGraphThreadData::new(enabled))
}
}
/// True if we are actually building a dep-graph. If this returns false,
/// then the other methods on this `DepGraph` will have no net effect.
#[inline]
pub fn enabled(&self) -> bool {
self.data.enabled()
}
/// Snapshots the current graph for inspection (used in testing/debug
/// output). Delegates to the graph thread.
pub fn query(&self) -> DepGraphQuery<DefId> {
self.data.query()
}
/// Starts an "ignore" scope: reads/writes are not recorded until the
/// returned RAII guard is dropped.
pub fn in_ignore<'graph>(&'graph self) -> raii::IgnoreTask<'graph> {
raii::IgnoreTask::new(&self.data)
}
/// Pushes `key` as the current task; edges for subsequent reads/writes
/// are attributed to it until the returned RAII guard is dropped.
pub fn in_task<'graph>(&'graph self, key: DepNode<DefId>) -> raii::DepTask<'graph> {
raii::DepTask::new(&self.data, key)
}
/// Runs `op` inside an ignore scope (closure form of `in_ignore`).
pub fn with_ignore<OP,R>(&self, op: OP) -> R
where OP: FnOnce() -> R
{
let _task = self.in_ignore();
op()
}
/// Runs `op` with `key` as the current task (closure form of `in_task`).
pub fn with_task<OP,R>(&self, key: DepNode<DefId>, op: OP) -> R
where OP: FnOnce() -> R
{
let _task = self.in_task(key);
op()
}
/// Records that the current task read `v` (adds an edge from `v` to
/// the current task).
pub fn read(&self, v: DepNode<DefId>) {
self.data.enqueue(DepMessage::Read(v));
}
/// Records that the current task wrote `v` (adds an edge from the
/// current task to `v`).
pub fn write(&self, v: DepNode<DefId>) {
self.data.enqueue(DepMessage::Write(v));
}
}

View file

@ -8,211 +8,17 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use self::thread::{DepGraphThreadData, DepMessage};
use hir::def_id::DefId;
use syntax::ast::NodeId;
use ty::TyCtxt;
use hir;
use hir::intravisit::Visitor;
use std::rc::Rc;
mod dep_node;
mod dep_tracking_map;
mod edges;
mod graph;
mod query;
mod raii;
mod thread;
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum DepNode {
// Represents the `Krate` as a whole (the `hir::Krate` value) (as
// distinct from the krate module). This is basically a hash of
// the entire krate, so if you read from `Krate` (e.g., by calling
// `tcx.map.krate()`), we will have to assume that any change
// means that you need to be recompiled. This is because the
// `Krate` value gives you access to all other items. To avoid
// this fate, do not call `tcx.map.krate()`; instead, prefer
// wrappers like `tcx.visit_all_items_in_krate()`. If there is no
// suitable wrapper, you can use `tcx.dep_graph.ignore()` to gain
// access to the krate, but you must remember to add suitable
// edges yourself for the individual items that you read.
Krate,
// Represents the HIR node with the given node-id
Hir(DefId),
// Represents different phases in the compiler.
CrateReader,
CollectLanguageItems,
CheckStaticRecursion,
ResolveLifetimes,
RegionResolveCrate,
CheckLoops,
PluginRegistrar,
StabilityIndex,
CollectItem(DefId),
Coherence,
EffectCheck,
Liveness,
Resolve,
EntryPoint,
CheckEntryFn,
CoherenceCheckImpl(DefId),
CoherenceOverlapCheck(DefId),
CoherenceOverlapCheckSpecial(DefId),
CoherenceOverlapInherentCheck(DefId),
CoherenceOrphanCheck(DefId),
Variance,
WfCheck(DefId),
TypeckItemType(DefId),
TypeckItemBody(DefId),
Dropck,
DropckImpl(DefId),
CheckConst(DefId),
Privacy,
IntrinsicCheck(DefId),
MatchCheck(DefId),
MirMapConstruction(DefId),
MirTypeck(NodeId),
BorrowCheck(DefId),
RvalueCheck(DefId),
Reachability,
DeadCheck,
StabilityCheck,
LateLintCheck,
IntrinsicUseCheck,
TransCrate,
TransCrateItem(DefId),
TransInlinedItem(DefId),
TransWriteMetadata,
// Nodes representing bits of computed IR in the tcx. Each shared
// table in the tcx (or elsewhere) maps to one of these
// nodes. Often we map multiple tables to the same node if there
// is no point in distinguishing them (e.g., both the type and
// predicates for an item wind up in `ItemSignature`). Other
// times, such as `ImplItems` vs `TraitItemDefIds`, tables which
// might be mergable are kept distinct because the sets of def-ids
// to which they apply are disjoint, and hence we might as well
// have distinct labels for easier debugging.
ImplOrTraitItems(DefId),
ItemSignature(DefId),
FieldTy(DefId),
TraitItemDefIds(DefId),
InherentImpls(DefId),
ImplItems(DefId),
// The set of impls for a given trait. Ultimately, it would be
// nice to get more fine-grained here (e.g., to include a
// simplified type), but we can't do that until we restructure the
// HIR to distinguish the *header* of an impl from its body. This
// is because changes to the header may change the self-type of
// the impl and hence would require us to be more conservative
// than changes in the impl body.
TraitImpls(DefId),
// Nodes representing caches. To properly handle a true cache, we
// don't use a DepTrackingMap, but rather we push a task node.
// Otherwise the write into the map would be incorrectly
// attributed to the first task that happened to fill the cache,
// which would yield an overly conservative dep-graph.
TraitItems(DefId),
ReprHints(DefId),
TraitSelect(DefId),
}
#[derive(Clone)]
pub struct DepGraph {
data: Rc<DepGraphThreadData>
}
impl DepGraph {
pub fn new(enabled: bool) -> DepGraph {
DepGraph {
data: Rc::new(DepGraphThreadData::new(enabled))
}
}
/// True if we are actually building a dep-graph. If this returns false,
/// then the other methods on this `DepGraph` will have no net effect.
#[inline]
pub fn enabled(&self) -> bool {
self.data.enabled()
}
pub fn query(&self) -> DepGraphQuery {
self.data.query()
}
pub fn in_ignore<'graph>(&'graph self) -> raii::IgnoreTask<'graph> {
raii::IgnoreTask::new(&self.data)
}
pub fn in_task<'graph>(&'graph self, key: DepNode) -> raii::DepTask<'graph> {
raii::DepTask::new(&self.data, key)
}
pub fn with_ignore<OP,R>(&self, op: OP) -> R
where OP: FnOnce() -> R
{
let _task = self.in_ignore();
op()
}
pub fn with_task<OP,R>(&self, key: DepNode, op: OP) -> R
where OP: FnOnce() -> R
{
let _task = self.in_task(key);
op()
}
pub fn read(&self, v: DepNode) {
self.data.enqueue(DepMessage::Read(v));
}
pub fn write(&self, v: DepNode) {
self.data.enqueue(DepMessage::Write(v));
}
}
mod visit;
pub use self::dep_tracking_map::{DepTrackingMap, DepTrackingMapConfig};
pub use self::dep_node::DepNode;
pub use self::graph::DepGraph;
pub use self::query::DepGraphQuery;
/// Visit all the items in the krate in some order. When visiting a
/// particular item, first create a dep-node by calling `dep_node_fn`
/// and push that onto the dep-graph stack of tasks, and also create a
/// read edge from the corresponding AST node. This is used in
/// compiler passes to automatically record the item that they are
/// working on.
pub fn visit_all_items_in_krate<'tcx,V,F>(tcx: &TyCtxt<'tcx>,
mut dep_node_fn: F,
visitor: &mut V)
where F: FnMut(DefId) -> DepNode, V: Visitor<'tcx>
{
struct TrackingVisitor<'visit, 'tcx: 'visit, F: 'visit, V: 'visit> {
tcx: &'visit TyCtxt<'tcx>,
dep_node_fn: &'visit mut F,
visitor: &'visit mut V
}
impl<'visit, 'tcx, F, V> Visitor<'tcx> for TrackingVisitor<'visit, 'tcx, F, V>
where F: FnMut(DefId) -> DepNode, V: Visitor<'tcx>
{
fn visit_item(&mut self, i: &'tcx hir::Item) {
let item_def_id = self.tcx.map.local_def_id(i.id);
let task_id = (self.dep_node_fn)(item_def_id);
let _task = self.tcx.dep_graph.in_task(task_id);
debug!("Started task {:?}", task_id);
self.tcx.dep_graph.read(DepNode::Hir(item_def_id));
self.visitor.visit_item(i)
}
}
let krate = tcx.dep_graph.with_ignore(|| tcx.map.krate());
let mut tracking_visitor = TrackingVisitor {
tcx: tcx,
dep_node_fn: &mut dep_node_fn,
visitor: visitor
};
krate.visit_all_items(&mut tracking_visitor)
}
pub use self::visit::visit_all_items_in_krate;

View file

@ -10,16 +10,20 @@
use rustc_data_structures::fnv::FnvHashMap;
use rustc_data_structures::graph::{Graph, NodeIndex};
use std::fmt::Debug;
use std::hash::Hash;
use super::DepNode;
pub struct DepGraphQuery {
pub graph: Graph<DepNode, ()>,
pub indices: FnvHashMap<DepNode, NodeIndex>,
pub struct DepGraphQuery<D: Clone + Debug + Hash + Eq> {
pub graph: Graph<DepNode<D>, ()>,
pub indices: FnvHashMap<DepNode<D>, NodeIndex>,
}
impl DepGraphQuery {
pub fn new(nodes: &[DepNode], edges: &[(DepNode, DepNode)]) -> DepGraphQuery {
impl<D: Clone + Debug + Hash + Eq> DepGraphQuery<D> {
pub fn new(nodes: &[DepNode<D>],
edges: &[(DepNode<D>, DepNode<D>)])
-> DepGraphQuery<D> {
let mut graph = Graph::new();
let mut indices = FnvHashMap();
for node in nodes {
@ -39,27 +43,43 @@ impl DepGraphQuery {
}
}
pub fn nodes(&self) -> Vec<DepNode> {
pub fn contains_node(&self, node: &DepNode<D>) -> bool {
self.indices.contains_key(&node)
}
pub fn nodes(&self) -> Vec<DepNode<D>> {
self.graph.all_nodes()
.iter()
.map(|n| n.data.clone())
.collect()
}
pub fn edges(&self) -> Vec<(DepNode,DepNode)> {
pub fn edges(&self) -> Vec<(DepNode<D>,DepNode<D>)> {
self.graph.all_edges()
.iter()
.map(|edge| (edge.source(), edge.target()))
.map(|(s, t)| (self.graph.node_data(s).clone(), self.graph.node_data(t).clone()))
.map(|(s, t)| (self.graph.node_data(s).clone(),
self.graph.node_data(t).clone()))
.collect()
}
/// All nodes reachable from `node`. In other words, things that
/// will have to be recomputed if `node` changes.
pub fn dependents(&self, node: DepNode) -> Vec<DepNode> {
pub fn transitive_dependents(&self, node: DepNode<D>) -> Vec<DepNode<D>> {
if let Some(&index) = self.indices.get(&node) {
self.graph.depth_traverse(index)
.map(|dependent_node| self.graph.node_data(dependent_node).clone())
.map(|s| self.graph.node_data(s).clone())
.collect()
} else {
vec![]
}
}
/// Just the outgoing edges from `node`.
pub fn immediate_dependents(&self, node: DepNode<D>) -> Vec<DepNode<D>> {
if let Some(&index) = self.indices.get(&node) {
self.graph.successor_nodes(index)
.map(|s| self.graph.node_data(s).clone())
.collect()
} else {
vec![]

View file

@ -8,16 +8,18 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use hir::def_id::DefId;
use super::DepNode;
use super::thread::{DepGraphThreadData, DepMessage};
pub struct DepTask<'graph> {
data: &'graph DepGraphThreadData,
key: DepNode,
key: DepNode<DefId>,
}
impl<'graph> DepTask<'graph> {
pub fn new(data: &'graph DepGraphThreadData, key: DepNode) -> DepTask<'graph> {
pub fn new(data: &'graph DepGraphThreadData, key: DepNode<DefId>)
-> DepTask<'graph> {
data.enqueue(DepMessage::PushTask(key));
DepTask { data: data, key: key }
}

View file

@ -18,6 +18,7 @@
//! to accumulate more messages. This way we only ever have two vectors
//! allocated (and both have a fairly large capacity).
use hir::def_id::DefId;
use rustc_data_structures::veccell::VecCell;
use std::cell::Cell;
use std::sync::mpsc::{self, Sender, Receiver};
@ -28,10 +29,10 @@ use super::DepNode;
use super::edges::DepGraphEdges;
pub enum DepMessage {
Read(DepNode),
Write(DepNode),
PushTask(DepNode),
PopTask(DepNode),
Read(DepNode<DefId>),
Write(DepNode<DefId>),
PushTask(DepNode<DefId>),
PopTask(DepNode<DefId>),
PushIgnore,
PopIgnore,
Query,
@ -57,7 +58,7 @@ pub struct DepGraphThreadData {
swap_out: Sender<Vec<DepMessage>>,
// where to receive query results
query_in: Receiver<DepGraphQuery>,
query_in: Receiver<DepGraphQuery<DefId>>,
}
const INITIAL_CAPACITY: usize = 2048;
@ -105,7 +106,7 @@ impl DepGraphThreadData {
self.swap_out.send(old_messages).unwrap();
}
pub fn query(&self) -> DepGraphQuery {
pub fn query(&self) -> DepGraphQuery<DefId> {
assert!(self.enabled, "cannot query if dep graph construction not enabled");
self.enqueue(DepMessage::Query);
self.swap();
@ -155,7 +156,7 @@ impl DepGraphThreadData {
/// Definition of the depgraph thread.
pub fn main(swap_in: Receiver<Vec<DepMessage>>,
swap_out: Sender<Vec<DepMessage>>,
query_out: Sender<DepGraphQuery>) {
query_out: Sender<DepGraphQuery<DefId>>) {
let mut edges = DepGraphEdges::new();
// the compiler thread always expects a fresh buffer to be

View file

@ -0,0 +1,56 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use hir;
use hir::def_id::DefId;
use hir::intravisit::Visitor;
use ty::TyCtxt;
use super::dep_node::DepNode;
/// Visit all the items in the krate in some order. When visiting a
/// particular item, first create a dep-node by calling `dep_node_fn`
/// and push that onto the dep-graph stack of tasks, and also create a
/// read edge from the corresponding AST node. This is used in
/// compiler passes to automatically record the item that they are
/// working on.
pub fn visit_all_items_in_krate<'tcx,V,F>(tcx: &TyCtxt<'tcx>,
mut dep_node_fn: F,
visitor: &mut V)
where F: FnMut(DefId) -> DepNode<DefId>, V: Visitor<'tcx>
{
struct TrackingVisitor<'visit, 'tcx: 'visit, F: 'visit, V: 'visit> {
tcx: &'visit TyCtxt<'tcx>,
dep_node_fn: &'visit mut F,
visitor: &'visit mut V
}
impl<'visit, 'tcx, F, V> Visitor<'tcx> for TrackingVisitor<'visit, 'tcx, F, V>
where F: FnMut(DefId) -> DepNode<DefId>, V: Visitor<'tcx>
{
fn visit_item(&mut self, i: &'tcx hir::Item) {
let item_def_id = self.tcx.map.local_def_id(i.id);
let task_id = (self.dep_node_fn)(item_def_id);
let _task = self.tcx.dep_graph.in_task(task_id);
debug!("Started task {:?}", task_id);
self.tcx.dep_graph.read(DepNode::Hir(item_def_id));
self.visitor.visit_item(i)
}
}
let krate = tcx.dep_graph.with_ignore(|| tcx.map.krate());
let mut tracking_visitor = TrackingVisitor {
tcx: tcx,
dep_node_fn: &mut dep_node_fn,
visitor: visitor
};
krate.visit_all_items(&mut tracking_visitor)
}

View file

@ -203,17 +203,56 @@ impl Definitions {
}
}
pub fn retrace_path(&self, path: &DefPath) -> Option<DefIndex> {
debug!("retrace_path(path={:?})", path);
// we assume that we only want to retrace paths relative to
// the crate root
assert!(path.is_local());
let root_key = DefKey {
parent: None,
disambiguated_data: DisambiguatedDefPathData {
data: DefPathData::CrateRoot,
disambiguator: 0,
},
};
let root_id = self.key_map[&root_key];
debug!("retrace_path: root_id={:?}", root_id);
let mut id = root_id;
for data in &path.data {
let key = DefKey { parent: Some(id), disambiguated_data: data.clone() };
debug!("key = {:?}", key);
id = match self.key_map.get(&key) {
Some(&id) => id,
None => return None
};
}
Some(id)
}
pub fn create_def_with_parent(&mut self,
parent: Option<DefIndex>,
node_id: ast::NodeId,
data: DefPathData)
-> DefIndex {
debug!("create_def_with_parent(parent={:?}, node_id={:?}, data={:?})",
parent, node_id, data);
assert!(!self.node_map.contains_key(&node_id),
"adding a def'n for node-id {:?} and data {:?} but a previous def'n exists: {:?}",
node_id,
data,
self.data[self.node_map[&node_id].as_usize()]);
assert!(parent.is_some() ^ match data {
DefPathData::CrateRoot | DefPathData::InlinedRoot(_) => true,
_ => false,
});
// Find a unique DefKey. This basically means incrementing the disambiguator
// until we get no match.
let mut key = DefKey {
@ -228,12 +267,17 @@ impl Definitions {
key.disambiguated_data.disambiguator += 1;
}
debug!("create_def_with_parent: after disambiguation, key = {:?}", key);
// Create the definition.
let index = DefIndex::new(self.data.len());
self.data.push(DefData { key: key.clone(), node_id: node_id });
debug!("create_def_with_parent: node_map[{:?}] = {:?}", node_id, index);
self.node_map.insert(node_id, index);
debug!("create_def_with_parent: key_map[{:?}] = {:?}", key, index);
self.key_map.insert(key, index);
index
}
}

View file

@ -208,7 +208,7 @@ impl<'ast> Map<'ast> {
self.dep_graph.read(self.dep_node(id));
}
fn dep_node(&self, id0: NodeId) -> DepNode {
fn dep_node(&self, id0: NodeId) -> DepNode<DefId> {
let map = self.map.borrow();
let mut id = id0;
loop {
@ -282,6 +282,11 @@ impl<'ast> Map<'ast> {
self.definitions.borrow().def_path(def_id.index)
}
pub fn retrace_path(&self, path: &DefPath) -> Option<DefId> {
self.definitions.borrow().retrace_path(path)
.map(DefId::local)
}
pub fn local_def_id(&self, node: NodeId) -> DefId {
self.opt_local_def_id(node).unwrap_or_else(|| {
bug!("local_def_id: no entry for `{}`, which has a map of `{:?}`",

View file

@ -47,9 +47,6 @@
//! Original issue: https://github.com/rust-lang/rust/issues/10207
use std::fmt;
use std::hash::{Hash, SipHasher, Hasher};
use hir;
use hir::intravisit as visit;
#[derive(Clone, PartialEq, Debug)]
pub struct Svh {
@ -57,53 +54,16 @@ pub struct Svh {
}
impl Svh {
pub fn new(hash: &str) -> Svh {
/// Create a new `Svh` given the hash. If you actually want to
/// compute the SVH from some HIR, you want the `calculate_svh`
/// function found in `librustc_trans`.
pub fn new(hash: String) -> Svh {
assert!(hash.len() == 16);
Svh { hash: hash.to_string() }
Svh { hash: hash }
}
pub fn as_str<'a>(&'a self) -> &'a str {
&self.hash
}
pub fn calculate(crate_disambiguator: &str, krate: &hir::Crate) -> Svh {
// FIXME (#14132): This is better than it used to be, but it still not
// ideal. We now attempt to hash only the relevant portions of the
// Crate AST as well as the top-level crate attributes. (However,
// the hashing of the crate attributes should be double-checked
// to ensure it is not incorporating implementation artifacts into
// the hash that are not otherwise visible.)
// FIXME: this should use SHA1, not SipHash. SipHash is not built to
// avoid collisions.
let mut state = SipHasher::new();
"crate_disambiguator".hash(&mut state);
crate_disambiguator.len().hash(&mut state);
crate_disambiguator.hash(&mut state);
{
let mut visit = svh_visitor::make(&mut state, krate);
visit::walk_crate(&mut visit, krate);
}
// FIXME (#14132): This hash is still sensitive to e.g. the
// spans of the crate Attributes and their underlying
// MetaItems; we should make ContentHashable impl for those
// types and then use hash_content. But, since all crate
// attributes should appear near beginning of the file, it is
// not such a big deal to be sensitive to their spans for now.
//
// We hash only the MetaItems instead of the entire Attribute
// to avoid hashing the AttrId
for attr in &krate.attrs {
attr.node.value.hash(&mut state);
}
let hash = state.finish();
return Svh {
hash: (0..64).step_by(4).map(|i| hex(hash >> i)).collect()
};
pub fn from_hash(hash: u64) -> Svh {
return Svh::new((0..64).step_by(4).map(|i| hex(hash >> i)).collect());
fn hex(b: u64) -> char {
let b = (b & 0xf) as u8;
@ -114,6 +74,10 @@ impl Svh {
b as char
}
}
pub fn as_str<'a>(&'a self) -> &'a str {
&self.hash
}
}
impl fmt::Display for Svh {
@ -121,319 +85,3 @@ impl fmt::Display for Svh {
f.pad(self.as_str())
}
}
// FIXME (#14132): Even this SVH computation still has implementation
// artifacts: namely, the order of item declaration will affect the
// hash computation, but for many kinds of items the order of
// declaration should be irrelevant to the ABI.
mod svh_visitor {
pub use self::SawExprComponent::*;
pub use self::SawStmtComponent::*;
use self::SawAbiComponent::*;
use syntax::ast::{self, Name, NodeId};
use syntax::codemap::Span;
use syntax::parse::token;
use hir::intravisit as visit;
use hir::intravisit::{Visitor, FnKind};
use hir::*;
use hir;
use std::hash::{Hash, SipHasher};
pub struct StrictVersionHashVisitor<'a> {
pub krate: &'a Crate,
pub st: &'a mut SipHasher,
}
pub fn make<'a>(st: &'a mut SipHasher, krate: &'a Crate) -> StrictVersionHashVisitor<'a> {
StrictVersionHashVisitor { st: st, krate: krate }
}
// To off-load the bulk of the hash-computation on #[derive(Hash)],
// we define a set of enums corresponding to the content that our
// crate visitor will encounter as it traverses the ast.
//
// The important invariant is that all of the Saw*Component enums
// do not carry any Spans, Names, or Idents.
//
// Not carrying any Names/Idents is the important fix for problem
// noted on PR #13948: using the ident.name as the basis for a
// hash leads to unstable SVH, because ident.name is just an index
// into intern table (i.e. essentially a random address), not
// computed from the name content.
//
// With the below enums, the SVH computation is not sensitive to
// artifacts of how rustc was invoked nor of how the source code
// was laid out. (Or at least it is *less* sensitive.)
// This enum represents the different potential bits of code the
// visitor could encounter that could affect the ABI for the crate,
// and assigns each a distinct tag to feed into the hash computation.
#[derive(Hash)]
enum SawAbiComponent<'a> {
// FIXME (#14132): should we include (some function of)
// ident.ctxt as well?
SawIdent(token::InternedString),
SawStructDef(token::InternedString),
SawLifetime(token::InternedString),
SawLifetimeDef(token::InternedString),
SawMod,
SawForeignItem,
SawItem,
SawDecl,
SawTy,
SawGenerics,
SawFn,
SawTraitItem,
SawImplItem,
SawStructField,
SawVariant,
SawExplicitSelf,
SawPath,
SawBlock,
SawPat,
SawLocal,
SawArm,
SawExpr(SawExprComponent<'a>),
SawStmt(SawStmtComponent),
}
/// SawExprComponent carries all of the information that we want
/// to include in the hash that *won't* be covered by the
/// subsequent recursive traversal of the expression's
/// substructure by the visitor.
///
/// We know every Expr_ variant is covered by a variant because
/// `fn saw_expr` maps each to some case below. Ensuring that
/// each variant carries an appropriate payload has to be verified
/// by hand.
///
/// (However, getting that *exactly* right is not so important
/// because the SVH is just a developer convenience; there is no
/// guarantee of collision-freedom, hash collisions are just
/// (hopefully) unlikely.)
#[derive(Hash)]
pub enum SawExprComponent<'a> {
SawExprLoop(Option<token::InternedString>),
SawExprField(token::InternedString),
SawExprTupField(usize),
SawExprBreak(Option<token::InternedString>),
SawExprAgain(Option<token::InternedString>),
SawExprBox,
SawExprVec,
SawExprCall,
SawExprMethodCall,
SawExprTup,
SawExprBinary(hir::BinOp_),
SawExprUnary(hir::UnOp),
SawExprLit(ast::LitKind),
SawExprCast,
SawExprType,
SawExprIf,
SawExprWhile,
SawExprMatch,
SawExprClosure,
SawExprBlock,
SawExprAssign,
SawExprAssignOp(hir::BinOp_),
SawExprIndex,
SawExprPath(Option<usize>),
SawExprAddrOf(hir::Mutability),
SawExprRet,
SawExprInlineAsm(&'a hir::InlineAsm),
SawExprStruct,
SawExprRepeat,
}
fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> {
match *node {
ExprBox(..) => SawExprBox,
ExprVec(..) => SawExprVec,
ExprCall(..) => SawExprCall,
ExprMethodCall(..) => SawExprMethodCall,
ExprTup(..) => SawExprTup,
ExprBinary(op, _, _) => SawExprBinary(op.node),
ExprUnary(op, _) => SawExprUnary(op),
ExprLit(ref lit) => SawExprLit(lit.node.clone()),
ExprCast(..) => SawExprCast,
ExprType(..) => SawExprType,
ExprIf(..) => SawExprIf,
ExprWhile(..) => SawExprWhile,
ExprLoop(_, id) => SawExprLoop(id.map(|id| id.name.as_str())),
ExprMatch(..) => SawExprMatch,
ExprClosure(..) => SawExprClosure,
ExprBlock(..) => SawExprBlock,
ExprAssign(..) => SawExprAssign,
ExprAssignOp(op, _, _) => SawExprAssignOp(op.node),
ExprField(_, name) => SawExprField(name.node.as_str()),
ExprTupField(_, id) => SawExprTupField(id.node),
ExprIndex(..) => SawExprIndex,
ExprPath(ref qself, _) => SawExprPath(qself.as_ref().map(|q| q.position)),
ExprAddrOf(m, _) => SawExprAddrOf(m),
ExprBreak(id) => SawExprBreak(id.map(|id| id.node.name.as_str())),
ExprAgain(id) => SawExprAgain(id.map(|id| id.node.name.as_str())),
ExprRet(..) => SawExprRet,
ExprInlineAsm(ref a,_,_) => SawExprInlineAsm(a),
ExprStruct(..) => SawExprStruct,
ExprRepeat(..) => SawExprRepeat,
}
}
/// SawStmtComponent is analogous to SawExprComponent, but for statements.
#[derive(Hash)]
pub enum SawStmtComponent {
SawStmtDecl,
SawStmtExpr,
SawStmtSemi,
}
fn saw_stmt(node: &Stmt_) -> SawStmtComponent {
match *node {
StmtDecl(..) => SawStmtDecl,
StmtExpr(..) => SawStmtExpr,
StmtSemi(..) => SawStmtSemi,
}
}
impl<'a> Visitor<'a> for StrictVersionHashVisitor<'a> {
fn visit_nested_item(&mut self, item: ItemId) {
self.visit_item(self.krate.item(item.id))
}
fn visit_variant_data(&mut self, s: &'a VariantData, name: Name,
g: &'a Generics, _: NodeId, _: Span) {
SawStructDef(name.as_str()).hash(self.st);
visit::walk_generics(self, g);
visit::walk_struct_def(self, s)
}
fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
SawVariant.hash(self.st);
// walk_variant does not call walk_generics, so do it here.
visit::walk_generics(self, g);
visit::walk_variant(self, v, g, item_id)
}
// All of the remaining methods just record (in the hash
// SipHasher) that the visitor saw that particular variant
// (with its payload), and continue walking as the default
// visitor would.
//
// Some of the implementations have some notes as to how one
// might try to make their SVH computation less discerning
// (e.g. by incorporating reachability analysis). But
// currently all of their implementations are uniform and
// uninteresting.
//
// (If you edit a method such that it deviates from the
// pattern, please move that method up above this comment.)
fn visit_name(&mut self, _: Span, name: Name) {
SawIdent(name.as_str()).hash(self.st);
}
fn visit_lifetime(&mut self, l: &'a Lifetime) {
SawLifetime(l.name.as_str()).hash(self.st);
}
fn visit_lifetime_def(&mut self, l: &'a LifetimeDef) {
SawLifetimeDef(l.lifetime.name.as_str()).hash(self.st);
}
// We do recursively walk the bodies of functions/methods
// (rather than omitting their bodies from the hash) since
// monomorphization and cross-crate inlining generally implies
// that a change to a crate body will require downstream
// crates to be recompiled.
fn visit_expr(&mut self, ex: &'a Expr) {
SawExpr(saw_expr(&ex.node)).hash(self.st); visit::walk_expr(self, ex)
}
fn visit_stmt(&mut self, s: &'a Stmt) {
SawStmt(saw_stmt(&s.node)).hash(self.st); visit::walk_stmt(self, s)
}
fn visit_foreign_item(&mut self, i: &'a ForeignItem) {
// FIXME (#14132) ideally we would incorporate privacy (or
// perhaps reachability) somewhere here, so foreign items
// that do not leak into downstream crates would not be
// part of the ABI.
SawForeignItem.hash(self.st); visit::walk_foreign_item(self, i)
}
fn visit_item(&mut self, i: &'a Item) {
// FIXME (#14132) ideally would incorporate reachability
// analysis somewhere here, so items that never leak into
// downstream crates (e.g. via monomorphisation or
// inlining) would not be part of the ABI.
SawItem.hash(self.st); visit::walk_item(self, i)
}
fn visit_mod(&mut self, m: &'a Mod, _s: Span, _n: NodeId) {
SawMod.hash(self.st); visit::walk_mod(self, m)
}
fn visit_decl(&mut self, d: &'a Decl) {
SawDecl.hash(self.st); visit::walk_decl(self, d)
}
fn visit_ty(&mut self, t: &'a Ty) {
SawTy.hash(self.st); visit::walk_ty(self, t)
}
fn visit_generics(&mut self, g: &'a Generics) {
SawGenerics.hash(self.st); visit::walk_generics(self, g)
}
fn visit_fn(&mut self, fk: FnKind<'a>, fd: &'a FnDecl,
b: &'a Block, s: Span, _: NodeId) {
SawFn.hash(self.st); visit::walk_fn(self, fk, fd, b, s)
}
fn visit_trait_item(&mut self, ti: &'a TraitItem) {
SawTraitItem.hash(self.st); visit::walk_trait_item(self, ti)
}
fn visit_impl_item(&mut self, ii: &'a ImplItem) {
SawImplItem.hash(self.st); visit::walk_impl_item(self, ii)
}
fn visit_struct_field(&mut self, s: &'a StructField) {
SawStructField.hash(self.st); visit::walk_struct_field(self, s)
}
fn visit_explicit_self(&mut self, es: &'a ExplicitSelf) {
SawExplicitSelf.hash(self.st); visit::walk_explicit_self(self, es)
}
fn visit_path(&mut self, path: &'a Path, _: ast::NodeId) {
SawPath.hash(self.st); visit::walk_path(self, path)
}
fn visit_path_list_item(&mut self, prefix: &'a Path, item: &'a PathListItem) {
SawPath.hash(self.st); visit::walk_path_list_item(self, prefix, item)
}
fn visit_block(&mut self, b: &'a Block) {
SawBlock.hash(self.st); visit::walk_block(self, b)
}
fn visit_pat(&mut self, p: &'a Pat) {
SawPat.hash(self.st); visit::walk_pat(self, p)
}
fn visit_local(&mut self, l: &'a Local) {
SawLocal.hash(self.st); visit::walk_local(self, l)
}
fn visit_arm(&mut self, a: &'a Arm) {
SawArm.hash(self.st); visit::walk_arm(self, a)
}
}
}

View file

@ -139,11 +139,9 @@ pub struct Options {
pub continue_parse_after_error: bool,
pub mir_opt_level: usize,
/// if true, build up the dep-graph
pub build_dep_graph: bool,
/// if true, -Z dump-dep-graph was passed to dump out the dep-graph
pub dump_dep_graph: bool,
/// if Some, enable incremental compilation, using the given
/// directory to store intermediate results
pub incremental: Option<PathBuf>,
pub no_analysis: bool,
pub debugging_opts: DebuggingOptions,
@ -260,8 +258,7 @@ pub fn basic_options() -> Options {
treat_err_as_bug: false,
continue_parse_after_error: false,
mir_opt_level: 1,
build_dep_graph: false,
dump_dep_graph: false,
incremental: None,
no_analysis: false,
debugging_opts: basic_debugging_options(),
prints: Vec::new(),
@ -276,6 +273,15 @@ pub fn basic_options() -> Options {
}
}
impl Options {
/// True if there is a reason to build the dep graph.
pub fn build_dep_graph(&self) -> bool {
self.incremental.is_some() ||
self.debugging_opts.dump_dep_graph ||
self.debugging_opts.query_dep_graph
}
}
// The type of entry function, so
// users can have their own entry
// functions that don't start a
@ -635,10 +641,12 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"treat all errors that occur as bugs"),
continue_parse_after_error: bool = (false, parse_bool,
"attempt to recover from parse errors (experimental)"),
incr_comp: bool = (false, parse_bool,
incremental: Option<String> = (None, parse_opt_string,
"enable incremental compilation (experimental)"),
dump_dep_graph: bool = (false, parse_bool,
"dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"),
query_dep_graph: bool = (false, parse_bool,
"enable queries of the dependency graph for regression testing"),
no_analysis: bool = (false, parse_bool,
"parse and expand the source, but run no analysis"),
extra_plugins: Vec<String> = (Vec::new(), parse_list,
@ -1050,8 +1058,6 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let treat_err_as_bug = debugging_opts.treat_err_as_bug;
let continue_parse_after_error = debugging_opts.continue_parse_after_error;
let mir_opt_level = debugging_opts.mir_opt_level.unwrap_or(1);
let incremental_compilation = debugging_opts.incr_comp;
let dump_dep_graph = debugging_opts.dump_dep_graph;
let no_analysis = debugging_opts.no_analysis;
let mut output_types = HashMap::new();
@ -1211,6 +1217,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let crate_name = matches.opt_str("crate-name");
let incremental = debugging_opts.incremental.as_ref().map(|m| PathBuf::from(m));
Options {
crate_types: crate_types,
gc: gc,
@ -1230,8 +1238,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
treat_err_as_bug: treat_err_as_bug,
continue_parse_after_error: continue_parse_after_error,
mir_opt_level: mir_opt_level,
build_dep_graph: incremental_compilation || dump_dep_graph,
dump_dep_graph: dump_dep_graph,
incremental: incremental,
no_analysis: no_analysis,
debugging_opts: debugging_opts,
prints: prints,

View file

@ -9,6 +9,7 @@
// except according to those terms.
use dep_graph::DepNode;
use hir::def_id::DefId;
use ty::{Ty, TyS};
use ty::tls;
@ -46,7 +47,7 @@ impl<'tcx, 'lt> TyIVar<'tcx, 'lt> {
}
#[inline]
pub fn get(&self, dep_node: DepNode) -> Option<Ty<'tcx>> {
pub fn get(&self, dep_node: DepNode<DefId>) -> Option<Ty<'tcx>> {
tls::with(|tcx| tcx.dep_graph.read(dep_node));
self.untracked_get()
}
@ -61,11 +62,11 @@ impl<'tcx, 'lt> TyIVar<'tcx, 'lt> {
}
#[inline]
pub fn unwrap(&self, dep_node: DepNode) -> Ty<'tcx> {
pub fn unwrap(&self, dep_node: DepNode<DefId>) -> Ty<'tcx> {
self.get(dep_node).unwrap()
}
pub fn fulfill(&self, dep_node: DepNode, value: Ty<'lt>) {
pub fn fulfill(&self, dep_node: DepNode<DefId>, value: Ty<'lt>) {
tls::with(|tcx| tcx.dep_graph.write(dep_node));
// Invariant (A) is fulfilled, because by (B), every alias

View file

@ -24,7 +24,7 @@ macro_rules! dep_map_ty {
impl<'tcx> DepTrackingMapConfig for $ty_name<'tcx> {
type Key = $key;
type Value = $value;
fn to_dep_node(key: &$key) -> DepNode { DepNode::$node_name(*key) }
fn to_dep_node(key: &$key) -> DepNode<DefId> { DepNode::$node_name(*key) }
}
}
}

View file

@ -937,7 +937,7 @@ impl<'tcx> TraitPredicate<'tcx> {
}
/// Creates the dep-node for selecting/evaluating this trait reference.
fn dep_node(&self) -> DepNode {
fn dep_node(&self) -> DepNode<DefId> {
DepNode::TraitSelect(self.def_id())
}
@ -956,7 +956,7 @@ impl<'tcx> PolyTraitPredicate<'tcx> {
self.0.def_id()
}
pub fn dep_node(&self) -> DepNode {
pub fn dep_node(&self) -> DepNode<DefId> {
// ok to skip binder since depnode does not care about regions
self.0.dep_node()
}
@ -2716,7 +2716,7 @@ impl<'tcx> TyCtxt<'tcx> {
pub fn visit_all_items_in_krate<V,F>(&self,
dep_node_fn: F,
visitor: &mut V)
where F: FnMut(DefId) -> DepNode, V: Visitor<'tcx>
where F: FnMut(DefId) -> DepNode<DefId>, V: Visitor<'tcx>
{
dep_graph::visit_all_items_in_krate(self, dep_node_fn, visitor);
}

View file

@ -23,6 +23,7 @@ rustc_mir = { path = "../librustc_mir" }
rustc_plugin = { path = "../librustc_plugin" }
rustc_passes = { path = "../librustc_passes" }
rustc_privacy = { path = "../librustc_privacy" }
rustc_incremental = { path = "../librustc_incremental" }
rustc_resolve = { path = "../librustc_resolve" }
rustc_save_analysis = { path = "../librustc_save_analysis" }
rustc_trans = { path = "../librustc_trans" }

View file

@ -24,6 +24,7 @@ use rustc::util::common::time;
use rustc::util::nodemap::NodeSet;
use rustc_back::sha2::{Sha256, Digest};
use rustc_borrowck as borrowck;
use rustc_incremental;
use rustc_resolve as resolve;
use rustc_metadata::macro_import;
use rustc_metadata::creader::LocalCrateReader;
@ -121,7 +122,7 @@ pub fn compile_input(sess: &Session,
let expanded_crate = assign_node_ids(sess, expanded_crate);
// Lower ast -> hir.
let lcx = LoweringContext::new(sess, Some(&expanded_crate));
let dep_graph = DepGraph::new(sess.opts.build_dep_graph);
let dep_graph = DepGraph::new(sess.opts.build_dep_graph());
let mut hir_forest = time(sess.time_passes(),
"lowering ast -> hir",
|| hir_map::Forest::new(lower_crate(&lcx, &expanded_crate),
@ -828,6 +829,10 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
index,
name,
|tcx| {
time(time_passes,
"load_dep_graph",
|| rustc_incremental::load_dep_graph(tcx));
// passes are timed inside typeck
try_with_f!(typeck::check_crate(tcx, trait_map), (tcx, None, analysis));
@ -952,9 +957,20 @@ pub fn phase_4_translate_to_llvm<'tcx>(tcx: &TyCtxt<'tcx>,
passes.run_passes(tcx, &mut mir_map);
});
let translation =
time(time_passes,
"translation",
move || trans::trans_crate(tcx, &mir_map, analysis));
time(time_passes,
"translation",
move || trans::trans_crate(tcx, &mir_map, analysis))
"assert dep graph",
move || rustc_incremental::assert_dep_graph(tcx));
time(time_passes,
"serialize dep graph",
move || rustc_incremental::save_dep_graph(tcx));
translation
}
/// Run LLVM itself, producing a bitcode file, assembly file or object file

View file

@ -45,6 +45,7 @@ extern crate rustc_passes;
extern crate rustc_lint;
extern crate rustc_plugin;
extern crate rustc_privacy;
extern crate rustc_incremental;
extern crate rustc_metadata;
extern crate rustc_mir;
extern crate rustc_resolve;

View file

@ -0,0 +1,18 @@
[package]
authors = ["The Rust Project Developers"]
name = "rustc_incremental"
version = "0.0.0"
[lib]
name = "rustc_incremental"
path = "lib.rs"
crate-type = ["dylib"]
[dependencies]
graphviz = { path = "../libgraphviz" }
rbml = { path = "../librbml" }
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
serialize = { path = "../libserialize" }
log = { path = "../liblog" }
syntax = { path = "../libsyntax" }

View file

@ -13,12 +13,17 @@
//! will dump graphs in graphviz form to disk, and it searches for
//! `#[rustc_if_this_changed]` and `#[rustc_then_this_would_need]`
//! annotations. These annotations can be used to test whether paths
//! exist in the graph. We report errors on each
//! `rustc_if_this_changed` annotation. If a path exists in all
//! cases, then we would report "all path(s) exist". Otherwise, we
//! report: "no path to `foo`" for each case where no path exists.
//! `compile-fail` tests can then be used to check when paths exist or
//! do not.
//! exist in the graph. These checks run after trans, so they view the
//! the final state of the dependency graph. Note that there are
//! similar assertions found in `persist::dirty_clean` which check the
//! **initial** state of the dependency graph, just after it has been
//! loaded from disk.
//!
//! In this code, we report errors on each `rustc_if_this_changed`
//! annotation. If a path exists in all cases, then we would report
//! "all path(s) exist". Otherwise, we report: "no path to `foo`" for
//! each case where no path exists. `compile-fail` tests can then be
//! used to check when paths exist or do not.
//!
//! The full form of the `rustc_if_this_changed` annotation is
//! `#[rustc_if_this_changed(id)]`. The `"id"` is optional and
@ -61,7 +66,7 @@ const ID: &'static str = "id";
pub fn assert_dep_graph(tcx: &TyCtxt) {
let _ignore = tcx.dep_graph.in_ignore();
if tcx.sess.opts.dump_dep_graph {
if tcx.sess.opts.debugging_opts.dump_dep_graph {
dump_graph(tcx);
}
@ -74,14 +79,23 @@ pub fn assert_dep_graph(tcx: &TyCtxt) {
(visitor.if_this_changed, visitor.then_this_would_need)
};
if !if_this_changed.is_empty() || !then_this_would_need.is_empty() {
assert!(tcx.sess.opts.debugging_opts.query_dep_graph,
"cannot use the `#[{}]` or `#[{}]` annotations \
without supplying `-Z query-dep-graph`",
IF_THIS_CHANGED, THEN_THIS_WOULD_NEED);
}
// Check paths.
check_paths(tcx, &if_this_changed, &then_this_would_need);
}
type SourceHashMap = FnvHashMap<InternedString,
FnvHashSet<(Span, DefId, DepNode)>>;
type TargetHashMap = FnvHashMap<InternedString,
FnvHashSet<(Span, InternedString, ast::NodeId, DepNode)>>;
type SourceHashMap =
FnvHashMap<InternedString,
FnvHashSet<(Span, DefId, DepNode<DefId>)>>;
type TargetHashMap =
FnvHashMap<InternedString,
FnvHashSet<(Span, InternedString, ast::NodeId, DepNode<DefId>)>>;
struct IfThisChanged<'a, 'tcx:'a> {
tcx: &'a TyCtxt<'tcx>,
@ -124,34 +138,21 @@ impl<'a, 'tcx> IfThisChanged<'a, 'tcx> {
}
}
}
let dep_node_str = dep_node_interned.as_ref().map(|s| &**s);
macro_rules! match_depnode_name {
($input:expr, $def_id:expr, match { $($variant:ident,)* } else $y:expr) => {
match $input {
$(Some(stringify!($variant)) => DepNode::$variant($def_id),)*
_ => $y
let dep_node = match dep_node_interned {
Some(ref n) => {
match DepNode::from_label_string(&n[..], def_id) {
Ok(n) => n,
Err(()) => {
self.tcx.sess.span_fatal(
attr.span,
&format!("unrecognized DepNode variant {:?}", n));
}
}
}
}
let dep_node = match_depnode_name! {
dep_node_str, def_id, match {
CollectItem,
BorrowCheck,
TransCrateItem,
TypeckItemType,
TypeckItemBody,
ImplOrTraitItems,
ItemSignature,
FieldTy,
TraitItemDefIds,
InherentImpls,
ImplItems,
TraitImpls,
ReprHints,
} else {
None => {
self.tcx.sess.span_fatal(
attr.span,
&format!("unrecognized DepNode variant {:?}", dep_node_str));
&format!("missing DepNode variant"));
}
};
let id = id.unwrap_or(InternedString::new(ID));
@ -194,7 +195,7 @@ fn check_paths(tcx: &TyCtxt,
};
for &(_, source_def_id, source_dep_node) in sources {
let dependents = query.dependents(source_dep_node);
let dependents = query.transitive_dependents(source_dep_node);
for &(target_span, ref target_pass, _, ref target_dep_node) in targets {
if !dependents.contains(&target_dep_node) {
tcx.sess.span_err(
@ -251,33 +252,34 @@ fn dump_graph(tcx: &TyCtxt) {
}
}
pub struct GraphvizDepGraph(FnvHashSet<DepNode>, Vec<(DepNode, DepNode)>);
pub struct GraphvizDepGraph(FnvHashSet<DepNode<DefId>>,
Vec<(DepNode<DefId>, DepNode<DefId>)>);
impl<'a, 'tcx> dot::GraphWalk<'a> for GraphvizDepGraph {
type Node = DepNode;
type Edge = (DepNode, DepNode);
fn nodes(&self) -> dot::Nodes<DepNode> {
type Node = DepNode<DefId>;
type Edge = (DepNode<DefId>, DepNode<DefId>);
fn nodes(&self) -> dot::Nodes<DepNode<DefId>> {
let nodes: Vec<_> = self.0.iter().cloned().collect();
nodes.into_cow()
}
fn edges(&self) -> dot::Edges<(DepNode, DepNode)> {
fn edges(&self) -> dot::Edges<(DepNode<DefId>, DepNode<DefId>)> {
self.1[..].into_cow()
}
fn source(&self, edge: &(DepNode, DepNode)) -> DepNode {
fn source(&self, edge: &(DepNode<DefId>, DepNode<DefId>)) -> DepNode<DefId> {
edge.0
}
fn target(&self, edge: &(DepNode, DepNode)) -> DepNode {
fn target(&self, edge: &(DepNode<DefId>, DepNode<DefId>)) -> DepNode<DefId> {
edge.1
}
}
impl<'a, 'tcx> dot::Labeller<'a> for GraphvizDepGraph {
type Node = DepNode;
type Edge = (DepNode, DepNode);
type Node = DepNode<DefId>;
type Edge = (DepNode<DefId>, DepNode<DefId>);
fn graph_id(&self) -> dot::Id {
dot::Id::new("DependencyGraph").unwrap()
}
fn node_id(&self, n: &DepNode) -> dot::Id {
fn node_id(&self, n: &DepNode<DefId>) -> dot::Id {
let s: String =
format!("{:?}", n).chars()
.map(|c| if c == '_' || c.is_alphanumeric() { c } else { '_' })
@ -285,7 +287,7 @@ impl<'a, 'tcx> dot::Labeller<'a> for GraphvizDepGraph {
debug!("n={:?} s={:?}", n, s);
dot::Id::new(s).unwrap()
}
fn node_label(&self, n: &DepNode) -> dot::LabelText {
fn node_label(&self, n: &DepNode<DefId>) -> dot::LabelText {
dot::LabelText::label(format!("{:?}", n))
}
}
@ -293,7 +295,9 @@ impl<'a, 'tcx> dot::Labeller<'a> for GraphvizDepGraph {
// Given an optional filter like `"x,y,z"`, returns either `None` (no
// filter) or the set of nodes whose labels contain all of those
// substrings.
fn node_set(query: &DepGraphQuery, filter: &str) -> Option<FnvHashSet<DepNode>> {
fn node_set(query: &DepGraphQuery<DefId>, filter: &str)
-> Option<FnvHashSet<DepNode<DefId>>>
{
debug!("node_set(filter={:?})", filter);
if filter.trim().is_empty() {
@ -313,10 +317,10 @@ fn node_set(query: &DepGraphQuery, filter: &str) -> Option<FnvHashSet<DepNode>>
.collect())
}
fn filter_nodes(query: &DepGraphQuery,
sources: &Option<FnvHashSet<DepNode>>,
targets: &Option<FnvHashSet<DepNode>>)
-> FnvHashSet<DepNode>
fn filter_nodes(query: &DepGraphQuery<DefId>,
sources: &Option<FnvHashSet<DepNode<DefId>>>,
targets: &Option<FnvHashSet<DepNode<DefId>>>)
-> FnvHashSet<DepNode<DefId>>
{
if let &Some(ref sources) = sources {
if let &Some(ref targets) = targets {
@ -331,10 +335,10 @@ fn filter_nodes(query: &DepGraphQuery,
}
}
fn walk_nodes(query: &DepGraphQuery,
starts: &FnvHashSet<DepNode>,
fn walk_nodes(query: &DepGraphQuery<DefId>,
starts: &FnvHashSet<DepNode<DefId>>,
direction: Direction)
-> FnvHashSet<DepNode>
-> FnvHashSet<DepNode<DefId>>
{
let mut set = FnvHashSet();
for start in starts {
@ -355,10 +359,10 @@ fn walk_nodes(query: &DepGraphQuery,
set
}
fn walk_between(query: &DepGraphQuery,
sources: &FnvHashSet<DepNode>,
targets: &FnvHashSet<DepNode>)
-> FnvHashSet<DepNode>
fn walk_between(query: &DepGraphQuery<DefId>,
sources: &FnvHashSet<DepNode<DefId>>,
targets: &FnvHashSet<DepNode<DefId>>)
-> FnvHashSet<DepNode<DefId>>
{
// This is a bit tricky. We want to include a node only if it is:
// (a) reachable from a source and (b) will reach a target. And we
@ -386,7 +390,7 @@ fn walk_between(query: &DepGraphQuery,
})
.collect();
fn recurse(query: &DepGraphQuery,
fn recurse(query: &DepGraphQuery<DefId>,
node_states: &mut [State],
node: NodeIndex)
-> bool
@ -423,9 +427,9 @@ fn walk_between(query: &DepGraphQuery,
}
}
fn filter_edges(query: &DepGraphQuery,
nodes: &FnvHashSet<DepNode>)
-> Vec<(DepNode, DepNode)>
fn filter_edges(query: &DepGraphQuery<DefId>,
nodes: &FnvHashSet<DepNode<DefId>>)
-> Vec<(DepNode<DefId>, DepNode<DefId>)>
{
query.edges()
.into_iter()

View file

@ -0,0 +1,422 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Calculation of a Strict Version Hash for crates. For a length
//! comment explaining the general idea, see `librustc/middle/svh.rs`.
use std::hash::{Hash, SipHasher, Hasher};
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
use rustc::hir::svh::Svh;
use rustc::ty;
use rustc::hir::intravisit::{self, Visitor};
use self::svh_visitor::StrictVersionHashVisitor;
/// Computes Strict Version Hashes (SVHs) over whole crates and over
/// individual items. Implemented below for `ty::TyCtxt`.
pub trait SvhCalculate {
    /// Calculate the SVH for an entire krate.
    fn calculate_krate_hash(&self) -> Svh;

    /// Calculate the SVH for a particular item.
    fn calculate_item_hash(&self, def_id: DefId) -> u64;
}
impl<'tcx> SvhCalculate for ty::TyCtxt<'tcx> {
    fn calculate_krate_hash(&self) -> Svh {
        // FIXME (#14132): This is better than it used to be, but it still not
        // ideal. We now attempt to hash only the relevant portions of the
        // Crate AST as well as the top-level crate attributes. (However,
        // the hashing of the crate attributes should be double-checked
        // to ensure it is not incorporating implementation artifacts into
        // the hash that are not otherwise visible.)

        let crate_disambiguator = self.sess.crate_disambiguator.get();
        let krate = self.map.krate();

        // FIXME: this should use SHA1, not SipHash. SipHash is not built to
        // avoid collisions.
        let mut state = SipHasher::new();
        debug!("state: {:?}", state);

        // Mix in the disambiguator first: a length-prefixed string so
        // that distinct (disambiguator, content) pairs cannot collide
        // by shifting bytes between the two.
        // FIXME(#32753) -- at (*) we `to_le` for endianness, but is
        // this enough, and does it matter anyway?
        "crate_disambiguator".hash(&mut state);
        crate_disambiguator.as_str().len().to_le().hash(&mut state); // (*)
        crate_disambiguator.as_str().hash(&mut state);

        debug!("crate_disambiguator: {:?}", crate_disambiguator.as_str());
        debug!("state: {:?}", state);

        // Hash the content of every item via the SVH visitor.
        {
            let mut visit = StrictVersionHashVisitor::new(&mut state, self);
            krate.visit_all_items(&mut visit);
        }

        // FIXME (#14132): This hash is still sensitive to e.g. the
        // spans of the crate Attributes and their underlying
        // MetaItems; we should make ContentHashable impl for those
        // types and then use hash_content. But, since all crate
        // attributes should appear near beginning of the file, it is
        // not such a big deal to be sensitive to their spans for now.
        //
        // We hash only the MetaItems instead of the entire Attribute
        // to avoid hashing the AttrId
        for attr in &krate.attrs {
            debug!("krate attr {:?}", attr);
            attr.node.value.hash(&mut state);
        }

        Svh::from_hash(state.finish())
    }

    fn calculate_item_hash(&self, def_id: DefId) -> u64 {
        // Hash a single local item by walking just its HIR subtree
        // with the same visitor used for the whole-crate hash.
        assert!(def_id.is_local());

        let mut state = SipHasher::new();

        {
            let mut visit = StrictVersionHashVisitor::new(&mut state, self);
            if def_id.index == CRATE_DEF_INDEX {
                // the crate root itself is not registered in the map
                // as an item, so we have to fetch it this way
                let krate = self.map.krate();
                intravisit::walk_crate(&mut visit, krate);
            } else {
                let node_id = self.map.as_local_node_id(def_id).unwrap();
                visit.visit_item(self.map.expect_item(node_id));
            }
        }

        state.finish()
    }
}
// FIXME (#14132): Even this SVH computation still has implementation
// artifacts: namely, the order of item declaration will affect the
// hash computation, but for many kinds of items the order of
// declaration should be irrelevant to the ABI.
mod svh_visitor {
pub use self::SawExprComponent::*;
pub use self::SawStmtComponent::*;
use self::SawAbiComponent::*;
use syntax::ast::{self, Name, NodeId};
use syntax::codemap::Span;
use syntax::parse::token;
use rustc::ty;
use rustc::hir;
use rustc::hir::*;
use rustc::hir::intravisit as visit;
use rustc::hir::intravisit::{Visitor, FnKind};
use std::hash::{Hash, SipHasher};
/// HIR visitor that folds the "ABI-relevant" parts of everything it
/// walks into the hasher `st`. What exactly gets hashed for each node
/// kind is defined by the `Saw*Component` enums below.
pub struct StrictVersionHashVisitor<'a, 'tcx: 'a> {
    pub tcx: &'a ty::TyCtxt<'tcx>,
    // Accumulator: every visit method writes into this hasher.
    pub st: &'a mut SipHasher,
}

impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> {
    pub fn new(st: &'a mut SipHasher,
               tcx: &'a ty::TyCtxt<'tcx>)
               -> Self {
        StrictVersionHashVisitor { st: st, tcx: tcx }
    }
}
// To off-load the bulk of the hash-computation on #[derive(Hash)],
// we define a set of enums corresponding to the content that our
// crate visitor will encounter as it traverses the ast.
//
// The important invariant is that all of the Saw*Component enums
// do not carry any Spans, Names, or Idents.
//
// Not carrying any Names/Idents is the important fix for problem
// noted on PR #13948: using the ident.name as the basis for a
// hash leads to unstable SVH, because ident.name is just an index
// into intern table (i.e. essentially a random address), not
// computed from the name content.
//
// With the below enums, the SVH computation is not sensitive to
// artifacts of how rustc was invoked nor of how the source code
// was laid out. (Or at least it is *less* sensitive.)
// This enum represents the different potential bits of code the
// visitor could encounter that could affect the ABI for the crate,
// and assigns each a distinct tag to feed into the hash computation.
#[derive(Hash)]
enum SawAbiComponent<'a> {

    // FIXME (#14132): should we include (some function of)
    // ident.ctxt as well?
    SawIdent(token::InternedString),
    SawStructDef(token::InternedString),

    SawLifetime(token::InternedString),
    SawLifetimeDef(token::InternedString),

    // Tag-only variants: the variant's discriminant is the only
    // contribution to the hash; the node's substructure is hashed by
    // the recursive walk that follows.
    SawMod,
    SawForeignItem,
    SawItem,
    SawDecl,
    SawTy,
    SawGenerics,
    SawFn,
    SawTraitItem,
    SawImplItem,
    SawStructField,
    SawVariant,
    SawExplicitSelf,
    SawPath,
    SawBlock,
    SawPat,
    SawLocal,
    SawArm,
    SawExpr(SawExprComponent<'a>),
    SawStmt(SawStmtComponent),
}
/// SawExprComponent carries all of the information that we want
/// to include in the hash that *won't* be covered by the
/// subsequent recursive traversal of the expression's
/// substructure by the visitor.
///
/// We know every Expr_ variant is covered by a variant because
/// `fn saw_expr` maps each to some case below. Ensuring that
/// each variant carries an appropriate payload has to be verified
/// by hand.
///
/// (However, getting that *exactly* right is not so important
/// because the SVH is just a developer convenience; there is no
/// guarantee of collision-freedom, hash collisions are just
/// (hopefully) unlikely.)
#[derive(Hash)]
pub enum SawExprComponent<'a> {

    // Variants with payloads: the payload is the part of the
    // expression that the recursive walk would NOT otherwise hash
    // (e.g. a field name, an operator, a literal value).
    SawExprLoop(Option<token::InternedString>),
    SawExprField(token::InternedString),
    SawExprTupField(usize),
    SawExprBreak(Option<token::InternedString>),
    SawExprAgain(Option<token::InternedString>),

    SawExprBox,
    SawExprVec,
    SawExprCall,
    SawExprMethodCall,
    SawExprTup,
    SawExprBinary(hir::BinOp_),
    SawExprUnary(hir::UnOp),
    SawExprLit(ast::LitKind),
    SawExprCast,
    SawExprType,
    SawExprIf,
    SawExprWhile,
    SawExprMatch,
    SawExprClosure,
    SawExprBlock,
    SawExprAssign,
    SawExprAssignOp(hir::BinOp_),
    SawExprIndex,
    SawExprPath(Option<usize>),
    SawExprAddrOf(hir::Mutability),
    SawExprRet,
    SawExprInlineAsm(&'a hir::InlineAsm),
    SawExprStruct,
    SawExprRepeat,
}
// Maps each `Expr_` variant to the payload that must be hashed for
// it. Must stay exhaustive: every variant needs a case here so that
// adding a variant to `Expr_` forces an update (see the comment on
// `SawExprComponent`).
fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> {
    match *node {
        ExprBox(..)              => SawExprBox,
        ExprVec(..)              => SawExprVec,
        ExprCall(..)             => SawExprCall,
        ExprMethodCall(..)       => SawExprMethodCall,
        ExprTup(..)              => SawExprTup,
        ExprBinary(op, _, _)     => SawExprBinary(op.node),
        ExprUnary(op, _)         => SawExprUnary(op),
        ExprLit(ref lit)         => SawExprLit(lit.node.clone()),
        ExprCast(..)             => SawExprCast,
        ExprType(..)             => SawExprType,
        ExprIf(..)               => SawExprIf,
        ExprWhile(..)            => SawExprWhile,
        ExprLoop(_, id)          => SawExprLoop(id.map(|id| id.name.as_str())),
        ExprMatch(..)            => SawExprMatch,
        ExprClosure(..)          => SawExprClosure,
        ExprBlock(..)            => SawExprBlock,
        ExprAssign(..)           => SawExprAssign,
        ExprAssignOp(op, _, _)   => SawExprAssignOp(op.node),
        ExprField(_, name)       => SawExprField(name.node.as_str()),
        ExprTupField(_, id)      => SawExprTupField(id.node),
        ExprIndex(..)            => SawExprIndex,
        ExprPath(ref qself, _)   => SawExprPath(qself.as_ref().map(|q| q.position)),
        ExprAddrOf(m, _)         => SawExprAddrOf(m),
        ExprBreak(id)            => SawExprBreak(id.map(|id| id.node.name.as_str())),
        ExprAgain(id)            => SawExprAgain(id.map(|id| id.node.name.as_str())),
        ExprRet(..)              => SawExprRet,
        ExprInlineAsm(ref a,_,_) => SawExprInlineAsm(a),
        ExprStruct(..)           => SawExprStruct,
        ExprRepeat(..)           => SawExprRepeat,
    }
}
/// SawStmtComponent is analogous to SawExprComponent, but for statements.
#[derive(Hash)]
pub enum SawStmtComponent {
    SawStmtDecl,
    SawStmtExpr,
    SawStmtSemi,
}

// Maps each statement kind to its hashed tag; the statement's
// substructure is hashed by the recursive walk.
fn saw_stmt(node: &Stmt_) -> SawStmtComponent {
    match *node {
        StmtDecl(..) => SawStmtDecl,
        StmtExpr(..) => SawStmtExpr,
        StmtSemi(..) => SawStmtSemi,
    }
}
impl<'a, 'tcx> Visitor<'a> for StrictVersionHashVisitor<'a, 'tcx> {
    fn visit_nested_item(&mut self, item: ItemId) {
        debug!("visit_nested_item: {:?} st={:?}", item, self.st);
        // A nested item contributes only its stable def-path to this
        // hash; the item's own contents are hashed when the item
        // itself is visited.
        let def_path = self.tcx.map.def_path_from_id(item.id);
        def_path.hash(self.st);
    }

    fn visit_variant_data(&mut self, s: &'a VariantData, name: Name,
                          g: &'a Generics, _: NodeId, _: Span) {
        SawStructDef(name.as_str()).hash(self.st);
        visit::walk_generics(self, g);
        visit::walk_struct_def(self, s)
    }

    fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
        SawVariant.hash(self.st);
        // walk_variant does not call walk_generics, so do it here.
        visit::walk_generics(self, g);
        visit::walk_variant(self, v, g, item_id)
    }

    // All of the remaining methods just record (in the hash
    // SipHasher) that the visitor saw that particular variant
    // (with its payload), and continue walking as the default
    // visitor would.
    //
    // Some of the implementations have some notes as to how one
    // might try to make their SVH computation less discerning
    // (e.g. by incorporating reachability analysis). But
    // currently all of their implementations are uniform and
    // uninteresting.
    //
    // (If you edit a method such that it deviates from the
    // pattern, please move that method up above this comment.)

    fn visit_name(&mut self, _: Span, name: Name) {
        SawIdent(name.as_str()).hash(self.st);
    }

    fn visit_lifetime(&mut self, l: &'a Lifetime) {
        SawLifetime(l.name.as_str()).hash(self.st);
    }

    fn visit_lifetime_def(&mut self, l: &'a LifetimeDef) {
        SawLifetimeDef(l.lifetime.name.as_str()).hash(self.st);
    }

    // We do recursively walk the bodies of functions/methods
    // (rather than omitting their bodies from the hash) since
    // monomorphization and cross-crate inlining generally implies
    // that a change to a crate body will require downstream
    // crates to be recompiled.
    fn visit_expr(&mut self, ex: &'a Expr) {
        SawExpr(saw_expr(&ex.node)).hash(self.st); visit::walk_expr(self, ex)
    }

    fn visit_stmt(&mut self, s: &'a Stmt) {
        SawStmt(saw_stmt(&s.node)).hash(self.st); visit::walk_stmt(self, s)
    }

    fn visit_foreign_item(&mut self, i: &'a ForeignItem) {
        // FIXME (#14132) ideally we would incorporate privacy (or
        // perhaps reachability) somewhere here, so foreign items
        // that do not leak into downstream crates would not be
        // part of the ABI.
        SawForeignItem.hash(self.st); visit::walk_foreign_item(self, i)
    }

    fn visit_item(&mut self, i: &'a Item) {
        debug!("visit_item: {:?} st={:?}", i, self.st);
        // FIXME (#14132) ideally would incorporate reachability
        // analysis somewhere here, so items that never leak into
        // downstream crates (e.g. via monomorphisation or
        // inlining) would not be part of the ABI.
        SawItem.hash(self.st); visit::walk_item(self, i)
    }

    fn visit_mod(&mut self, m: &'a Mod, _s: Span, _n: NodeId) {
        SawMod.hash(self.st); visit::walk_mod(self, m)
    }

    fn visit_decl(&mut self, d: &'a Decl) {
        SawDecl.hash(self.st); visit::walk_decl(self, d)
    }

    fn visit_ty(&mut self, t: &'a Ty) {
        SawTy.hash(self.st); visit::walk_ty(self, t)
    }

    fn visit_generics(&mut self, g: &'a Generics) {
        SawGenerics.hash(self.st); visit::walk_generics(self, g)
    }

    fn visit_fn(&mut self, fk: FnKind<'a>, fd: &'a FnDecl,
                b: &'a Block, s: Span, _: NodeId) {
        SawFn.hash(self.st); visit::walk_fn(self, fk, fd, b, s)
    }

    fn visit_trait_item(&mut self, ti: &'a TraitItem) {
        SawTraitItem.hash(self.st); visit::walk_trait_item(self, ti)
    }

    fn visit_impl_item(&mut self, ii: &'a ImplItem) {
        SawImplItem.hash(self.st); visit::walk_impl_item(self, ii)
    }

    fn visit_struct_field(&mut self, s: &'a StructField) {
        SawStructField.hash(self.st); visit::walk_struct_field(self, s)
    }

    fn visit_explicit_self(&mut self, es: &'a ExplicitSelf) {
        SawExplicitSelf.hash(self.st); visit::walk_explicit_self(self, es)
    }

    fn visit_path(&mut self, path: &'a Path, _: ast::NodeId) {
        SawPath.hash(self.st); visit::walk_path(self, path)
    }

    fn visit_path_list_item(&mut self, prefix: &'a Path, item: &'a PathListItem) {
        SawPath.hash(self.st); visit::walk_path_list_item(self, prefix, item)
    }

    fn visit_block(&mut self, b: &'a Block) {
        SawBlock.hash(self.st); visit::walk_block(self, b)
    }

    fn visit_pat(&mut self, p: &'a Pat) {
        SawPat.hash(self.st); visit::walk_pat(self, p)
    }

    fn visit_local(&mut self, l: &'a Local) {
        SawLocal.hash(self.st); visit::walk_local(self, l)
    }

    fn visit_arm(&mut self, a: &'a Arm) {
        SawArm.hash(self.st); visit::walk_arm(self, a)
    }
}
}

View file

@ -0,0 +1,41 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Support for serializing the dep-graph and reloading it.
#![crate_name = "rustc_incremental"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![cfg_attr(not(stage0), deny(warnings))]
#![feature(rustc_private)]
#![feature(staged_api)]
extern crate graphviz;
extern crate rbml;
#[macro_use] extern crate rustc;
extern crate rustc_data_structures;
extern crate serialize as rustc_serialize;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
mod assert_dep_graph;
mod calculate_svh;
mod persist;
pub use assert_dep_graph::assert_dep_graph;
pub use calculate_svh::SvhCalculate;
pub use persist::load_dep_graph;
pub use persist::save_dep_graph;

View file

@ -0,0 +1,13 @@
This is the code to load/save the dependency graph. Loading is assumed
to run early in compilation, and saving at the very end. When loading,
the basic idea is that we will load up the dependency graph from the
previous compilation and compare the hashes of our HIR nodes to the
hashes of the HIR nodes that existed at the time. For each node whose
hash has changed, or which no longer exists in the new HIR, we can
remove that node from the old graph along with any nodes that depend
on it. Then we add what's left to the new graph. (If any such nodes
or edges already existed in the new graph, re-adding them would have
no effect; but since loading is the first thing that happens during
compilation, the new graph is still empty at that point.)

View file

@ -0,0 +1,35 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The data that we will serialize and deserialize.
use rustc::dep_graph::DepNode;
use rustc_serialize::{Decoder as RustcDecoder,
Encodable as RustcEncodable, Encoder as RustcEncoder};
use super::directory::DefPathIndex;
/// The dep-graph as stored on disk. Node and edge endpoints refer to
/// items by `DefPathIndex` into the `DefIdDirectory` serialized in
/// the same file, so that they can be retraced to (possibly new)
/// def-ids when reloaded.
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedDepGraph {
    pub nodes: Vec<DepNode<DefPathIndex>>,
    pub edges: Vec<SerializedEdge>,
    pub hashes: Vec<SerializedHash>,
}

/// A `(source, target)` edge of the serialized graph.
pub type SerializedEdge = (DepNode<DefPathIndex>, DepNode<DefPathIndex>);

#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedHash {
    /// the item whose content hash is recorded here
    pub index: DefPathIndex,

    /// the hash itself, computed by `calculate_item_hash`
    pub hash: u64,
}

View file

@ -0,0 +1,118 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Code to convert a DefId into a DefPath (when serializing) and then
//! back again (when deserializing). Note that the new DefId
//! necessarily will not be the same as the old (and of course the
//! item might even be removed in the meantime).
use rustc::dep_graph::DepNode;
use rustc::hir::map::DefPath;
use rustc::hir::def_id::DefId;
use rustc::ty;
use rustc::util::nodemap::DefIdMap;
use rustc_serialize::{Decoder as RustcDecoder,
Encodable as RustcEncodable, Encoder as RustcEncoder};
use std::fmt::{self, Debug};
/// Index into the DefIdDirectory
#[derive(Copy, Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq,
         RustcEncodable, RustcDecodable)]
pub struct DefPathIndex {
    index: u32
}

/// The table of `DefPath`s for every def-id that was persisted;
/// `DefPathIndex` values are positions in `paths`.
#[derive(RustcEncodable, RustcDecodable)]
pub struct DefIdDirectory {
    // N.B. don't use Removable here because these def-ids are loaded
    // directly without remapping, so loading them should not fail.
    paths: Vec<DefPath>
}
impl DefIdDirectory {
    pub fn new() -> DefIdDirectory {
        DefIdDirectory { paths: vec![] }
    }

    /// Re-resolve every stored path against the current compilation,
    /// producing `None` for any path that no longer exists.
    pub fn retrace(&self, tcx: &ty::TyCtxt) -> RetracedDefIdDirectory {
        let ids = self.paths.iter()
                            .map(|path| tcx.map.retrace_path(path))
                            .collect();
        RetracedDefIdDirectory { ids: ids }
    }
}
/// Result of retracing a `DefIdDirectory`: for each stored path (in
/// order), the def-id it resolves to in the current compilation, or
/// `None` if the item is gone.
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct RetracedDefIdDirectory {
    ids: Vec<Option<DefId>>
}

impl RetracedDefIdDirectory {
    /// Current def-id for `index`, or `None` if the item was removed.
    pub fn def_id(&self, index: DefPathIndex) -> Option<DefId> {
        self.ids[index.index as usize]
    }

    /// Translate a serialized dep-node back into def-id space;
    /// `None` if its def-id could not be retraced.
    pub fn map(&self, node: DepNode<DefPathIndex>) -> Option<DepNode<DefId>> {
        node.map_def(|&index| self.def_id(index))
    }
}
/// Incrementally builds a `DefIdDirectory` while serializing,
/// interning each def-id's `DefPath` at most once.
pub struct DefIdDirectoryBuilder<'a,'tcx:'a> {
    tcx: &'a ty::TyCtxt<'tcx>,
    // Memoizes the index assigned to each def-id (`None` for def-ids
    // we decided not to persist), so `add` is O(1) on repeats.
    hash: DefIdMap<Option<DefPathIndex>>,
    directory: DefIdDirectory,
}

impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
    pub fn new(tcx: &'a ty::TyCtxt<'tcx>) -> DefIdDirectoryBuilder<'a, 'tcx> {
        DefIdDirectoryBuilder {
            tcx: tcx,
            hash: DefIdMap(),
            directory: DefIdDirectory::new()
        }
    }

    /// Intern `def_id`'s path in the directory and return its index.
    /// Returns `None` for def-ids we cannot persist (non-local ones).
    pub fn add(&mut self, def_id: DefId) -> Option<DefPathIndex> {
        if !def_id.is_local() {
            // FIXME(#32015) clarify story about cross-crate dep tracking
            return None;
        }

        let tcx = self.tcx;
        let paths = &mut self.directory.paths;
        // `Option<DefPathIndex>` is `Copy` (u32 newtype), so a plain
        // dereference suffices; no need to `.clone()`.
        *self.hash.entry(def_id)
                  .or_insert_with(|| {
                      let def_path = tcx.def_path(def_id);
                      if !def_path.is_local() {
                          return None;
                      }
                      let index = paths.len() as u32;
                      paths.push(def_path);
                      Some(DefPathIndex { index: index })
                  })
    }

    /// Translate a dep-node from def-id space into index space;
    /// `None` if its def-id cannot be persisted.
    pub fn map(&mut self, node: DepNode<DefId>) -> Option<DepNode<DefPathIndex>> {
        node.map_def(|&def_id| self.add(def_id))
    }

    /// Finish building and extract the directory.
    pub fn into_directory(self) -> DefIdDirectory {
        self.directory
    }
}
impl Debug for DefIdDirectory {
    // Print as an enumerated list so entries can be matched up with
    // `DefPathIndex` values in other debug output.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        fmt.debug_list()
           .entries(self.paths.iter().enumerate())
           .finish()
    }
}

View file

@ -0,0 +1,151 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Debugging code to test the state of the dependency graph just
//! after it is loaded from disk. For each node marked with
//! `#[rustc_clean]` or `#[rustc_dirty]`, we will check that a
//! suitable node for that item either appears or does not appear in
//! the dep-graph, as appropriate:
//!
//! - `#[rustc_dirty(label="TypeckItemBody", cfg="rev2")]` if we are
//! in `#[cfg(rev2)]`, then there MUST NOT be a node
//! `DepNode::TypeckItemBody(X)` where `X` is the def-id of the
//! current node.
//! - `#[rustc_clean(label="TypeckItemBody", cfg="rev2")]` same as above,
//! except that the node MUST exist.
//!
//! Errors are reported if we are in the suitable configuration but
//! the required condition is not met.
use rustc::dep_graph::{DepGraphQuery, DepNode};
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::hir::intravisit::Visitor;
use syntax::ast::{self, Attribute, MetaItem};
use syntax::attr::AttrMetaMethods;
use syntax::parse::token::InternedString;
use rustc::ty;
// Attribute names and meta-item keys recognized by this pass.
const DIRTY: &'static str = "rustc_dirty";
const CLEAN: &'static str = "rustc_clean";
const LABEL: &'static str = "label";
const CFG: &'static str = "cfg";
/// Entry point: walk all items in the crate and check any
/// `#[rustc_dirty]`/`#[rustc_clean]` annotations against the
/// just-loaded dep-graph.
pub fn check_dirty_clean_annotations(tcx: &ty::TyCtxt) {
    // Ignore the dep-graph while checking so the checks themselves
    // do not register reads into the graph.
    let _ignore = tcx.dep_graph.in_ignore();
    let query = tcx.dep_graph.query();
    let krate = tcx.map.krate();
    krate.visit_all_items(&mut DirtyCleanVisitor {
        tcx: tcx,
        query: &query,
    });
}
/// Visitor that checks `#[rustc_dirty]`/`#[rustc_clean]` annotations
/// against a snapshot (`query`) of the dep-graph.
pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
    tcx: &'a ty::TyCtxt<'tcx>,
    query: &'a DepGraphQuery<DefId>,
}
impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
    /// Return the string value of a meta-item (the `"foo"` in
    /// `cfg="foo"`), or abort with a fatal error if there is none.
    fn expect_associated_value(&self, item: &MetaItem) -> InternedString {
        if let Some(value) = item.value_str() {
            value
        } else {
            self.tcx.sess.span_fatal(
                item.span,
                &format!("associated value expected for `{}`", item.name()));
        }
    }

    /// Given a `#[rustc_dirty]` or `#[rustc_clean]` attribute, scan
    /// for a `cfg="foo"` attribute and check whether we have a cfg
    /// flag called `foo`.
    fn check_config(&self, attr: &ast::Attribute) -> bool {
        debug!("check_config(attr={:?})", attr);
        let config = &self.tcx.map.krate().config;
        debug!("check_config: config={:?}", config);
        for item in attr.meta_item_list().unwrap_or(&[]) {
            if item.check_name(CFG) {
                let value = self.expect_associated_value(item);
                debug!("check_config: searching for cfg {:?}", value);
                for cfg in &config[..] {
                    if cfg.check_name(&value[..]) {
                        debug!("check_config: matched {:?}", cfg);
                        return true;
                    }
                }
            }
        }
        debug!("check_config: no match found");
        return false;
    }

    /// Build the `DepNode` named by the attribute's `label="…"`
    /// entry, with `def_id` as the node's def-id. Fatal error if the
    /// label is missing or not recognized.
    fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> {
        for item in attr.meta_item_list().unwrap_or(&[]) {
            if item.check_name(LABEL) {
                let value = self.expect_associated_value(item);
                match DepNode::from_label_string(&value[..], def_id) {
                    // The Ok payload is the constructed DepNode, so
                    // name it accordingly (it previously shadowed the
                    // `def_id` parameter under a misleading name).
                    Ok(dep_node) => return dep_node,
                    Err(()) => {
                        self.tcx.sess.span_fatal(
                            item.span,
                            &format!("dep-node label `{}` not recognized", value));
                    }
                }
            }
        }
        self.tcx.sess.span_fatal(attr.span, "no `label` found");
    }

    /// Human-readable form of `dep_node`, for error messages.
    fn dep_node_str(&self, dep_node: DepNode<DefId>) -> DepNode<String> {
        dep_node.map_def(|&def_id| Some(self.tcx.item_path_str(def_id))).unwrap()
    }

    /// Report an error if `dep_node` IS present in the graph
    /// (a `#[rustc_dirty]` item expects it to be absent).
    fn assert_dirty(&self, item: &hir::Item, dep_node: DepNode<DefId>) {
        debug!("assert_dirty({:?})", dep_node);

        if self.query.contains_node(&dep_node) {
            let dep_node_str = self.dep_node_str(dep_node);
            self.tcx.sess.span_err(
                item.span,
                &format!("`{:?}` found in dep graph, but should be dirty", dep_node_str));
        }
    }

    /// Report an error if `dep_node` is NOT present in the graph
    /// (a `#[rustc_clean]` item expects it to be present).
    fn assert_clean(&self, item: &hir::Item, dep_node: DepNode<DefId>) {
        debug!("assert_clean({:?})", dep_node);

        if !self.query.contains_node(&dep_node) {
            let dep_node_str = self.dep_node_str(dep_node);
            self.tcx.sess.span_err(
                item.span,
                &format!("`{:?}` not found in dep graph, but should be clean", dep_node_str));
        }
    }
}
impl<'a, 'tcx> Visitor<'tcx> for DirtyCleanVisitor<'a, 'tcx> {
    fn visit_item(&mut self, item: &'tcx hir::Item) {
        let def_id = self.tcx.map.local_def_id(item.id);
        // An item may carry several annotations; check each one whose
        // `cfg` matches the current configuration.
        for attr in self.tcx.get_attrs(def_id).iter() {
            if attr.check_name(DIRTY) {
                if self.check_config(attr) {
                    self.assert_dirty(item, self.dep_node(attr, def_id));
                }
            } else if attr.check_name(CLEAN) {
                if self.check_config(attr) {
                    self.assert_clean(item, self.dep_node(attr, def_id));
                }
            }
        }
    }
}

View file

@ -0,0 +1,214 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Code to save/load the dep-graph from files.
use calculate_svh::SvhCalculate;
use rbml::Error;
use rbml::opaque::Decoder;
use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
use rustc::ty;
use rustc_data_structures::fnv::FnvHashSet;
use rustc_serialize::Decodable as RustcDecodable;
use std::io::Read;
use std::fs::File;
use std::path::Path;
use super::data::*;
use super::directory::*;
use super::dirty_clean;
use super::util::*;
// Nodes known to be dirty: their inputs changed or were removed.
type DirtyNodes = FnvHashSet<DepNode<DefId>>;

// Edges whose source is not (yet) known to be dirty.
type CleanEdges = Vec<(DepNode<DefId>, DepNode<DefId>)>;
/// If we are in incremental mode, and a previous dep-graph exists,
/// then load up those nodes/edges that are still valid into the
/// dep-graph for this session. (This is assumed to be running very
/// early in compilation, before we've really done any work, but
/// actually it doesn't matter all that much.) See `README.md` for
/// more general overview.
pub fn load_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>) {
    // Loading must not itself register dep-graph reads/writes.
    let _ignore = tcx.dep_graph.in_ignore();

    if let Some(dep_graph) = dep_graph_path(tcx) {
        // FIXME(#32754) lock file?
        load_dep_graph_if_exists(tcx, &dep_graph);
        // Debug-only: validate #[rustc_dirty]/#[rustc_clean] markers
        // against the graph we just loaded.
        dirty_clean::check_dirty_clean_annotations(tcx);
    }
}
/// Load and decode the dep-graph at `path`, if it exists. I/O errors
/// are reported as session errors (compilation proceeds without a
/// previous graph); decoding errors are treated as compiler bugs,
/// since the on-disk data was written by us.
pub fn load_dep_graph_if_exists<'tcx>(tcx: &ty::TyCtxt<'tcx>, path: &Path) {
    if !path.exists() {
        return;
    }

    // Slurp the whole file into memory before decoding.
    let mut data = vec![];
    match
        File::open(path)
        .and_then(|mut file| file.read_to_end(&mut data))
    {
        Ok(_) => { }
        Err(err) => {
            tcx.sess.err(
                &format!("could not load dep-graph from `{}`: {}",
                         path.display(), err));
            return;
        }
    }

    // `decode_dep_graph` succeeds with `()`, so only the error arm
    // carries information; `if let Err` expresses that directly.
    if let Err(err) = decode_dep_graph(tcx, &data) {
        bug!("decoding error in dep-graph from `{}`: {}", path.display(), err);
    }
}
/// Decode `data` (the serialized directory followed by the
/// serialized dep-graph), work out which nodes are still clean, and
/// replay the clean portion into the current session's dep-graph.
pub fn decode_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>, data: &[u8])
                              -> Result<(), Error>
{
    // Deserialize the directory and dep-graph.
    let mut decoder = Decoder::new(data, 0);
    let directory = try!(DefIdDirectory::decode(&mut decoder));
    let serialized_dep_graph = try!(SerializedDepGraph::decode(&mut decoder));

    debug!("decode_dep_graph: directory = {:#?}", directory);
    debug!("decode_dep_graph: serialized_dep_graph = {:#?}", serialized_dep_graph);

    // Retrace the paths in the directory to find their current location (if any).
    let retraced = directory.retrace(tcx);

    debug!("decode_dep_graph: retraced = {:#?}", retraced);

    // Compute the set of Hir nodes whose data has changed.
    let mut dirty_nodes =
        initial_dirty_nodes(tcx, &serialized_dep_graph.hashes, &retraced);

    debug!("decode_dep_graph: initial dirty_nodes = {:#?}", dirty_nodes);

    // Find all DepNodes reachable from that core set. This loop
    // iterates repeatedly over the list of edges whose source is not
    // known to be dirty (`clean_edges`). If it finds an edge whose
    // source is dirty, it removes it from that list and adds the
    // target to `dirty_nodes`. It stops when it reaches a fixed
    // point.
    let clean_edges = compute_clean_edges(&serialized_dep_graph.edges,
                                          &retraced,
                                          &mut dirty_nodes);

    // Add synthetic `foo->foo` edges for each clean node `foo` that
    // we had before. This is sort of a hack to create clean nodes in
    // the graph, since the existence of a node is a signal that the
    // work it represents need not be repeated.
    let clean_nodes =
        serialized_dep_graph.nodes
                            .iter()
                            .filter_map(|&node| retraced.map(node))
                            .filter(|node| !dirty_nodes.contains(node))
                            .map(|node| (node, node));

    // Add nodes and edges that are not dirty into our main graph.
    let dep_graph = tcx.dep_graph.clone();
    for (source, target) in clean_edges.into_iter().chain(clean_nodes) {
        // Replaying an edge = "while working on `target`, we read
        // `source`"; hence the task/read pair.
        let _task = dep_graph.in_task(target);
        dep_graph.read(source);

        debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source, target);
    }

    Ok(())
}
/// Compute the initial dirty set: for every item hashed in the
/// previous compilation, recompute its hash now and compare. Items
/// whose hash changed are dirty; if anything changed or was removed,
/// the whole-crate `Krate` node is also marked dirty.
fn initial_dirty_nodes<'tcx>(tcx: &ty::TyCtxt<'tcx>,
                             hashed_items: &[SerializedHash],
                             retraced: &RetracedDefIdDirectory)
                             -> DirtyNodes {
    let mut items_removed = false;
    let mut dirty_nodes = FnvHashSet();
    for hashed_item in hashed_items {
        match retraced.def_id(hashed_item.index) {
            Some(def_id) => {
                // FIXME(#32753) -- should we use a distinct hash here
                let current_hash = tcx.calculate_item_hash(def_id);
                debug!("initial_dirty_nodes: hash of {:?} is {:?}, was {:?}",
                       def_id, current_hash, hashed_item.hash);
                if current_hash != hashed_item.hash {
                    dirty_nodes.insert(DepNode::Hir(def_id));
                }
            }
            None => {
                // The item no longer exists in the current crate.
                items_removed = true;
            }
        }
    }

    // If any of the items in the krate have changed, then we consider
    // the meta-node `Krate` to be dirty, since that means something
    // which (potentially) read the contents of every single item.
    if items_removed || !dirty_nodes.is_empty() {
        dirty_nodes.insert(DepNode::Krate);
    }

    dirty_nodes
}
fn compute_clean_edges(serialized_edges: &[(SerializedEdge)],
retraced: &RetracedDefIdDirectory,
dirty_nodes: &mut DirtyNodes)
-> CleanEdges {
// Build up an initial list of edges. Include an edge (source,
// target) if neither node has been removed. If the source has
// been removed, add target to the list of dirty nodes.
let mut clean_edges = Vec::with_capacity(serialized_edges.len());
for &(serialized_source, serialized_target) in serialized_edges {
if let Some(target) = retraced.map(serialized_target) {
if let Some(source) = retraced.map(serialized_source) {
clean_edges.push((source, target))
} else {
// source removed, target must be dirty
dirty_nodes.insert(target);
}
} else {
// target removed, ignore the edge
}
}
debug!("compute_clean_edges: dirty_nodes={:#?}", dirty_nodes);
// Propagate dirty marks by iterating repeatedly over
// `clean_edges`. If we find an edge `(source, target)` where
// `source` is dirty, add `target` to the list of dirty nodes and
// remove it. Keep doing this until we find no more dirty nodes.
let mut previous_size = 0;
while dirty_nodes.len() > previous_size {
debug!("compute_clean_edges: previous_size={}", previous_size);
previous_size = dirty_nodes.len();
let mut i = 0;
while i < clean_edges.len() {
if dirty_nodes.contains(&clean_edges[i].0) {
let (source, target) = clean_edges.swap_remove(i);
debug!("compute_clean_edges: dirty source {:?} -> {:?}",
source, target);
dirty_nodes.insert(target);
} else if dirty_nodes.contains(&clean_edges[i].1) {
let (source, target) = clean_edges.swap_remove(i);
debug!("compute_clean_edges: dirty target {:?} -> {:?}",
source, target);
} else {
i += 1;
}
}
}
clean_edges
}

View file

@ -0,0 +1,23 @@
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! When in incremental mode, this pass dumps out the dependency graph
//! into the given directory. At the same time, it also hashes the
//! various HIR nodes.
mod data;
mod directory;
mod dirty_clean;
mod load;
mod save;
mod util;
pub use self::load::load_dep_graph;
pub use self::save::save_dep_graph;

View file

@ -0,0 +1,136 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use calculate_svh::SvhCalculate;
use rbml::opaque::Encoder;
use rustc::dep_graph::DepNode;
use rustc::ty;
use rustc_serialize::{Encodable as RustcEncodable};
use std::io::{self, Cursor, Write};
use std::fs::{self, File};
use super::data::*;
use super::directory::*;
use super::util::*;
/// Saves the dependency graph for the current crate into the incremental
/// directory (if one is configured). All dep-graph tracking is suppressed
/// while this runs so that the act of saving does not itself record reads.
/// Any failure is reported via the session and the save is abandoned;
/// this function never panics on I/O errors.
pub fn save_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>) {
    let _ignore = tcx.dep_graph.in_ignore();

    let path_buf = match dep_graph_path(tcx) {
        Some(p) => p,
        None => return, // incremental compilation is not enabled
    };

    // FIXME(#32754) lock file?

    // Delete the old dep-graph, if any, before writing the new one.
    if path_buf.exists() {
        if let Err(err) = fs::remove_file(&path_buf) {
            tcx.sess.err(
                &format!("unable to delete old dep-graph at `{}`: {}",
                         path_buf.display(), err));
            return;
        }
    }

    // Generate the data in a memory buffer first, so that a failed
    // encode never leaves a truncated file on disk.
    let mut wr = Cursor::new(Vec::new());
    if let Err(err) = encode_dep_graph(tcx, &mut Encoder::new(&mut wr)) {
        tcx.sess.err(
            &format!("could not encode dep-graph to `{}`: {}",
                     path_buf.display(), err));
        return;
    }

    // Write the buffered data out in one shot.
    let data = wr.into_inner();
    let write_result = File::create(&path_buf)
        .and_then(|mut file| file.write_all(&data));
    if let Err(err) = write_result {
        tcx.sess.err(
            &format!("failed to write dep-graph to `{}`: {}",
                     path_buf.display(), err));
        return;
    }
}
/// Serializes the dep-graph (and the def-id directory it references)
/// into `encoder`. Nodes from other crates or inlined items — anything
/// the directory builder cannot map — are silently dropped.
pub fn encode_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>,
                              encoder: &mut Encoder)
                              -> io::Result<()>
{
    // Here we take advantage of how RBML allows us to skip around
    // and encode the depgraph as a two-part structure:
    //
    // ```
    // <dep-graph>[SerializedDepGraph]</dep-graph> // tag 0
    // <directory>[DefIdDirectory]</directory> // tag 1
    // ```
    //
    // Then later we can load the directory by skipping to find tag 1.

    let query = tcx.dep_graph.query();

    let mut builder = DefIdDirectoryBuilder::new(tcx);

    // Create hashes for things we can persist: only local HIR nodes
    // carry a hash, everything else is filtered out here.
    let hashes =
        query.nodes()
             .into_iter()
             .filter_map(|dep_node| {
                 if let DepNode::Hir(def_id) = dep_node {
                     assert!(def_id.is_local());
                     builder.add(def_id)
                            .map(|index| {
                                // FIXME(#32753) -- should we use a distinct hash here
                                let hash = tcx.calculate_item_hash(def_id);
                                SerializedHash { index: index, hash: hash }
                            })
                 } else {
                     None
                 }
             })
             .collect();

    // Create the serialized dep-graph, dropping nodes that are
    // from other crates or from inlined items.
    //
    // FIXME(#32015) fix handling of other crates
    let serialized_nodes =
        query.nodes()
             .into_iter()
             .filter_map(|node| builder.map(node))
             .collect();
    let serialized_edges =
        query.edges()
             .into_iter()
             .filter_map(|(source_node, target_node)| {
                 // An edge survives only if both endpoints are mappable.
                 match (builder.map(source_node), builder.map(target_node)) {
                     (Some(source), Some(target)) => Some((source, target)),
                     _ => None,
                 }
             })
             .collect();
    let graph = SerializedDepGraph {
        nodes: serialized_nodes,
        edges: serialized_edges,
        hashes: hashes,
    };
    debug!("graph = {:#?}", graph);

    // Encode the directory and then the graph data.
    let directory = builder.into_directory();
    try!(directory.encode(encoder));
    try!(graph.encode(encoder));

    Ok(())
}

View file

@ -0,0 +1,32 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc::ty;
use std::fs;
use std::path::PathBuf;
pub fn dep_graph_path<'tcx>(tcx: &ty::TyCtxt<'tcx>) -> Option<PathBuf> {
// For now, just save/load dep-graph from
// directory/dep_graph.rbml
tcx.sess.opts.incremental.as_ref().and_then(|incr_dir| {
match fs::create_dir_all(&incr_dir){
Ok(()) => {}
Err(err) => {
tcx.sess.err(
&format!("could not create the directory `{}`: {}",
incr_dir.display(), err));
return None;
}
}
Some(incr_dir.join("dep_graph.rbml"))
})
}

View file

@ -1242,7 +1242,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
reader::tagged_docs(depsdoc, tag_crate_dep).enumerate().map(|(crate_num, depdoc)| {
let name = docstr(depdoc, tag_crate_dep_crate_name);
let hash = Svh::new(&docstr(depdoc, tag_crate_dep_hash));
let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash));
let doc = reader::get_doc(depdoc, tag_crate_dep_explicitly_linked);
let explicitly_linked = reader::doc_as_u8(doc) != 0;
CrateDep {
@ -1266,14 +1266,14 @@ fn list_crate_deps(data: &[u8], out: &mut io::Write) -> io::Result<()> {
pub fn maybe_get_crate_hash(data: &[u8]) -> Option<Svh> {
let cratedoc = rbml::Doc::new(data);
reader::maybe_get_doc(cratedoc, tag_crate_hash).map(|doc| {
Svh::new(doc.as_str_slice())
Svh::new(doc.as_str_slice().to_string())
})
}
pub fn get_crate_hash(data: &[u8]) -> Svh {
let cratedoc = rbml::Doc::new(data);
let hashdoc = reader::get_doc(cratedoc, tag_crate_hash);
Svh::new(hashdoc.as_str_slice())
Svh::new(hashdoc.as_str_slice().to_string())
}
pub fn maybe_get_crate_name(data: &[u8]) -> Option<&str> {

View file

@ -584,7 +584,8 @@ impl<'tcx> MirPass<'tcx> for TypeckMir {
// broken MIR, so try not to report duplicate errors.
return;
}
let _task = tcx.dep_graph.in_task(DepNode::MirTypeck(id));
let def_id = tcx.map.local_def_id(id);
let _task = tcx.dep_graph.in_task(DepNode::MirTypeck(def_id));
let param_env = ty::ParameterEnvironment::for_item(tcx, id);
let infcx = infer::new_infer_ctxt(tcx,
&tcx.tables,

View file

@ -18,6 +18,7 @@ rustc_back = { path = "../librustc_back" }
rustc_const_eval = { path = "../librustc_const_eval" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_incremental = { path = "../librustc_incremental" }
rustc_llvm = { path = "../librustc_llvm" }
rustc_mir = { path = "../librustc_mir" }
rustc_platform_intrinsics = { path = "../librustc_platform_intrinsics" }

View file

@ -13,7 +13,6 @@ use super::linker::{Linker, GnuLinker, MsvcLinker};
use super::rpath::RPathConfig;
use super::rpath;
use super::msvc;
use super::svh::Svh;
use session::config;
use session::config::NoDebugInfo;
use session::config::{OutputFilenames, Input, OutputType};
@ -26,8 +25,10 @@ use middle::dependency_format::Linkage;
use CrateTranslation;
use util::common::time;
use util::fs::fix_windows_verbatim_for_gcc;
use rustc::ty::TyCtxt;
use rustc_back::tempdir::TempDir;
use rustc_incremental::SvhCalculate;
use std::ascii;
use std::char;
use std::env;
@ -43,8 +44,6 @@ use syntax::ast;
use syntax::codemap::Span;
use syntax::attr::AttrMetaMethods;
use rustc::hir;
// RLIB LLVM-BYTECODE OBJECT LAYOUT
// Version 1
// Bytes Data
@ -122,15 +121,15 @@ pub fn find_crate_name(sess: Option<&Session>,
}
"rust_out".to_string()
}
pub fn build_link_meta(sess: &Session,
krate: &hir::Crate,
pub fn build_link_meta(tcx: &TyCtxt,
name: &str)
-> LinkMeta {
let r = LinkMeta {
crate_name: name.to_owned(),
crate_hash: Svh::calculate(&sess.crate_disambiguator.get().as_str(), krate),
crate_hash: tcx.calculate_krate_hash(),
};
info!("{:?}", r);
return r;

View file

@ -54,7 +54,6 @@ use session::Session;
use _match;
use abi::{self, Abi, FnType};
use adt;
use assert_dep_graph;
use attributes;
use build::*;
use builder::{Builder, noname};
@ -2730,7 +2729,7 @@ pub fn trans_crate<'tcx>(tcx: &TyCtxt<'tcx>,
}
}
let link_meta = link::build_link_meta(&tcx.sess, krate, name);
let link_meta = link::build_link_meta(&tcx, name);
let codegen_units = tcx.sess.opts.cg.codegen_units;
let shared_ccx = SharedCrateContext::new(&link_meta.crate_name,
@ -2856,8 +2855,6 @@ pub fn trans_crate<'tcx>(tcx: &TyCtxt<'tcx>,
};
let no_builtins = attr::contains_name(&krate.attrs, "no_builtins");
assert_dep_graph::assert_dep_graph(tcx);
CrateTranslation {
modules: modules,
metadata_module: metadata_module,

View file

@ -179,7 +179,7 @@ pub struct TraitSelectionCache<'tcx> {
impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> {
type Key = ty::PolyTraitRef<'tcx>;
type Value = traits::Vtable<'tcx, ()>;
fn to_dep_node(key: &ty::PolyTraitRef<'tcx>) -> DepNode {
fn to_dep_node(key: &ty::PolyTraitRef<'tcx>) -> DepNode<DefId> {
ty::tls::with(|tcx| {
let lifted_key = tcx.lift(key).unwrap();
lifted_key.to_poly_trait_predicate().dep_node()

View file

@ -46,6 +46,7 @@ extern crate libc;
#[macro_use] extern crate rustc;
extern crate rustc_back;
extern crate rustc_data_structures;
extern crate rustc_incremental;
pub extern crate rustc_llvm as llvm;
extern crate rustc_mir;
extern crate rustc_platform_intrinsics as intrinsics;
@ -85,7 +86,6 @@ mod macros;
mod abi;
mod adt;
mod asm;
mod assert_dep_graph;
mod attributes;
mod base;
mod basic_block;

View file

@ -356,6 +356,14 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat
"the `#[rustc_if_this_changed]` attribute \
is just used for rustc unit tests \
and will never be stable")),
("rustc_dirty", Whitelisted, Gated("rustc_attrs",
"the `#[rustc_dirty]` attribute \
is just used for rustc unit tests \
and will never be stable")),
("rustc_clean", Whitelisted, Gated("rustc_attrs",
"the `#[rustc_clean]` attribute \
is just used for rustc unit tests \
and will never be stable")),
("rustc_symbol_name", Whitelisted, Gated("rustc_attrs",
"internal rustc attributes will never be stable")),
("rustc_item_path", Whitelisted, Gated("rustc_attrs",

View file

@ -11,7 +11,7 @@
// Test that when a trait impl changes, fns whose body uses that trait
// must also be recompiled.
// compile-flags: -Z incr-comp
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(warnings)]

View file

@ -11,7 +11,7 @@
// Test that immediate callers have to change when callee changes, but
// not callers' callers.
// compile-flags: -Z incr-comp
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(dead_code)]

View file

@ -11,7 +11,7 @@
// Test cases where a changing struct appears in the signature of fns
// and methods.
// compile-flags: -Z incr-comp
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(dead_code)]

View file

@ -11,7 +11,7 @@
// Test that adding an impl to a trait `Foo` DOES affect functions
// that only use `Bar` if they have methods in common.
// compile-flags: -Z incr-comp
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(dead_code)]

View file

@ -11,7 +11,7 @@
// Test that adding an impl to a trait `Foo` does not affect functions
// that only use `Bar`, so long as they do not have methods in common.
// compile-flags: -Z incr-comp
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(warnings)]

View file

@ -11,7 +11,7 @@
// Test that when a trait impl changes, fns whose body uses that trait
// must also be recompiled.
// compile-flags: -Z incr-comp
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(warnings)]

View file

@ -10,7 +10,7 @@
// Test that two unrelated functions have no trans dependency.
// compile-flags: -Z incr-comp
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(dead_code)]

View file

@ -0,0 +1,53 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// revisions: rpass1 cfail2

#![allow(warnings)]
#![feature(rustc_attrs)]

// Sanity check for the dirty-clean system. Give the opposite
// annotations that we expect to see, so that we check that errors are
// reported.

fn main() { }

mod x {
    // In the first revision `x` returns `usize`...
    #[cfg(rpass1)]
    pub fn x() -> usize {
        22
    }

    // ...and in the second its signature changes to return `u32`,
    // which should dirty every item that depends on `x`.
    #[cfg(cfail2)]
    pub fn x() -> u32 {
        22
    }
}

mod y {
    use x;

    // `y` calls the changed fn, so it is really dirty; deliberately
    // claiming it is clean must be reported as the two errors below.
    #[rustc_clean(label="TypeckItemBody", cfg="cfail2")]
    #[rustc_clean(label="TransCrateItem", cfg="cfail2")]
    pub fn y() {
        //[cfail2]~^ ERROR `TypeckItemBody("y::y")` not found in dep graph, but should be clean
        //[cfail2]~| ERROR `TransCrateItem("y::y")` not found in dep graph, but should be clean
        x::x();
    }
}

mod z {
    // `z` never touches `x`, so it is really clean; deliberately
    // claiming it is dirty must likewise be reported as errors.
    #[rustc_dirty(label="TypeckItemBody", cfg="cfail2")]
    #[rustc_dirty(label="TransCrateItem", cfg="cfail2")]
    pub fn z() {
        //[cfail2]~^ ERROR `TypeckItemBody("z::z")` found in dep graph, but should be dirty
        //[cfail2]~| ERROR `TransCrateItem("z::z")` found in dep graph, but should be dirty
    }
}

View file

@ -0,0 +1,46 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// revisions: rpass1 rpass2

#![allow(warnings)]
#![feature(rustc_attrs)]

fn main() { }

mod x {
    // Between revisions only the *body* of `x` changes (1 -> 2); its
    // signature stays `fn() -> i32`.
    #[cfg(rpass1)]
    pub fn x() -> i32 {
        1
    }

    #[cfg(rpass2)]
    pub fn x() -> i32 {
        2
    }
}

mod y {
    use x;

    // The direct caller of the changed fn must be re-typechecked...
    #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
    pub fn y() {
        x::x();
    }
}

mod z {
    use y;

    // ...but the caller's caller must not be, since `y`'s own
    // signature did not change.
    #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
    pub fn z() {
        y::y();
    }
}

View file

@ -0,0 +1,58 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// revisions: rpass1 rpass2

#![allow(warnings)]
#![feature(rustc_attrs)]

// Here the only thing which changes is the string constant in `x`.
// Therefore, the compiler deduces (correctly) that typeck is not
// needed even for callers of `x`.
//
// It is not entirely clear why `TransCrateItem` invalidates `y` and
// `z`, actually, I think it's because of the structure of
// trans. -nmatsakis

fn main() { }

mod x {
    #[cfg(rpass1)]
    pub fn x() {
        println!("1");
    }

    // Only the string literal differs between revisions, so `x` itself
    // is dirty for both typeck and trans...
    #[cfg(rpass2)]
    #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
    #[rustc_dirty(label="TransCrateItem", cfg="rpass2")]
    pub fn x() {
        println!("2");
    }
}

mod y {
    use x;

    // ...while callers keep the same view of `x`'s signature and so
    // stay clean for typeck (and, expected here, trans as well).
    #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
    #[rustc_clean(label="TransCrateItem", cfg="rpass2")]
    pub fn y() {
        x::x();
    }
}

mod z {
    use y;

    #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
    #[rustc_clean(label="TransCrateItem", cfg="rpass2")]
    pub fn z() {
        y::y();
    }
}

View file

@ -239,7 +239,7 @@ fn compile_program(input: &str, sysroot: PathBuf)
let krate = driver::assign_node_ids(&sess, krate);
let lcx = LoweringContext::new(&sess, Some(&krate));
let dep_graph = DepGraph::new(sess.opts.build_dep_graph);
let dep_graph = DepGraph::new(sess.opts.build_dep_graph());
let mut hir_forest = ast_map::Forest::new(lower_crate(&lcx, &krate), dep_graph);
let arenas = ty::CtxtArenas::new();
let ast_map = driver::make_map(&sess, &mut hir_forest);