Merge branch 'master' into rusty-hermit

commit ddcd157d03
230 changed files with 3546 additions and 2750 deletions
.gitignore (vendored), 11 changes

@@ -1,6 +1,10 @@
-# This file should only ignore things that are generated during a build,
-# generated by common IDEs, and optional files controlled by the user
-# that affect the build (such as config.toml).
+# This file should only ignore things that are generated during a `x.py` build,
+# generated by common IDEs, and optional files controlled by the user that
+# affect the build (such as config.toml).
+# In particular, things like `mir_dump` should not be listed here; they are only
+# created during manual debugging and many people like to clean up instead of
+# having git ignore such leftovers. You can use `.git/info/exclude` to
+# configure your local ignore list.
+# FIXME: This needs cleanup.
 *~
 .#*

@@ -52,3 +56,4 @@ config.stamp
 Session.vim
 .cargo
 no_llvm_build
+# Before adding new lines, see the comment at the top.
.gitmodules (vendored), 3 changes

@@ -28,9 +28,6 @@
 [submodule "src/doc/rust-by-example"]
     path = src/doc/rust-by-example
     url = https://github.com/rust-lang/rust-by-example.git
-[submodule "src/llvm-emscripten"]
-    path = src/llvm-emscripten
-    url = https://github.com/rust-lang/llvm.git
 [submodule "src/stdarch"]
     path = src/stdarch
     url = https://github.com/rust-lang/stdarch.git
@@ -1724,9 +1724,9 @@ checksum = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"

 [[package]]
 name = "libc"
-version = "0.2.62"
+version = "0.2.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba"
+checksum = "74dfca3d9957906e8d1e6a0b641dc9a59848e793f1da2165889fd4f62d10d79c"
 dependencies = [
  "rustc-std-workspace-core",
 ]

@@ -3567,6 +3567,7 @@ dependencies = [
 "rustc_plugin_impl",
 "rustc_privacy",
 "rustc_resolve",
 "rustc_target",
 "rustc_traits",
 "rustc_typeck",
 "serialize",
@@ -374,10 +374,7 @@

 # This is an array of the codegen backends that will be compiled for the rustc
 # that's being compiled. The default is to only build the LLVM codegen backend,
-# but you can also optionally enable the "emscripten" backend for asm.js or
-# make this an empty array (but that probably won't get too far in the
-# bootstrap)
-# FIXME: remove the obsolete emscripten backend option.
+# and currently the only standard option supported is `"llvm"`
 #codegen-backends = ["llvm"]

 # This is the name of the directory in which codegen backends will get installed
@@ -734,10 +734,6 @@ class RustBuild(object):
            if module.endswith("llvm-project"):
                if self.get_toml('llvm-config') and self.get_toml('lld') != 'true':
                    continue
-           if module.endswith("llvm-emscripten"):
-               backends = self.get_toml('codegen-backends')
-               if backends is None or not 'emscripten' in backends:
-                   continue
            check = self.check_submodule(module, slow_submodules)
            filtered_submodules.append((module, check))
            submodules_names.append(module)
@@ -161,7 +161,7 @@ impl Ord for Interned<String> {
     }
 }

-struct TyIntern<T: Hash + Clone + Eq> {
+struct TyIntern<T: Clone + Eq> {
     items: Vec<T>,
     set: HashMap<T, Interned<T>>,
 }
@@ -210,7 +210,6 @@ pub fn std_cargo(builder: &Builder<'_>,
         // config.toml equivalent) is used
         let llvm_config = builder.ensure(native::Llvm {
             target: builder.config.build,
-            emscripten: false,
         });
         cargo.env("LLVM_CONFIG", llvm_config);
         cargo.env("RUSTC_BUILD_SANITIZERS", "1");

@@ -615,36 +614,27 @@ pub fn build_codegen_backend(builder: &Builder<'_>,
                              compiler: &Compiler,
                              target: Interned<String>,
                              backend: Interned<String>) -> String {
-    let mut features = String::new();

     match &*backend {
-        "llvm" | "emscripten" => {
+        "llvm" => {
             // Build LLVM for our target. This will implicitly build the
             // host LLVM if necessary.
             let llvm_config = builder.ensure(native::Llvm {
                 target,
-                emscripten: backend == "emscripten",
             });

-            if backend == "emscripten" {
-                features.push_str(" emscripten");
-            }
-
             builder.info(&format!("Building stage{} codegen artifacts ({} -> {}, {})",
                 compiler.stage, &compiler.host, target, backend));

             // Pass down configuration from the LLVM build into the build of
             // librustc_llvm and librustc_codegen_llvm.
-            if builder.is_rust_llvm(target) && backend != "emscripten" {
+            if builder.is_rust_llvm(target) {
                 cargo.env("LLVM_RUSTLLVM", "1");
             }

             cargo.env("LLVM_CONFIG", &llvm_config);
-            if backend != "emscripten" {
-                let target_config = builder.config.target_config.get(&target);
-                if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-                    cargo.env("CFG_LLVM_ROOT", s);
-                }
+            let target_config = builder.config.target_config.get(&target);
+            if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+                cargo.env("CFG_LLVM_ROOT", s);
+            }
             // Some LLVM linker flags (-L and -l) may be needed to link librustc_llvm.
             if let Some(ref s) = builder.config.llvm_ldflags {

@@ -662,9 +652,7 @@ pub fn build_codegen_backend(builder: &Builder<'_>,
                 "libstdc++.a");
                 cargo.env("LLVM_STATIC_STDCPP", file);
             }
-            if builder.config.llvm_link_shared ||
-                (builder.config.llvm_thin_lto && backend != "emscripten")
-            {
+            if builder.config.llvm_link_shared || builder.config.llvm_thin_lto {
                 cargo.env("LLVM_LINK_SHARED", "1");
             }
             if builder.config.llvm_use_libcxx {

@@ -676,8 +664,7 @@ pub fn build_codegen_backend(builder: &Builder<'_>,
         }
         _ => panic!("unknown backend: {}", backend),
     }

-    features
+    String::new()
 }

 /// Creates the `codegen-backends` folder for a compiler that's about to be
@@ -668,7 +668,6 @@ impl Config {

     pub fn llvm_enabled(&self) -> bool {
         self.rust_codegen_backends.contains(&INTERNER.intern_str("llvm"))
-            || self.rust_codegen_backends.contains(&INTERNER.intern_str("emscripten"))
     }
 }
@@ -55,7 +55,6 @@ o("sanitizers", "build.sanitizers", "build the sanitizer runtimes (asan, lsan, m
 o("dist-src", "rust.dist-src", "when building tarballs enables building a source tarball")
 o("cargo-native-static", "build.cargo-native-static", "static native libraries in cargo")
 o("profiler", "build.profiler", "build the profiler runtime")
-o("emscripten", None, "compile the emscripten backend as well as LLVM")
 o("full-tools", None, "enable all tools")
 o("lld", "rust.lld", "build lld")
 o("lldb", "rust.lldb", "build lldb")

@@ -335,10 +334,8 @@ for key in known_args:
         set('build.host', value.split(','))
     elif option.name == 'target':
         set('build.target', value.split(','))
-    elif option.name == 'emscripten':
-        set('rust.codegen-backends', ['llvm', 'emscripten'])
     elif option.name == 'full-tools':
-        set('rust.codegen-backends', ['llvm', 'emscripten'])
+        set('rust.codegen-backends', ['llvm'])
         set('rust.lld', True)
         set('rust.llvm-tools', True)
         set('build.extended', True)
@@ -826,7 +826,6 @@ fn copy_src_dirs(builder: &Builder<'_>, src_dirs: &[&str], exclude_dirs: &[&str]

         const LLVM_TEST: &[&str] = &[
             "llvm-project/llvm/test", "llvm-project\\llvm\\test",
-            "llvm-emscripten/test", "llvm-emscripten\\test",
         ];
         if LLVM_TEST.iter().any(|path| spath.contains(path)) &&
             (spath.ends_with(".ll") ||

@@ -834,9 +833,6 @@ fn copy_src_dirs(builder: &Builder<'_>, src_dirs: &[&str], exclude_dirs: &[&str]
             spath.ends_with(".s")) {
             return false
         }
-        if spath.contains("test/emscripten") || spath.contains("test\\emscripten") {
-            return false
-        }

         let full_path = Path::new(dir).join(path);
         if exclude_dirs.iter().any(|excl| full_path == Path::new(excl)) {
@@ -232,7 +232,6 @@ pub struct Build {
     miri_info: channel::GitInfo,
     rustfmt_info: channel::GitInfo,
     in_tree_llvm_info: channel::GitInfo,
-    emscripten_llvm_info: channel::GitInfo,
     local_rebuild: bool,
     fail_fast: bool,
     doc_tests: DocTests,

@@ -351,7 +350,6 @@ impl Build {

         // we always try to use git for LLVM builds
         let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project"));
-        let emscripten_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-emscripten"));

         let mut build = Build {
             initial_rustc: config.initial_rustc.clone(),

@@ -376,7 +374,6 @@ impl Build {
             miri_info,
             rustfmt_info,
             in_tree_llvm_info,
-            emscripten_llvm_info,
             cc: HashMap::new(),
             cxx: HashMap::new(),
             ar: HashMap::new(),

@@ -553,10 +550,6 @@ impl Build {
         self.out.join(&*target).join("llvm")
     }

-    fn emscripten_llvm_out(&self, target: Interned<String>) -> PathBuf {
-        self.out.join(&*target).join("llvm-emscripten")
-    }
-
     fn lld_out(&self, target: Interned<String>) -> PathBuf {
         self.out.join(&*target).join("lld")
     }
@ -28,7 +28,6 @@ use crate::GitRepo;
|
|||
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
|
||||
pub struct Llvm {
|
||||
pub target: Interned<String>,
|
||||
pub emscripten: bool,
|
||||
}
|
||||
|
||||
impl Step for Llvm {
|
||||
|
@ -40,46 +39,35 @@ impl Step for Llvm {
|
|||
run.path("src/llvm-project")
|
||||
.path("src/llvm-project/llvm")
|
||||
.path("src/llvm")
|
||||
.path("src/llvm-emscripten")
|
||||
}
|
||||
|
||||
fn make_run(run: RunConfig<'_>) {
|
||||
let emscripten = run.path.ends_with("llvm-emscripten");
|
||||
run.builder.ensure(Llvm {
|
||||
target: run.target,
|
||||
emscripten,
|
||||
});
|
||||
}
|
||||
|
||||
/// Compile LLVM for `target`.
|
||||
fn run(self, builder: &Builder<'_>) -> PathBuf {
|
||||
let target = self.target;
|
||||
let emscripten = self.emscripten;
|
||||
|
||||
// If we're using a custom LLVM bail out here, but we can only use a
|
||||
// custom LLVM for the build triple.
|
||||
if !self.emscripten {
|
||||
if let Some(config) = builder.config.target_config.get(&target) {
|
||||
if let Some(ref s) = config.llvm_config {
|
||||
check_llvm_version(builder, s);
|
||||
return s.to_path_buf()
|
||||
}
|
||||
if let Some(config) = builder.config.target_config.get(&target) {
|
||||
if let Some(ref s) = config.llvm_config {
|
||||
check_llvm_version(builder, s);
|
||||
return s.to_path_buf()
|
||||
}
|
||||
}
|
||||
|
||||
let (llvm_info, root, out_dir, llvm_config_ret_dir) = if emscripten {
|
||||
let info = &builder.emscripten_llvm_info;
|
||||
let dir = builder.emscripten_llvm_out(target);
|
||||
let config_dir = dir.join("bin");
|
||||
(info, "src/llvm-emscripten", dir, config_dir)
|
||||
} else {
|
||||
let info = &builder.in_tree_llvm_info;
|
||||
let mut dir = builder.llvm_out(builder.config.build);
|
||||
if !builder.config.build.contains("msvc") || builder.config.ninja {
|
||||
dir.push("build");
|
||||
}
|
||||
(info, "src/llvm-project/llvm", builder.llvm_out(target), dir.join("bin"))
|
||||
};
|
||||
let llvm_info = &builder.in_tree_llvm_info;
|
||||
let root = "src/llvm-project/llvm";
|
||||
let out_dir = builder.llvm_out(target);
|
||||
let mut llvm_config_ret_dir = builder.llvm_out(builder.config.build);
|
||||
if !builder.config.build.contains("msvc") || builder.config.ninja {
|
||||
llvm_config_ret_dir.push("build");
|
||||
}
|
||||
llvm_config_ret_dir.push("bin");
|
||||
|
||||
let build_llvm_config = llvm_config_ret_dir
|
||||
.join(exe("llvm-config", &*builder.config.build));
|
||||
|
@ -107,8 +95,7 @@ impl Step for Llvm {
|
|||
}
|
||||
}
|
||||
|
||||
let descriptor = if emscripten { "Emscripten " } else { "" };
|
||||
builder.info(&format!("Building {}LLVM for {}", descriptor, target));
|
||||
builder.info(&format!("Building LLVM for {}", target));
|
||||
let _time = util::timeit(&builder);
|
||||
t!(fs::create_dir_all(&out_dir));
|
||||
|
||||
|
@ -123,23 +110,15 @@ impl Step for Llvm {
|
|||
|
||||
// NOTE: remember to also update `config.toml.example` when changing the
|
||||
// defaults!
|
||||
let llvm_targets = if self.emscripten {
|
||||
"JSBackend"
|
||||
} else {
|
||||
match builder.config.llvm_targets {
|
||||
Some(ref s) => s,
|
||||
None => "AArch64;ARM;Hexagon;MSP430;Mips;NVPTX;PowerPC;RISCV;\
|
||||
Sparc;SystemZ;WebAssembly;X86",
|
||||
}
|
||||
let llvm_targets = match &builder.config.llvm_targets {
|
||||
Some(s) => s,
|
||||
None => "AArch64;ARM;Hexagon;MSP430;Mips;NVPTX;PowerPC;RISCV;\
|
||||
Sparc;SystemZ;WebAssembly;X86",
|
||||
};
|
||||
|
||||
let llvm_exp_targets = if self.emscripten {
|
||||
""
|
||||
} else {
|
||||
match builder.config.llvm_experimental_targets {
|
||||
Some(ref s) => s,
|
||||
None => "",
|
||||
}
|
||||
let llvm_exp_targets = match builder.config.llvm_experimental_targets {
|
||||
Some(ref s) => s,
|
||||
None => "",
|
||||
};
|
||||
|
||||
let assertions = if builder.config.llvm_assertions {"ON"} else {"OFF"};
|
||||
|
@ -163,25 +142,23 @@ impl Step for Llvm {
|
|||
.define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
|
||||
.define("LLVM_DEFAULT_TARGET_TRIPLE", target);
|
||||
|
||||
if builder.config.llvm_thin_lto && !emscripten {
|
||||
if builder.config.llvm_thin_lto {
|
||||
cfg.define("LLVM_ENABLE_LTO", "Thin");
|
||||
if !target.contains("apple") {
|
||||
cfg.define("LLVM_ENABLE_LLD", "ON");
|
||||
}
|
||||
}
|
||||
|
||||
let want_lldb = builder.config.lldb_enabled && !self.emscripten;
|
||||
|
||||
// This setting makes the LLVM tools link to the dynamic LLVM library,
|
||||
// which saves both memory during parallel links and overall disk space
|
||||
// for the tools. We don't do this on every platform as it doesn't work
|
||||
// equally well everywhere.
|
||||
if builder.llvm_link_tools_dynamically(target) && !emscripten {
|
||||
if builder.llvm_link_tools_dynamically(target) {
|
||||
cfg.define("LLVM_LINK_LLVM_DYLIB", "ON");
|
||||
}
|
||||
|
||||
// For distribution we want the LLVM tools to be *statically* linked to libstdc++
|
||||
if builder.config.llvm_tools_enabled || want_lldb {
|
||||
if builder.config.llvm_tools_enabled || builder.config.lldb_enabled {
|
||||
if !target.contains("windows") {
|
||||
if target.contains("apple") {
|
||||
cfg.define("CMAKE_EXE_LINKER_FLAGS", "-static-libstdc++");
|
||||
|
@ -209,7 +186,7 @@ impl Step for Llvm {
|
|||
enabled_llvm_projects.push("compiler-rt");
|
||||
}
|
||||
|
||||
if want_lldb {
|
||||
if builder.config.lldb_enabled {
|
||||
enabled_llvm_projects.push("clang");
|
||||
enabled_llvm_projects.push("lldb");
|
||||
// For the time being, disable code signing.
|
||||
|
@ -234,10 +211,9 @@ impl Step for Llvm {
|
|||
}
|
||||
|
||||
// http://llvm.org/docs/HowToCrossCompileLLVM.html
|
||||
if target != builder.config.build && !emscripten {
|
||||
if target != builder.config.build {
|
||||
builder.ensure(Llvm {
|
||||
target: builder.config.build,
|
||||
emscripten: false,
|
||||
});
|
||||
// FIXME: if the llvm root for the build triple is overridden then we
|
||||
// should use llvm-tblgen from there, also should verify that it
|
||||
|
@ -481,7 +457,6 @@ impl Step for Lld {
|
|||
|
||||
let llvm_config = builder.ensure(Llvm {
|
||||
target: self.target,
|
||||
emscripten: false,
|
||||
});
|
||||
|
||||
let out_dir = builder.lld_out(target);
|
||||
|
|
|
@@ -427,7 +427,7 @@ impl Step for Miri {
         // (We do this separately from the above so that when the setup actually
         // happens we get some output.)
         // We re-use the `cargo` from above.
-        cargo.arg("--env");
+        cargo.arg("--print-sysroot");

         // FIXME: Is there a way in which we can re-use the usual `run` helpers?
         let miri_sysroot = if builder.config.dry_run {

@@ -437,13 +437,11 @@ impl Step for Miri {
             let out = cargo.output()
                 .expect("We already ran `cargo miri setup` before and that worked");
             assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code");
-            // Output is "MIRI_SYSROOT=<str>\n".
+            // Output is "<sysroot>\n".
             let stdout = String::from_utf8(out.stdout)
                 .expect("`cargo miri setup` stdout is not valid UTF-8");
-            let stdout = stdout.trim();
-            builder.verbose(&format!("`cargo miri setup --env` returned: {:?}", stdout));
-            let sysroot = stdout.splitn(2, '=')
-                .nth(1).expect("`cargo miri setup` stdout did not contain '='");
+            let sysroot = stdout.trim_end();
+            builder.verbose(&format!("`cargo miri setup --print-sysroot` said: {:?}", sysroot));
             sysroot.to_owned()
         };

@@ -1165,7 +1163,7 @@ impl Step for Compiletest {
                 }).to_string()
             })
         };
-        let lldb_exe = if builder.config.lldb_enabled && !target.contains("emscripten") {
+        let lldb_exe = if builder.config.lldb_enabled {
             // Test against the lldb that was just built.
             builder.llvm_out(target).join("bin").join("lldb")
         } else {

@@ -1234,7 +1232,6 @@ impl Step for Compiletest {
         if builder.config.llvm_enabled() {
             let llvm_config = builder.ensure(native::Llvm {
                 target: builder.config.build,
-                emscripten: false,
             });
             if !builder.config.dry_run {
                 let llvm_version = output(Command::new(&llvm_config).arg("--version"));
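The `cargo miri setup` hunks above stop parsing a `MIRI_SYSROOT=<path>` variable and instead take the whole `--print-sysroot` output, trimming only the trailing newline. A minimal standalone sketch of that pattern (not part of the diff; `rustc --print sysroot` stands in here for `cargo miri setup --print-sysroot`):

```rust
use std::process::Command;

fn main() {
    // Run a tool that prints a sysroot path on stdout.
    let out = Command::new("rustc")
        .args(&["--print", "sysroot"])
        .output()
        .expect("failed to run rustc");
    assert!(out.status.success(), "rustc returned a non-zero exit code");

    // Output is "<sysroot>\n": decode it and strip only the trailing newline,
    // mirroring the updated bootstrap code.
    let stdout = String::from_utf8(out.stdout).expect("stdout is not valid UTF-8");
    let sysroot = stdout.trim_end();
    println!("sysroot: {}", sysroot);
}
```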
@@ -139,7 +139,6 @@ ENV RUST_CONFIGURE_ARGS \
       --musl-root-aarch64=/musl-aarch64 \
       --musl-root-mips=/musl-mips \
       --musl-root-mipsel=/musl-mipsel \
-      --enable-emscripten \
       --disable-docs

 ENV SCRIPT \
@@ -47,7 +47,7 @@ function fetch_github_commit_archive {
     rm $cached
 }

-included="src/llvm-project src/llvm-emscripten src/doc/book src/doc/rust-by-example"
+included="src/llvm-project src/doc/book src/doc/rust-by-example"
 modules="$(git config --file .gitmodules --get-regexp '\.path$' | cut -d' ' -f2)"
 modules=($modules)
 use_git=""
@@ -596,30 +596,6 @@ warning: function cannot return without recursing
 ```

-## unions-with-drop-fields
-
-This lint detects use of unions that contain fields with possibly non-trivial drop code. Some
-example code that triggers this lint:
-
-```rust
-#![feature(untagged_unions)]
-
-union U {
-    s: String,
-}
-```
-
-This will produce:
-
-```text
-warning: union contains a field with possibly non-trivial drop code, drop code of union fields is ignored when dropping the union
- --> src/main.rs:4:5
-  |
-4 |     s: String,
-  |     ^^^^^^^^^
-  |
-```
-
 ## unknown-lints

 This lint detects unrecognized lint attribute. Some
@@ -167,7 +167,7 @@ pub trait FromIterator<A>: Sized {
     /// // and we'll implement IntoIterator
     /// impl IntoIterator for MyCollection {
     ///     type Item = i32;
-    ///     type IntoIter = ::std::vec::IntoIter<Self::Item>;
+    ///     type IntoIter = std::vec::IntoIter<Self::Item>;
     ///
     ///     fn into_iter(self) -> Self::IntoIter {
     ///         self.0.into_iter()
@@ -3757,8 +3757,8 @@ assert!(!10", stringify!($SelfT), ".is_power_of_two());", $EndFeature, "
 ```"),
             #[stable(feature = "rust1", since = "1.0.0")]
             #[inline]
-            pub fn is_power_of_two(self) -> bool {
-                (self.wrapping_sub(1)) & self == 0 && !(self == 0)
+            pub const fn is_power_of_two(self) -> bool {
+                self.count_ones() == 1
             }
         }

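The integer hunk above replaces the wrapping-subtraction bit trick with `count_ones() == 1` and turns the method into a `const fn`. A small self-contained sketch (not part of the diff; assumes a toolchain where this constness has reached stable) showing that the two forms agree and that the method is now usable in const context:

```rust
// Usable in a const because `is_power_of_two` is now a `const fn`.
const SIXTY_FOUR_IS_POW2: bool = 64u32.is_power_of_two();

// The previous implementation, kept here only for comparison.
fn old_is_power_of_two(x: u32) -> bool {
    (x.wrapping_sub(1)) & x == 0 && !(x == 0)
}

fn main() {
    assert!(SIXTY_FOUR_IS_POW2);
    for x in 0..=4096u32 {
        assert_eq!(x.is_power_of_two(), old_is_power_of_two(x));
        assert_eq!(x.is_power_of_two(), x.count_ones() == 1);
    }
}
```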
@@ -76,7 +76,7 @@ impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}
 /// ```
 /// # #![feature(dispatch_from_dyn, unsize)]
 /// # use std::{ops::DispatchFromDyn, marker::Unsize};
-/// # struct Rc<T: ?Sized>(::std::rc::Rc<T>);
+/// # struct Rc<T: ?Sized>(std::rc::Rc<T>);
 /// impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T>
 /// where
 ///     T: Unsize<U>,
|
|||
//!
|
||||
//! fn check_optional(optional: Option<Box<i32>>) {
|
||||
//! match optional {
|
||||
//! Some(ref p) => println!("has value {}", p),
|
||||
//! Some(p) => println!("has value {}", p),
|
||||
//! None => println!("has no value"),
|
||||
//! }
|
||||
//! }
|
||||
|
@ -83,7 +83,7 @@
|
|||
//! let msg = Some("howdy");
|
||||
//!
|
||||
//! // Take a reference to the contained string
|
||||
//! if let Some(ref m) = msg {
|
||||
//! if let Some(m) = &msg {
|
||||
//! println!("{}", *m);
|
||||
//! }
|
||||
//!
|
||||
|
@ -395,10 +395,10 @@ impl<T> Option<T> {
|
|||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn unwrap_or(self, def: T) -> T {
|
||||
pub fn unwrap_or(self, default: T) -> T {
|
||||
match self {
|
||||
Some(x) => x,
|
||||
None => def,
|
||||
None => default,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
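The `Option` documentation hunks above drop the explicit `ref` bindings in favor of the match-ergonomics style (matching on `&msg` with a plain binding), and rename the `unwrap_or` parameter from `def` to `default`. A short self-contained sketch (not part of the diff) showing that both spellings only borrow the value:

```rust
fn main() {
    let msg = Some(String::from("howdy"));

    // Older style: `ref` binding against the owned value.
    if let Some(ref m) = msg {
        println!("{}", m);
    }

    // Match-ergonomics style used by the updated docs: match on a reference,
    // and the binding `m` becomes `&String` automatically.
    if let Some(m) = &msg {
        println!("{}", m);
    }

    // `msg` was only borrowed above, so it is still available here.
    assert_eq!(msg.unwrap_or(String::from("fallback")), "howdy");
}
```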
@@ -188,7 +188,7 @@ unsafe fn real_drop_in_place<T: ?Sized>(to_drop: &mut T) {
 /// let p: *const i32 = ptr::null();
 /// assert!(p.is_null());
 /// ```
-#[inline]
+#[inline(always)]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_promotable]
 pub const fn null<T>() -> *const T { 0 as *const T }

@@ -203,7 +203,7 @@ pub const fn null<T>() -> *const T { 0 as *const T }
 /// let p: *mut i32 = ptr::null_mut();
 /// assert!(p.is_null());
 /// ```
-#[inline]
+#[inline(always)]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_promotable]
 pub const fn null_mut<T>() -> *mut T { 0 as *mut T }
@@ -176,7 +176,7 @@ Section: Creating a string
 /// ```
 /// fn from_utf8_lossy<F>(mut input: &[u8], mut push: F) where F: FnMut(&str) {
 ///     loop {
-///         match ::std::str::from_utf8(input) {
+///         match std::str::from_utf8(input) {
 ///             Ok(valid) => {
 ///                 push(valid);
 ///                 break

@@ -184,7 +184,7 @@ Section: Creating a string
 ///             Err(error) => {
 ///                 let (valid, after_valid) = input.split_at(error.valid_up_to());
 ///                 unsafe {
-///                     push(::std::str::from_utf8_unchecked(valid))
+///                     push(std::str::from_utf8_unchecked(valid))
 ///                 }
 ///                 push("\u{FFFD}");
 ///
@@ -36,5 +36,5 @@ parking_lot = "0.9"
 byteorder = { version = "1.3" }
 chalk-engine = { version = "0.9.0", default-features=false }
 rustc_fs_util = { path = "../librustc_fs_util" }
-smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
+smallvec = { version = "0.6.8", features = ["union", "may_dangle"] }
 measureme = "0.3"
@ -35,7 +35,7 @@ impl DepNodeIndex {
|
|||
pub const INVALID: DepNodeIndex = DepNodeIndex::MAX;
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[derive(PartialEq)]
|
||||
pub enum DepNodeColor {
|
||||
Red,
|
||||
Green(DepNodeIndex)
|
||||
|
|
|
@ -599,7 +599,6 @@ macro_rules! define_global_metadata_kind {
|
|||
(pub enum GlobalMetaDataKind {
|
||||
$($variant:ident),*
|
||||
}) => (
|
||||
#[derive(Clone, Copy, Debug, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub enum GlobalMetaDataKind {
|
||||
$($variant),*
|
||||
}
|
||||
|
|
|
@ -1077,7 +1077,7 @@ impl Mutability {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Hash, HashStable)]
|
||||
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable)]
|
||||
pub enum BinOpKind {
|
||||
/// The `+` operator (addition).
|
||||
Add,
|
||||
|
@ -1211,7 +1211,7 @@ impl Into<ast::BinOpKind> for BinOpKind {
|
|||
|
||||
pub type BinOp = Spanned<BinOpKind>;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Hash, HashStable)]
|
||||
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable)]
|
||||
pub enum UnOp {
|
||||
/// The `*` operator (deferencing).
|
||||
UnDeref,
|
||||
|
@ -1388,8 +1388,7 @@ impl Body {
|
|||
}
|
||||
|
||||
/// The type of source expression that caused this generator to be created.
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, HashStable,
|
||||
RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
||||
#[derive(Clone, PartialEq, Eq, HashStable, RustcEncodable, RustcDecodable, Debug, Copy)]
|
||||
pub enum GeneratorKind {
|
||||
/// An explicit `async` block or the body of an async function.
|
||||
Async(AsyncGeneratorKind),
|
||||
|
@ -1412,8 +1411,7 @@ impl fmt::Display for GeneratorKind {
|
|||
///
|
||||
/// This helps error messages but is also used to drive coercions in
|
||||
/// type-checking (see #60424).
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, HashStable,
|
||||
RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
||||
#[derive(Clone, PartialEq, Eq, HashStable, RustcEncodable, RustcDecodable, Debug, Copy)]
|
||||
pub enum AsyncGeneratorKind {
|
||||
/// An explicit `async` block written by the user.
|
||||
Block,
|
||||
|
|
|
@ -11,7 +11,7 @@ use rustc_serialize::{Encodable, Decodable, Encoder, Decoder};
|
|||
|
||||
use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
|
||||
/// An owned smart pointer.
|
||||
#[derive(Hash, PartialEq, Eq)]
|
||||
#[derive(PartialEq, Eq)]
|
||||
pub struct P<T: ?Sized> {
|
||||
ptr: Box<T>
|
||||
}
|
||||
|
|
|
@ -468,7 +468,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> {
|
|||
ConstValue::Infer(InferConst::Fresh(_)) => {
|
||||
bug!("encountered a fresh const during canonicalization")
|
||||
}
|
||||
ConstValue::Infer(InferConst::Canonical(debruijn, _)) => {
|
||||
ConstValue::Bound(debruijn, _) => {
|
||||
if debruijn >= self.binder_index {
|
||||
bug!("escaping bound type during canonicalization")
|
||||
} else {
|
||||
|
@ -700,8 +700,8 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
|
|||
let var = self.canonical_var(info, const_var.into());
|
||||
self.tcx().mk_const(
|
||||
ty::Const {
|
||||
val: ConstValue::Infer(InferConst::Canonical(self.binder_index, var.into())),
|
||||
ty: const_var.ty,
|
||||
val: ConstValue::Bound(self.binder_index, var.into()),
|
||||
ty: self.fold_ty(const_var.ty),
|
||||
}
|
||||
)
|
||||
}
|
||||
|
|
|
@ -33,7 +33,7 @@ use std::ops::Index;
|
|||
use syntax::source_map::Span;
|
||||
use crate::ty::fold::TypeFoldable;
|
||||
use crate::ty::subst::GenericArg;
|
||||
use crate::ty::{self, BoundVar, InferConst, Lift, List, Region, TyCtxt};
|
||||
use crate::ty::{self, BoundVar, Lift, List, Region, TyCtxt};
|
||||
|
||||
mod canonicalizer;
|
||||
|
||||
|
@ -73,7 +73,7 @@ pub struct CanonicalVarValues<'tcx> {
|
|||
/// various parts of it with canonical variables. This struct stores
|
||||
/// those replaced bits to remember for when we process the query
|
||||
/// result.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct OriginalQueryValues<'tcx> {
|
||||
/// Map from the universes that appear in the query to the
|
||||
/// universes in the caller context. For the time being, we only
|
||||
|
@ -510,9 +510,7 @@ impl<'tcx> CanonicalVarValues<'tcx> {
|
|||
GenericArgKind::Const(ct) => {
|
||||
tcx.mk_const(ty::Const {
|
||||
ty: ct.ty,
|
||||
val: ConstValue::Infer(
|
||||
InferConst::Canonical(ty::INNERMOST, ty::BoundVar::from_u32(i))
|
||||
),
|
||||
val: ConstValue::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i)),
|
||||
}).into()
|
||||
}
|
||||
})
|
||||
|
|
|
@ -26,7 +26,7 @@ use crate::traits::TraitEngine;
|
|||
use crate::traits::{Obligation, ObligationCause, PredicateObligation};
|
||||
use crate::ty::fold::TypeFoldable;
|
||||
use crate::ty::subst::{GenericArg, GenericArgKind};
|
||||
use crate::ty::{self, BoundVar, InferConst, Ty, TyCtxt};
|
||||
use crate::ty::{self, BoundVar, Ty, TyCtxt};
|
||||
use crate::util::captures::Captures;
|
||||
|
||||
impl<'tcx> InferCtxtBuilder<'tcx> {
|
||||
|
@ -493,10 +493,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
|
|||
}
|
||||
}
|
||||
GenericArgKind::Const(result_value) => {
|
||||
if let ty::Const {
|
||||
val: ConstValue::Infer(InferConst::Canonical(debrujin, b)),
|
||||
..
|
||||
} = result_value {
|
||||
if let ty::Const { val: ConstValue::Bound(debrujin, b), .. } = result_value {
|
||||
// ...in which case we would set `canonical_vars[0]` to `Some(const X)`.
|
||||
|
||||
// We only allow a `ty::INNERMOST` index in substitutions.
|
||||
|
|
|
@ -53,7 +53,7 @@ pub struct CombineFields<'infcx, 'tcx> {
|
|||
pub obligations: PredicateObligations<'tcx>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub enum RelationDir {
|
||||
SubtypeOf, SupertypeOf, EqTo
|
||||
}
|
||||
|
|
|
@ -252,7 +252,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for TypeFreshener<'a, 'tcx> {
|
|||
return ct;
|
||||
}
|
||||
|
||||
ConstValue::Infer(ty::InferConst::Canonical(..)) |
|
||||
ConstValue::Bound(..) |
|
||||
ConstValue::Placeholder(_) => {
|
||||
bug!("unexpected const {:?}", ct)
|
||||
}
|
||||
|
|
|
@ -407,7 +407,7 @@ pub enum RegionVariableOrigin {
|
|||
NLL(NLLRegionVariableOrigin),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub enum NLLRegionVariableOrigin {
|
||||
/// During NLL region processing, we create variables for free
|
||||
/// regions that we encounter in the function signature and
|
||||
|
|
|
@ -27,12 +27,12 @@ use crate::ty::error::TypeError;
|
|||
use crate::ty::fold::{TypeFoldable, TypeVisitor};
|
||||
use crate::ty::relate::{self, Relate, RelateResult, TypeRelation};
|
||||
use crate::ty::subst::GenericArg;
|
||||
use crate::ty::{self, Ty, TyCtxt, InferConst};
|
||||
use crate::ty::{self, Ty, TyCtxt};
|
||||
use crate::mir::interpret::ConstValue;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use std::fmt::Debug;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(PartialEq)]
|
||||
pub enum NormalizationStrategy {
|
||||
Lazy,
|
||||
Eager,
|
||||
|
@ -618,7 +618,7 @@ where
|
|||
a: &'tcx ty::Const<'tcx>,
|
||||
b: &'tcx ty::Const<'tcx>,
|
||||
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
|
||||
if let ty::Const { val: ConstValue::Infer(InferConst::Canonical(_, _)), .. } = a {
|
||||
if let ty::Const { val: ConstValue::Bound(..), .. } = a {
|
||||
// FIXME(const_generics): I'm unsure how this branch should actually be handled,
|
||||
// so this is probably not correct.
|
||||
self.infcx.super_combine_consts(self, a, b)
|
||||
|
@ -993,7 +993,7 @@ where
|
|||
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
|
||||
debug!("TypeGeneralizer::consts(a={:?})", a);
|
||||
|
||||
if let ty::Const { val: ConstValue::Infer(InferConst::Canonical(_, _)), .. } = a {
|
||||
if let ty::Const { val: ConstValue::Bound(..), .. } = a {
|
||||
bug!(
|
||||
"unexpected inference variable encountered in NLL generalization: {:?}",
|
||||
a
|
||||
|
|
|
@ -116,7 +116,7 @@ pub struct RegionConstraintData<'tcx> {
|
|||
}
|
||||
|
||||
/// Represents a constraint that influences the inference process.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, PartialOrd, Ord)]
|
||||
pub enum Constraint<'tcx> {
|
||||
/// A region variable is a subregion of another.
|
||||
VarSubVar(RegionVid, RegionVid),
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use super::{InferCtxt, FixupError, FixupResult, Span};
|
||||
use super::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
|
||||
use crate::mir::interpret::ConstValue;
|
||||
use crate::ty::{self, Ty, Const, TyCtxt, TypeFoldable, InferConst, TypeFlags};
|
||||
use crate::ty::{self, Ty, Const, TyCtxt, TypeFoldable, InferConst};
|
||||
use crate::ty::fold::{TypeFolder, TypeVisitor};
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
|
@ -29,7 +29,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for OpportunisticVarResolver<'a, 'tcx> {
|
|||
}
|
||||
|
||||
fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
|
||||
if !t.has_infer_types() {
|
||||
if !t.has_infer_types() && !t.has_infer_consts() {
|
||||
t // micro-optimize -- if there is nothing in this type that this fold affects...
|
||||
} else {
|
||||
let t = self.infcx.shallow_resolve(t);
|
||||
|
@ -38,7 +38,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for OpportunisticVarResolver<'a, 'tcx> {
|
|||
}
|
||||
|
||||
fn fold_const(&mut self, ct: &'tcx Const<'tcx>) -> &'tcx Const<'tcx> {
|
||||
if !ct.has_type_flags(TypeFlags::HAS_CT_INFER) {
|
||||
if !ct.has_infer_consts() {
|
||||
ct // micro-optimize -- if there is nothing in this const that this fold affects...
|
||||
} else {
|
||||
let ct = self.infcx.shallow_resolve(ct);
|
||||
|
|
|
@ -43,6 +43,7 @@
|
|||
#![feature(nll)]
|
||||
#![feature(non_exhaustive)]
|
||||
#![feature(optin_builtin_traits)]
|
||||
#![feature(option_expect_none)]
|
||||
#![feature(range_is_empty)]
|
||||
#![feature(slice_patterns)]
|
||||
#![feature(specialization)]
|
||||
|
|
|
@ -202,11 +202,7 @@ impl<'a> LintLevelsBuilder<'a> {
|
|||
let meta = unwrap_or!(attr.meta(), continue);
|
||||
attr::mark_used(attr);
|
||||
|
||||
let mut metas = if let Some(metas) = meta.meta_item_list() {
|
||||
metas
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
let mut metas = unwrap_or!(meta.meta_item_list(), continue);
|
||||
|
||||
if metas.is_empty() {
|
||||
// FIXME (#55112): issue unused-attributes lint for `#[level()]`
|
||||
|
|
|
@ -117,7 +117,7 @@ pub struct NativeLibrary {
|
|||
pub wasm_import_module: Option<Symbol>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Hash, RustcEncodable, RustcDecodable, HashStable)]
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct ForeignModule {
|
||||
pub foreign_items: Vec<DefId>,
|
||||
pub def_id: DefId,
|
||||
|
|
|
@ -102,7 +102,7 @@ pub struct Upvar {
|
|||
}
|
||||
|
||||
// different kinds of pointers:
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
pub enum PointerKind<'tcx> {
|
||||
/// `Box<T>`
|
||||
Unique,
|
||||
|
@ -116,7 +116,7 @@ pub enum PointerKind<'tcx> {
|
|||
|
||||
// We use the term "interior" to mean "something reachable from the
|
||||
// base without a pointer dereference", e.g., a field
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub enum InteriorKind {
|
||||
InteriorField(FieldIndex),
|
||||
InteriorElement(InteriorOffsetKind),
|
||||
|
@ -139,13 +139,13 @@ impl Hash for FieldIndex {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub enum InteriorOffsetKind {
|
||||
Index, // e.g., `array_expr[index_expr]`
|
||||
Pattern, // e.g., `fn foo([_, a, _, _]: [A; 4]) { ... }`
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Clone, Copy, PartialEq, Debug)]
|
||||
pub enum MutabilityCategory {
|
||||
McImmutable, // Immutable.
|
||||
McDeclared, // Directly declared as mutable.
|
||||
|
|
|
@ -25,7 +25,7 @@ use crate::util::nodemap::{FxHashSet, FxHashMap};
|
|||
use std::mem::replace;
|
||||
use std::cmp::Ordering;
|
||||
|
||||
#[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Copy, Debug, Eq, Hash)]
|
||||
#[derive(PartialEq, Clone, Copy, Debug)]
|
||||
pub enum StabilityLevel {
|
||||
Unstable,
|
||||
Stable,
|
||||
|
@ -905,11 +905,10 @@ pub fn check_unused_or_stable_features(tcx: TyCtxt<'_>) {
|
|||
// Warn if the user has enabled an already-stable lang feature.
|
||||
unnecessary_stable_feature_lint(tcx, span, feature, since);
|
||||
}
|
||||
if lang_features.contains(&feature) {
|
||||
if !lang_features.insert(feature) {
|
||||
// Warn if the user enables a lang feature multiple times.
|
||||
duplicate_feature_err(tcx.sess, span, feature);
|
||||
}
|
||||
lang_features.insert(feature);
|
||||
}
|
||||
|
||||
let declared_lib_features = &tcx.features().declared_lib_features;
|
||||
|
|
|
@ -245,6 +245,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
/// as a slice.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
/// Most likely, you want to use the `PlaceTy` and `OperandTy`-based methods
|
||||
/// on `InterpCx` instead.
|
||||
#[inline]
|
||||
pub fn get_bytes(
|
||||
&self,
|
||||
|
@ -275,6 +277,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
/// so be sure to actually put data there!
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
/// Most likely, you want to use the `PlaceTy` and `OperandTy`-based methods
|
||||
/// on `InterpCx` instead.
|
||||
pub fn get_bytes_mut(
|
||||
&mut self,
|
||||
cx: &impl HasDataLayout,
|
||||
|
@ -297,6 +301,8 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
||||
/// Reads bytes until a `0` is encountered. Will error if the end of the allocation is reached
|
||||
/// before a `0` is found.
|
||||
///
|
||||
/// Most likely, you want to call `Memory::read_c_str` instead of this method.
|
||||
pub fn read_c_str(
|
||||
&self,
|
||||
cx: &impl HasDataLayout,
|
||||
|
@ -342,33 +348,22 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
/// Writes `src` to the memory starting at `ptr.offset`.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
/// Most likely, you want to call `Memory::write_bytes` instead of this method.
|
||||
pub fn write_bytes(
|
||||
&mut self,
|
||||
cx: &impl HasDataLayout,
|
||||
ptr: Pointer<Tag>,
|
||||
src: &[u8],
|
||||
src: impl IntoIterator<Item=u8, IntoIter: iter::ExactSizeIterator>,
|
||||
) -> InterpResult<'tcx>
|
||||
{
|
||||
let mut src = src.into_iter();
|
||||
let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(src.len() as u64))?;
|
||||
bytes.clone_from_slice(src);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sets `count` bytes starting at `ptr.offset` with `val`. Basically `memset`.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
pub fn write_repeat(
|
||||
&mut self,
|
||||
cx: &impl HasDataLayout,
|
||||
ptr: Pointer<Tag>,
|
||||
val: u8,
|
||||
count: Size
|
||||
) -> InterpResult<'tcx>
|
||||
{
|
||||
let bytes = self.get_bytes_mut(cx, ptr, count)?;
|
||||
for b in bytes {
|
||||
*b = val;
|
||||
// `zip` would stop when the first iterator ends; we want to definitely
|
||||
// cover all of `bytes`.
|
||||
for dest in bytes {
|
||||
*dest = src.next().expect("iterator was shorter than it said it would be");
|
||||
}
|
||||
src.next().expect_none("iterator was longer than it said it would be");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -380,6 +375,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
/// pointers being valid for ZSTs.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
/// Most likely, you want to call `InterpCx::read_scalar` instead of this method.
|
||||
pub fn read_scalar(
|
||||
&self,
|
||||
cx: &impl HasDataLayout,
|
||||
|
@ -418,6 +414,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
/// Reads a pointer-sized scalar.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
/// Most likely, you want to call `InterpCx::read_scalar` instead of this method.
|
||||
pub fn read_ptr_sized(
|
||||
&self,
|
||||
cx: &impl HasDataLayout,
|
||||
|
@ -435,6 +432,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
/// pointers being valid for ZSTs.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
/// Most likely, you want to call `InterpCx::write_scalar` instead of this method.
|
||||
pub fn write_scalar(
|
||||
&mut self,
|
||||
cx: &impl HasDataLayout,
|
||||
|
@ -477,6 +475,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
|
|||
/// Writes a pointer-sized scalar.
|
||||
///
|
||||
/// It is the caller's responsibility to check bounds and alignment beforehand.
|
||||
/// Most likely, you want to call `InterpCx::write_scalar` instead of this method.
|
||||
pub fn write_ptr_sized(
|
||||
&mut self,
|
||||
cx: &impl HasDataLayout,
|
||||
|
|
|
@ -5,11 +5,12 @@ use rustc_apfloat::{Float, ieee::{Double, Single}};
|
|||
use crate::ty::{Ty, InferConst, ParamConst, layout::{HasDataLayout, Size}, subst::SubstsRef};
|
||||
use crate::ty::PlaceholderConst;
|
||||
use crate::hir::def_id::DefId;
|
||||
use crate::ty::{BoundVar, DebruijnIndex};
|
||||
|
||||
use super::{InterpResult, Pointer, PointerArithmetic, Allocation, AllocId, sign_extend, truncate};
|
||||
|
||||
/// Represents the result of a raw const operation, pre-validation.
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable, Hash, HashStable)]
|
||||
#[derive(Clone, HashStable)]
|
||||
pub struct RawConst<'tcx> {
|
||||
// the value lives here, at offset 0, and that allocation definitely is a `AllocKind::Memory`
|
||||
// (so you can use `AllocMap::unwrap_memory`).
|
||||
|
@ -28,6 +29,9 @@ pub enum ConstValue<'tcx> {
|
|||
/// Infer the value of the const.
|
||||
Infer(InferConst<'tcx>),
|
||||
|
||||
/// Bound const variable, used only when preparing a trait query.
|
||||
Bound(DebruijnIndex, BoundVar),
|
||||
|
||||
/// A placeholder const - universally quantified higher-ranked const.
|
||||
Placeholder(PlaceholderConst),
|
||||
|
||||
|
@ -66,8 +70,9 @@ impl<'tcx> ConstValue<'tcx> {
|
|||
match *self {
|
||||
ConstValue::Param(_) |
|
||||
ConstValue::Infer(_) |
|
||||
ConstValue::Bound(..) |
|
||||
ConstValue::Placeholder(_) |
|
||||
ConstValue::ByRef{ .. } |
|
||||
ConstValue::ByRef { .. } |
|
||||
ConstValue::Unevaluated(..) |
|
||||
ConstValue::Slice { .. } => None,
|
||||
ConstValue::Scalar(val) => Some(val),
|
||||
|
@ -487,7 +492,7 @@ impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, RustcEncodable, RustcDecodable)]
|
||||
#[derive(Clone, Copy, Eq, PartialEq, RustcEncodable, RustcDecodable)]
|
||||
pub enum ScalarMaybeUndef<Tag = (), Id = AllocId> {
|
||||
Scalar(Scalar<Tag, Id>),
|
||||
Undef,
|
||||
|
|
|
@ -468,7 +468,7 @@ impl<T: Decodable> rustc_serialize::UseSpecializedDecodable for ClearCrossCrate<
|
|||
/// Grouped information about the source code origin of a MIR entity.
|
||||
/// Intended to be inspected by diagnostics and debuginfo.
|
||||
/// Most passes can work with it as a whole, within a single function.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, HashStable)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct SourceInfo {
|
||||
/// The source span for the AST pertaining to this MIR entity.
|
||||
pub span: Span,
|
||||
|
@ -608,7 +608,7 @@ pub enum LocalKind {
|
|||
ReturnPointer,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub struct VarBindingForm<'tcx> {
|
||||
/// Is variable bound via `x`, `mut x`, `ref x`, or `ref mut x`?
|
||||
pub binding_mode: ty::BindingMode,
|
||||
|
@ -630,7 +630,7 @@ pub struct VarBindingForm<'tcx> {
|
|||
pub pat_span: Span,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub enum BindingForm<'tcx> {
|
||||
/// This is a binding for a non-`self` binding, or a `self` that has an explicit type.
|
||||
Var(VarBindingForm<'tcx>),
|
||||
|
@ -641,7 +641,7 @@ pub enum BindingForm<'tcx> {
|
|||
}
|
||||
|
||||
/// Represents what type of implicit self a function has, if any.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
#[derive(Clone, Copy, PartialEq, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub enum ImplicitSelfKind {
|
||||
/// Represents a `fn x(self);`.
|
||||
Imm,
|
||||
|
@ -2392,7 +2392,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
|
|||
/// this does not necessarily mean that they are "==" in Rust -- in
|
||||
/// particular one must be wary of `NaN`!
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
|
||||
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct Constant<'tcx> {
|
||||
pub span: Span,
|
||||
|
||||
|
@ -2438,7 +2438,7 @@ pub struct Constant<'tcx> {
|
|||
/// The first will lead to the constraint `w: &'1 str` (for some
|
||||
/// inferred region `'1`). The second will lead to the constraint `w:
|
||||
/// &'static str`.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
|
||||
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct UserTypeProjections {
|
||||
pub(crate) contents: Vec<(UserTypeProjection, Span)>,
|
||||
}
|
||||
|
@ -2515,7 +2515,7 @@ impl<'tcx> UserTypeProjections {
|
|||
/// * `let (x, _): T = ...` -- here, the `projs` vector would contain
|
||||
/// `field[0]` (aka `.0`), indicating that the type of `s` is
|
||||
/// determined by finding the type of the `.0` field from `T`.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
|
||||
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct UserTypeProjection {
|
||||
pub base: UserTypeAnnotationIndex,
|
||||
pub projs: Vec<ProjectionKind>,
|
||||
|
@ -2724,7 +2724,7 @@ impl Location {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
|
||||
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub enum UnsafetyViolationKind {
|
||||
General,
|
||||
/// Permitted both in `const fn`s and regular `fn`s.
|
||||
|
@ -2733,7 +2733,7 @@ pub enum UnsafetyViolationKind {
|
|||
BorrowPacked(hir::HirId),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
|
||||
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct UnsafetyViolation {
|
||||
pub source_info: SourceInfo,
|
||||
pub description: InternedString,
|
||||
|
@ -2741,7 +2741,7 @@ pub struct UnsafetyViolation {
|
|||
pub kind: UnsafetyViolationKind,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, HashStable)]
|
||||
pub struct UnsafetyCheckResult {
|
||||
/// Violations that are propagated *upwards* from this function.
|
||||
pub violations: Lrc<[UnsafetyViolation]>,
|
||||
|
|
|
@ -15,7 +15,7 @@ use std::fmt;
|
|||
use std::hash::Hash;
|
||||
|
||||
/// Describes how a monomorphization will be instantiated in object files.
|
||||
#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
|
||||
#[derive(PartialEq)]
|
||||
pub enum InstantiationMode {
|
||||
/// There will be exactly one instance of the given MonoItem. It will have
|
||||
/// external linkage so that it can be linked to from other codegen units.
|
||||
|
@ -251,7 +251,7 @@ pub struct CodegenUnit<'tcx> {
|
|||
size_estimate: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub enum Linkage {
|
||||
External,
|
||||
AvailableExternally,
|
||||
|
@ -280,7 +280,7 @@ impl_stable_hash_for!(enum self::Linkage {
|
|||
Common
|
||||
});
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Copy, Clone, PartialEq, Debug)]
|
||||
pub enum Visibility {
|
||||
Default,
|
||||
Hidden,
|
||||
|
|
|
@ -947,7 +947,7 @@ impl<'tcx> MirVisitable<'tcx> for Option<Terminator<'tcx>> {
|
|||
|
||||
/// Extra information passed to `visit_ty` and friends to give context
|
||||
/// about where the type etc appears.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug)]
|
||||
pub enum TyContext {
|
||||
LocalDecl {
|
||||
/// The index of the local variable we are visiting.
|
||||
|
|
|
@ -7,24 +7,19 @@ use crate::session::{early_error, early_warn, Session};
|
|||
use crate::session::search_paths::SearchPath;
|
||||
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
|
||||
use rustc_target::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, RelroLevel};
|
||||
use rustc_target::spec::{Target, TargetTriple};
|
||||
|
||||
use syntax;
|
||||
use syntax::ast::{self, IntTy, UintTy, MetaItemKind};
|
||||
use syntax::ast::{self, IntTy, UintTy};
|
||||
use syntax::source_map::{FileName, FilePathMapping};
|
||||
use syntax::edition::{Edition, EDITION_NAME_LIST, DEFAULT_EDITION};
|
||||
use syntax::parse::new_parser_from_source_str;
|
||||
use syntax::parse::token;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::symbol::{sym, Symbol};
|
||||
use syntax::feature_gate::UnstableFeatures;
|
||||
use syntax::source_map::SourceMap;
|
||||
|
||||
use errors::emitter::HumanReadableErrorType;
|
||||
use errors::{ColorConfig, FatalError, Handler, SourceMapperDyn};
|
||||
use errors::{ColorConfig, FatalError, Handler};
|
||||
|
||||
use getopts;
|
||||
|
||||
|
@ -67,7 +62,7 @@ impl_stable_hash_via_hash!(OptLevel);
|
|||
|
||||
/// This is what the `LtoCli` values get mapped to after resolving defaults and
|
||||
/// and taking other command line options into account.
|
||||
#[derive(Clone, Copy, PartialEq, Hash, Debug)]
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub enum Lto {
|
||||
/// Don't do any LTO whatsoever
|
||||
No,
|
||||
|
@ -301,10 +296,10 @@ impl OutputTypes {
|
|||
/// Use tree-based collections to cheaply get a deterministic `Hash` implementation.
|
||||
/// *Do not* switch `BTreeMap` or `BTreeSet` out for an unsorted container type! That
|
||||
/// would break dependency tracking for command-line arguments.
|
||||
#[derive(Clone, Hash)]
|
||||
#[derive(Clone)]
|
||||
pub struct Externs(BTreeMap<String, ExternEntry>);
|
||||
|
||||
#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug, Default)]
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct ExternEntry {
|
||||
pub locations: BTreeSet<Option<String>>,
|
||||
pub is_private_dep: bool
|
||||
|
@ -464,7 +459,7 @@ pub enum PrintRequest {
|
|||
NativeStaticLibs,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum BorrowckMode {
|
||||
Mir,
|
||||
Migrate,
|
||||
|
@ -1854,59 +1849,6 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
|
|||
opts
|
||||
}
|
||||
|
||||
struct NullEmitter;
|
||||
|
||||
impl errors::emitter::Emitter for NullEmitter {
|
||||
fn emit_diagnostic(&mut self, _: &errors::Diagnostic) {}
|
||||
fn source_map(&self) -> Option<&Lrc<SourceMapperDyn>> { None }
|
||||
}
|
||||
|
||||
// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
|
||||
pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
|
||||
syntax::with_default_globals(move || {
|
||||
let cfg = cfgspecs.into_iter().map(|s| {
|
||||
|
||||
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
|
||||
let handler = Handler::with_emitter(false, None, Box::new(NullEmitter));
|
||||
let sess = ParseSess::with_span_handler(handler, cm);
|
||||
let filename = FileName::cfg_spec_source_code(&s);
|
||||
let mut parser = new_parser_from_source_str(&sess, filename, s.to_string());
|
||||
|
||||
macro_rules! error {($reason: expr) => {
|
||||
early_error(ErrorOutputType::default(),
|
||||
&format!(concat!("invalid `--cfg` argument: `{}` (", $reason, ")"), s));
|
||||
}}
|
||||
|
||||
match &mut parser.parse_meta_item() {
|
||||
Ok(meta_item) if parser.token == token::Eof => {
|
||||
if meta_item.path.segments.len() != 1 {
|
||||
error!("argument key must be an identifier");
|
||||
}
|
||||
match &meta_item.kind {
|
||||
MetaItemKind::List(..) => {
|
||||
error!(r#"expected `key` or `key="value"`"#);
|
||||
}
|
||||
MetaItemKind::NameValue(lit) if !lit.kind.is_str() => {
|
||||
error!("argument value must be a string");
|
||||
}
|
||||
MetaItemKind::NameValue(..) | MetaItemKind::Word => {
|
||||
let ident = meta_item.ident().expect("multi-segment cfg key");
|
||||
return (ident.name, meta_item.value_str());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(..) => {}
|
||||
Err(err) => err.cancel(),
|
||||
}
|
||||
|
||||
error!(r#"expected `key` or `key="value"`"#);
|
||||
}).collect::<ast::CrateConfig>();
|
||||
cfg.into_iter().map(|(a, b)| {
|
||||
(a.to_string(), b.map(|b| b.to_string()))
|
||||
}).collect()
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_cmd_lint_options(matches: &getopts::Matches,
|
||||
error_format: ErrorOutputType)
|
||||
-> (Vec<(String, lint::Level)>, bool, Option<lint::Level>) {
|
||||
|
@ -2877,6 +2819,3 @@ mod dep_tracking {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
use std::path::{Path, PathBuf};
|
||||
use rustc_macros::HashStable;
|
||||
use crate::session::{early_error, config};
|
||||
use crate::session::filesearch::make_target_lib_path;
|
||||
|
||||
|
@ -10,7 +9,7 @@ pub struct SearchPath {
|
|||
pub files: Vec<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Eq, PartialEq, Clone, Copy, Debug, PartialOrd, Ord, Hash, HashStable)]
|
||||
#[derive(PartialEq, Clone, Copy, Debug, HashStable)]
|
||||
pub enum PathKind {
|
||||
Native,
|
||||
Crate,
|
||||
|
|
|
@ -40,7 +40,7 @@ pub type CanonicalTypeOpProvePredicateGoal<'tcx> =
|
|||
pub type CanonicalTypeOpNormalizeGoal<'tcx, T> =
|
||||
Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::normalize::Normalize<T>>>;
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct NoSolution;
|
||||
|
||||
pub type Fallible<T> = Result<T, NoSolution>;
|
||||
|
|
|
@ -3,7 +3,7 @@ use crate::traits::query::outlives_bounds::OutlivesBound;
|
|||
use crate::traits::query::Fallible;
|
||||
use crate::ty::{ParamEnvAnd, Ty, TyCtxt};
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ImpliedOutlivesBounds<'tcx> {
|
||||
pub ty: Ty<'tcx>,
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ use crate::hir::BindingAnnotation::*;
|
|||
use crate::hir::BindingAnnotation;
|
||||
use crate::hir::Mutability;
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
|
||||
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
|
||||
pub enum BindingMode {
|
||||
BindByReference(Mutability),
|
||||
BindByValue(Mutability),
|
||||
|
|
|
@@ -827,7 +827,7 @@ rustc_index::newtype_index! {
pub type CanonicalUserTypeAnnotations<'tcx> =
IndexVec<UserTypeAnnotationIndex, CanonicalUserTypeAnnotation<'tcx>>;

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct CanonicalUserTypeAnnotation<'tcx> {
pub user_ty: CanonicalUserType<'tcx>,
pub span: Span,

@@ -882,7 +882,7 @@ impl CanonicalUserType<'tcx> {
},

GenericArgKind::Const(ct) => match ct.val {
ConstValue::Infer(InferConst::Canonical(debruijn, b)) => {
ConstValue::Bound(debruijn, b) => {
// We only allow a `ty::INNERMOST` index in substitutions.
assert_eq!(debruijn, ty::INNERMOST);
cvar == b

@@ -899,7 +899,7 @@ impl CanonicalUserType<'tcx> {
/// A user-given type annotation attached to a constant. These arise
/// from constants that are named via paths, like `Foo::<A>::new` and
/// so forth.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
#[derive(Copy, Clone, Debug, PartialEq, RustcEncodable, RustcDecodable, HashStable)]
pub enum UserType<'tcx> {
Ty(Ty<'tcx>),

@@ -51,7 +51,6 @@ pub enum TypeError<'tcx> {
IntrinsicCast,
}

#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum UnconstrainedNumeric {
UnconstrainedFloat,
UnconstrainedInt,

@@ -19,7 +19,7 @@ pub type SimplifiedType = SimplifiedTypeGen<DefId>;
/// the non-stable but fast to construct DefId-version is the better choice.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, RustcEncodable, RustcDecodable)]
pub enum SimplifiedTypeGen<D>
where D: Copy + Debug + Ord + Eq + Hash
where D: Copy + Debug + Ord + Eq
{
BoolSimplifiedType,
CharSimplifiedType,

@@ -123,10 +123,10 @@ pub fn simplify_type(
}
}

impl<D: Copy + Debug + Ord + Eq + Hash> SimplifiedTypeGen<D> {
impl<D: Copy + Debug + Ord + Eq> SimplifiedTypeGen<D> {
pub fn map_def<U, F>(self, map: F) -> SimplifiedTypeGen<U>
where F: Fn(D) -> U,
U: Copy + Debug + Ord + Eq + Hash,
U: Copy + Debug + Ord + Eq,
{
match self {
BoolSimplifiedType => BoolSimplifiedType,

@@ -155,7 +155,7 @@ impl<D: Copy + Debug + Ord + Eq + Hash> SimplifiedTypeGen<D> {

impl<'a, D> HashStable<StableHashingContext<'a>> for SimplifiedTypeGen<D>
where
D: Copy + Debug + Ord + Eq + Hash + HashStable<StableHashingContext<'a>>,
D: Copy + Debug + Ord + Eq + HashStable<StableHashingContext<'a>>,
{
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
mem::discriminant(self).hash_stable(hcx, hasher);

@@ -240,10 +240,10 @@ impl FlagComputation {
self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES | TypeFlags::HAS_CT_INFER);
match infer {
InferConst::Fresh(_) => {}
InferConst::Canonical(debruijn, _) => self.add_binder(debruijn),
InferConst::Var(_) => self.add_flags(TypeFlags::KEEP_IN_LOCAL_TCX),
}
}
ConstValue::Bound(debruijn, _) => self.add_binder(debruijn),
ConstValue::Param(_) => {
self.add_flags(TypeFlags::HAS_FREE_LOCAL_NAMES | TypeFlags::HAS_PARAMS);
}

@@ -88,6 +88,9 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone {
fn has_infer_types(&self) -> bool {
self.has_type_flags(TypeFlags::HAS_TY_INFER)
}
fn has_infer_consts(&self) -> bool {
self.has_type_flags(TypeFlags::HAS_CT_INFER)
}
fn has_local_value(&self) -> bool {
self.has_type_flags(TypeFlags::KEEP_IN_LOCAL_TCX)
}
@@ -518,10 +521,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for BoundVarReplacer<'a, 'tcx> {
}

fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
if let ty::Const {
val: ConstValue::Infer(ty::InferConst::Canonical(debruijn, bound_const)),
ty,
} = *ct {
if let ty::Const { val: ConstValue::Bound(debruijn, bound_const), ty } = *ct {
if debruijn == self.current_index {
let fld_c = &mut self.fld_c;
let ct = fld_c(bound_const, ty);

@@ -567,7 +567,10 @@ impl<'tcx> TyCtxt<'tcx> {
// identity for bound types and consts
let fld_t = |bound_ty| self.mk_ty(ty::Bound(ty::INNERMOST, bound_ty));
let fld_c = |bound_ct, ty| {
self.mk_const_infer(ty::InferConst::Canonical(ty::INNERMOST, bound_ct), ty)
self.mk_const(ty::Const {
val: ConstValue::Bound(ty::INNERMOST, bound_ct),
ty,
})
};
self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t, fld_c)
}

@@ -718,7 +721,6 @@ impl<'tcx> TyCtxt<'tcx> {
// vars. See comment on `shift_vars_through_binders` method in
// `subst.rs` for more details.

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
enum Direction {
In,
Out,

@@ -799,10 +801,7 @@ impl TypeFolder<'tcx> for Shifter<'tcx> {
}

fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
if let ty::Const {
val: ConstValue::Infer(ty::InferConst::Canonical(debruijn, bound_const)),
ty,
} = *ct {
if let ty::Const { val: ConstValue::Bound(debruijn, bound_ct), ty } = *ct {
if self.amount == 0 || debruijn < self.current_index {
ct
} else {

@@ -813,7 +812,10 @@ impl TypeFolder<'tcx> for Shifter<'tcx> {
debruijn.shifted_out(self.amount)
}
};
self.tcx.mk_const_infer(ty::InferConst::Canonical(debruijn, bound_const), ty)
self.tcx.mk_const(ty::Const {
val: ConstValue::Bound(debruijn, bound_ct),
ty,
})
}
} else {
ct.super_fold_with(self)

@@ -917,8 +919,7 @@ impl<'tcx> TypeVisitor<'tcx> for HasEscapingVarsVisitor {
// const, as it has types/regions embedded in a lot of other
// places.
match ct.val {
ConstValue::Infer(ty::InferConst::Canonical(debruijn, _))
if debruijn >= self.outer_index => true,
ConstValue::Bound(debruijn, _) if debruijn >= self.outer_index => true,
_ => ct.super_visit_with(self),
}
}

@@ -159,7 +159,7 @@ impl AssocItemContainer {
/// The "header" of an impl is everything outside the body: a Self type, a trait
/// ref (in the case of a trait impl), and a set of predicates (from the
/// bounds / where-clauses).
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(Clone, Debug)]
pub struct ImplHeader<'tcx> {
pub impl_def_id: DefId,
pub self_ty: Ty<'tcx>,

@@ -195,7 +195,7 @@ pub struct AssocItem {
pub method_has_self_argument: bool,
}

#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, RustcEncodable, RustcDecodable, HashStable)]
#[derive(Copy, Clone, PartialEq, Debug, HashStable)]
pub enum AssocKind {
Const,
Method,

@@ -331,7 +331,7 @@ impl Visibility {
}
}

#[derive(Copy, Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Hash, HashStable)]
#[derive(Copy, Clone, PartialEq, RustcDecodable, RustcEncodable, HashStable)]
pub enum Variance {
Covariant, // T<A> <: T<B> iff A <: B -- e.g., function return type
Invariant, // T<A> <: T<B> iff B == A -- e.g., type of mutable cell

@@ -752,7 +752,7 @@ pub struct UpvarId {
pub closure_expr_id: LocalDefId,
}

#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable, Copy, HashStable)]
#[derive(Clone, PartialEq, Debug, RustcEncodable, RustcDecodable, Copy, HashStable)]
pub enum BorrowKind {
/// Data must be immutable and is aliasable.
ImmBorrow,

@@ -8,14 +8,12 @@ use crate::ty::subst::SubstsRef;
use crate::ty::fast_reject::SimplifiedType;
use crate::mir;

use std::fmt::Debug;
use std::hash::Hash;
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::symbol::InternedString;

/// The `Key` trait controls what types can legally be used as the key
/// for a query.
pub(super) trait Key: Clone + Hash + Eq + Debug {
pub(super) trait Key {
/// Given an instance of this key, what crate is it referring to?
/// This is used to find the provider.
fn query_crate(&self) -> CrateNum;

@@ -201,10 +199,7 @@ impl Key for InternedString {

/// Canonical query goals correspond to abstract trait operations that
/// are not tied to any crate in particular.
impl<'tcx, T> Key for Canonical<'tcx, T>
where
T: Debug + Hash + Clone + Eq,
{
impl<'tcx, T> Key for Canonical<'tcx, T> {
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}

@@ -801,7 +801,7 @@ macro_rules! define_queries_inner {
}

#[allow(nonstandard_style)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[derive(Clone, Copy)]
pub enum QueryName {
$($name),*
}

@@ -819,7 +819,7 @@ macro_rules! define_queries_inner {
}

#[allow(nonstandard_style)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(Clone, Debug)]
pub enum Query<$tcx> {
$($(#[$attr])* $name($K)),*
}

@@ -1379,27 +1379,23 @@ impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Const<'tcx> {
impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> {
fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
match *self {
ConstValue::ByRef { alloc, offset } =>
ConstValue::ByRef { alloc, offset },
ConstValue::Infer(ic) => ConstValue::Infer(ic.fold_with(folder)),
ConstValue::Param(p) => ConstValue::Param(p.fold_with(folder)),
ConstValue::Placeholder(p) => ConstValue::Placeholder(p),
ConstValue::Scalar(a) => ConstValue::Scalar(a),
ConstValue::Slice { data, start, end } => ConstValue::Slice { data, start, end },
ConstValue::Unevaluated(did, substs)
=> ConstValue::Unevaluated(did, substs.fold_with(folder)),
ConstValue::ByRef { .. } | ConstValue::Bound(..) | ConstValue::Placeholder(..)
| ConstValue::Scalar(..) | ConstValue::Slice { .. } => *self,

}
}

fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
match *self {
ConstValue::ByRef { .. } => false,
ConstValue::Infer(ic) => ic.visit_with(visitor),
ConstValue::Param(p) => p.visit_with(visitor),
ConstValue::Placeholder(_) => false,
ConstValue::Scalar(_) => false,
ConstValue::Slice { .. } => false,
ConstValue::Unevaluated(_, substs) => substs.visit_with(visitor),
ConstValue::ByRef { .. } | ConstValue::Bound(..) | ConstValue::Placeholder(_)
| ConstValue::Scalar(_) | ConstValue::Slice { .. } => false,
}
}
}

@@ -304,8 +304,7 @@ static_assert_size!(TyKind<'_>, 24);
/// type parameters is similar, but the role of CK and CS are
/// different. CK represents the "yield type" and CS represents the
/// "return type" of the generator.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug,
RustcEncodable, RustcDecodable, HashStable)]
#[derive(Copy, Clone, Debug)]
pub struct ClosureSubsts<'tcx> {
/// Lifetime and type parameters from the enclosing function,
/// concatenated with the types of the upvars.

@@ -392,8 +391,7 @@ impl<'tcx> ClosureSubsts<'tcx> {
}

/// Similar to `ClosureSubsts`; see the above documentation for more.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug,
RustcEncodable, RustcDecodable, HashStable)]
#[derive(Copy, Clone, Debug)]
pub struct GeneratorSubsts<'tcx> {
pub substs: SubstsRef<'tcx>,
}

@@ -1035,7 +1033,7 @@ impl<'tcx> ProjectionTy<'tcx> {
}
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
#[derive(Clone, Debug)]
pub struct GenSig<'tcx> {
pub yield_ty: Ty<'tcx>,
pub return_ty: Ty<'tcx>,

@@ -2373,6 +2371,4 @@ pub enum InferConst<'tcx> {
Var(ConstVid<'tcx>),
/// A fresh const variable. See `infer::freshen` for more details.
Fresh(u32),
/// Canonicalized const variable, used only when preparing a trait query.
Canonical(DebruijnIndex, BoundVar),
}

@@ -2,7 +2,7 @@

use crate::hir::def_id::DefId;
use crate::infer::canonical::Canonical;
use crate::ty::{self, Lift, List, Ty, TyCtxt, InferConst, ParamConst};
use crate::ty::{self, Lift, List, Ty, TyCtxt, ParamConst};
use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use crate::mir::interpret::ConstValue;
use crate::ty::sty::{ClosureSubsts, GeneratorSubsts};

@@ -234,9 +234,7 @@ impl<'a, 'tcx> InternalSubsts<'tcx> {

ty::GenericParamDefKind::Const => {
tcx.mk_const(ty::Const {
val: ConstValue::Infer(
InferConst::Canonical(ty::INNERMOST, ty::BoundVar::from(param.index))
),
val: ConstValue::Bound(ty::INNERMOST, ty::BoundVar::from(param.index)),
ty: tcx.type_of(def_id),
}).into()
}

@@ -818,6 +818,8 @@ impl<'tcx> ty::TyS<'tcx> {
///
/// (Note that this implies that if `ty` has a destructor attached,
/// then `needs_drop` will definitely return `true` for `ty`.)
///
/// Note that this method is used to check eligible types in unions.
#[inline]
pub fn needs_drop(&'tcx self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
tcx.needs_drop_raw(param_env.and(self)).0

@@ -12,9 +12,3 @@ test = false

[dependencies]
rustc_llvm = { path = "../librustc_llvm" }

[features]
# This is used to convince Cargo to separately cache builds of `rustc_codegen_llvm`
# when this option is enabled or not. That way we can build two, cache two
# artifacts, and have nice speedy rebuilds.
emscripten = ["rustc_llvm/emscripten"]

@@ -53,9 +53,7 @@ fn prepare_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,

let symbol_filter = &|&(ref name, level): &(String, SymbolExportLevel)| {
if level.is_below_threshold(export_threshold) {
let mut bytes = Vec::with_capacity(name.len() + 1);
bytes.extend(name.bytes());
Some(CString::new(bytes).unwrap())
Some(CString::new(name.as_str()).unwrap())
} else {
None
}

@@ -2069,11 +2069,9 @@ fn set_members_of_composite_type(cx: &CodegenCx<'ll, 'tcx>,
{
let mut composite_types_completed =
debug_context(cx).composite_types_completed.borrow_mut();
if composite_types_completed.contains(&composite_type_metadata) {
if !composite_types_completed.insert(&composite_type_metadata) {
bug!("debuginfo::set_members_of_composite_type() - \
Already completed forward declaration re-encountered.");
} else {
composite_types_completed.insert(composite_type_metadata);
}
}

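Several hunks in this commit (here, and again in `native_libs.rs` and the borrow checker further down) collapse a separate `contains` check followed by an `insert` into a single `insert` call, whose boolean return value already says whether the element was newly added. A minimal standalone sketch of the idiom with plain `std` types (the names are illustrative only, not taken from the compiler):

    use std::collections::HashSet;

    fn main() {
        let mut completed: HashSet<&str> = HashSet::new();

        for item in vec!["vec_ty", "map_ty", "vec_ty"] {
            // `insert` returns false when the value was already present,
            // so no separate `contains` lookup is needed.
            if !completed.insert(item) {
                println!("{} was already completed", item);
                continue;
            }
            println!("processing {}", item);
        }
    }

Besides saving a hash lookup, the single-call form avoids the subtle bug of forgetting the follow-up `insert` on one of the branches.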
@@ -50,7 +50,7 @@ pub enum CallConv {
}

/// LLVMRustLinkage
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[derive(PartialEq)]
#[repr(C)]
pub enum Linkage {
ExternalLinkage = 0,

@@ -67,7 +67,6 @@ pub enum Linkage {
}

// LLVMRustVisibility
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[repr(C)]
pub enum Visibility {
Default = 0,

@@ -79,6 +79,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
ConstValue::Unevaluated(..) => bug!("unevaluated constant in `OperandRef::from_const`"),
ConstValue::Param(_) => bug!("encountered a ConstValue::Param in codegen"),
ConstValue::Infer(_) => bug!("encountered a ConstValue::Infer in codegen"),
ConstValue::Bound(..) => bug!("encountered a ConstValue::Bound in codegen"),
ConstValue::Placeholder(_) => bug!("encountered a ConstValue::Placeholder in codegen"),
ConstValue::Scalar(x) => {
let scalar = match layout.abi {

@@ -556,7 +556,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
) -> Bx::Value {
let is_float = input_ty.is_floating_point();
let is_signed = input_ty.is_signed();
let is_unit = input_ty.is_unit();
match op {
mir::BinOp::Add => if is_float {
bx.fadd(lhs, rhs)

@@ -594,13 +593,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
mir::BinOp::Shl => common::build_unchecked_lshift(bx, lhs, rhs),
mir::BinOp::Shr => common::build_unchecked_rshift(bx, input_ty, lhs, rhs),
mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt |
mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => if is_unit {
bx.cx().const_bool(match op {
mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt => false,
mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => true,
_ => unreachable!()
})
} else if is_float {
mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => if is_float {
bx.fcmp(
base::bin_op_to_fcmp_predicate(op.to_hir_binop()),
lhs, rhs

@@ -60,10 +60,10 @@ impl<N> SnapshotVecDelegate for Edge<N> {
fn reverse(_: &mut Vec<Edge<N>>, _: ()) {}
}

#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct NodeIndex(pub usize);

#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct EdgeIndex(pub usize);

pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);

@@ -90,7 +90,7 @@ impl<T> Sharded<T> {

pub type ShardedHashMap<K, V> = Sharded<FxHashMap<K, V>>;

impl<K: Eq + Hash, V> ShardedHashMap<K, V> {
impl<K: Eq, V> ShardedHashMap<K, V> {
pub fn len(&self) -> usize {
self.lock_shards().iter().map(|shard| shard.len()).sum()
}

@@ -7,7 +7,7 @@ use std::mem;
mod tests;

pub struct SnapshotMap<K, V>
where K: Hash + Clone + Eq
where K: Clone + Eq
{
map: FxHashMap<K, V>,
undo_log: Vec<UndoLog<K, V>>,

@@ -169,7 +169,7 @@ pub trait HashStable<CTX> {
/// example, for DefId that can be converted to a DefPathHash. This is used for
/// bringing maps into a predictable order before hashing them.
pub trait ToStableHashKey<HCX> {
type KeyType: Ord + Clone + Sized + HashStable<HCX>;
type KeyType: Ord + Sized + HashStable<HCX>;
fn to_stable_hash_key(&self, hcx: &HCX) -> Self::KeyType;
}

@@ -460,7 +460,7 @@ impl_stable_hash_via_hash!(::std::path::Path);
impl_stable_hash_via_hash!(::std::path::PathBuf);

impl<K, V, R, HCX> HashStable<HCX> for ::std::collections::HashMap<K, V, R>
where K: ToStableHashKey<HCX> + Eq + Hash,
where K: ToStableHashKey<HCX> + Eq,
V: HashStable<HCX>,
R: BuildHasher,
{

@@ -471,7 +471,7 @@ impl<K, V, R, HCX> HashStable<HCX> for ::std::collections::HashMap<K, V, R>
}

impl<K, R, HCX> HashStable<HCX> for ::std::collections::HashSet<K, R>
where K: ToStableHashKey<HCX> + Eq + Hash,
where K: ToStableHashKey<HCX> + Eq,
R: BuildHasher,
{
fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {

@@ -513,10 +513,10 @@ pub fn hash_stable_hashmap<HCX, K, V, R, SK, F>(
hasher: &mut StableHasher,
map: &::std::collections::HashMap<K, V, R>,
to_stable_hash_key: F)
where K: Eq + Hash,
where K: Eq,
V: HashStable<HCX>,
R: BuildHasher,
SK: HashStable<HCX> + Ord + Clone,
SK: HashStable<HCX> + Ord,
F: Fn(&K, &HCX) -> SK,
{
let mut entries: Vec<_> = map.iter()

@@ -738,7 +738,7 @@ impl<T: Clone> Clone for RwLock<T> {

/// A type which only allows its inner value to be used in one thread.
/// It will panic if it is used on multiple threads.
#[derive(Copy, Clone, Hash, Debug, Eq, PartialEq)]
#[derive(Debug)]
pub struct OneThread<T> {
#[cfg(parallel_compiler)]
thread: thread::ThreadId,

@@ -3,7 +3,7 @@ use crate::stable_hasher::{StableHasher, HashStable};
/// A vector type optimized for cases where this size is usually 0 (cf. `SmallVector`).
/// The `Option<Box<..>>` wrapping allows us to represent a zero sized vector with `None`,
/// which uses only a single (null) pointer.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ThinVec<T>(Option<Box<Vec<T>>>);

impl<T> ThinVec<T> {

@@ -14,7 +14,7 @@
#[cfg(test)]
mod tests;

#[derive(Clone, Hash, Debug, PartialEq)]
#[derive(Clone)]
pub struct TinyList<T: PartialEq> {
head: Option<Element<T>>
}

@@ -80,7 +80,7 @@ impl<T: PartialEq> TinyList<T> {
}
}

#[derive(Clone, Hash, Debug, PartialEq)]
#[derive(Clone)]
struct Element<T: PartialEq> {
data: T,
next: Option<Box<Element<T>>>,

@@ -11,7 +11,7 @@ use std::mem;
mod tests;

#[derive(Clone, Debug)]
pub struct TransitiveRelation<T: Clone + Debug + Eq + Hash> {
pub struct TransitiveRelation<T: Eq + Hash> {
// List of elements. This is used to map from a T to a usize.
elements: Vec<T>,

@@ -35,7 +35,7 @@ pub struct TransitiveRelation<T: Clone + Debug + Eq + Hash> {
}

// HACK(eddyb) manual impl avoids `Default` bound on `T`.
impl<T: Clone + Debug + Eq + Hash> Default for TransitiveRelation<T> {
impl<T: Eq + Hash> Default for TransitiveRelation<T> {
fn default() -> Self {
TransitiveRelation {
elements: Default::default(),

@@ -46,7 +46,7 @@ impl<T: Clone + Debug + Eq + Hash> Default for TransitiveRelation<T> {
}
}

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Debug)]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, RustcEncodable, RustcDecodable, Debug)]
struct Index(usize);

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]

@@ -167,7 +167,7 @@ pub fn run_compiler(
};

let sopts = config::build_session_options(&matches);
let cfg = config::parse_cfgspecs(matches.opt_strs("cfg"));
let cfg = interface::parse_cfgspecs(matches.opt_strs("cfg"));

let mut dummy_config = |sopts, cfg, diagnostic_output| {
let mut config = interface::Config {

@@ -12,7 +12,7 @@ use Destination::*;
use syntax_pos::{SourceFile, Span, MultiSpan};

use crate::{
Level, CodeSuggestion, Diagnostic, SubDiagnostic,
Level, CodeSuggestion, Diagnostic, SubDiagnostic, pluralise,
SuggestionStyle, SourceMapper, SourceMapperDyn, DiagnosticId,
};
use crate::Level::Error;

@@ -1572,7 +1572,8 @@ impl EmitterWriter {
}
}
if suggestions.len() > MAX_SUGGESTIONS {
let msg = format!("and {} other candidates", suggestions.len() - MAX_SUGGESTIONS);
let others = suggestions.len() - MAX_SUGGESTIONS;
let msg = format!("and {} other candidate{}", others, pluralise!(others));
buffer.puts(row_num, max_line_num_len + 3, &msg, Style::NoStyle);
} else if notice_capitalization {
let msg = "notice the capitalization difference";

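The suggestion-list change above swaps the hard-coded "candidates" for a form built with the crate's `pluralise!` macro, so a single leftover suggestion reads "candidate". The underlying logic is just an "s" suffix keyed on the count; a rough standalone equivalent (the helper name here is made up for illustration, not the real macro):

    // Hypothetical helper mirroring what the diagnostic message needs:
    // add an "s" suffix only when the count is not exactly one.
    fn plural_suffix(count: usize) -> &'static str {
        if count == 1 { "" } else { "s" }
    }

    fn main() {
        for &others in &[1usize, 3] {
            let msg = format!("and {} other candidate{}", others, plural_suffix(others));
            println!("{}", msg);
        }
        // Prints:
        // and 1 other candidate
        // and 3 other candidates
    }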
@ -27,6 +27,7 @@ rustc_codegen_utils = { path = "../librustc_codegen_utils" }
|
|||
rustc_metadata = { path = "../librustc_metadata" }
|
||||
rustc_mir = { path = "../librustc_mir" }
|
||||
rustc_passes = { path = "../librustc_passes" }
|
||||
rustc_target = { path = "../librustc_target" }
|
||||
rustc_typeck = { path = "../librustc_typeck" }
|
||||
rustc_lint = { path = "../librustc_lint" }
|
||||
rustc_errors = { path = "../librustc_errors" }
|
||||
|
|
|
@ -3,7 +3,8 @@ use crate::util;
|
|||
pub use crate::passes::BoxedResolver;
|
||||
|
||||
use rustc::lint;
|
||||
use rustc::session::config::{self, Input};
|
||||
use rustc::session::early_error;
|
||||
use rustc::session::config::{self, Input, ErrorOutputType};
|
||||
use rustc::session::{DiagnosticOutput, Session};
|
||||
use rustc::util::common::ErrorReported;
|
||||
use rustc_codegen_utils::codegen_backend::CodegenBackend;
|
||||
|
@ -14,9 +15,13 @@ use rustc_metadata::cstore::CStore;
|
|||
use std::path::PathBuf;
|
||||
use std::result;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use syntax;
|
||||
use syntax::source_map::{FileLoader, SourceMap};
|
||||
use syntax::{self, parse};
|
||||
use syntax::ast::{self, MetaItemKind};
|
||||
use syntax::parse::token;
|
||||
use syntax::source_map::{FileName, FilePathMapping, FileLoader, SourceMap};
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax_pos::edition;
|
||||
use rustc_errors::{Diagnostic, emitter::Emitter, Handler, SourceMapperDyn};
|
||||
|
||||
pub type Result<T> = result::Result<T, ErrorReported>;
|
||||
|
||||
|
@ -60,6 +65,58 @@ impl Compiler {
|
|||
}
|
||||
}
|
||||
|
||||
/// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
|
||||
pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
|
||||
struct NullEmitter;
|
||||
impl Emitter for NullEmitter {
|
||||
fn emit_diagnostic(&mut self, _: &Diagnostic) {}
|
||||
fn source_map(&self) -> Option<&Lrc<SourceMapperDyn>> { None }
|
||||
}
|
||||
|
||||
syntax::with_default_globals(move || {
|
||||
let cfg = cfgspecs.into_iter().map(|s| {
|
||||
|
||||
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
|
||||
let handler = Handler::with_emitter(false, None, Box::new(NullEmitter));
|
||||
let sess = ParseSess::with_span_handler(handler, cm);
|
||||
let filename = FileName::cfg_spec_source_code(&s);
|
||||
let mut parser = parse::new_parser_from_source_str(&sess, filename, s.to_string());
|
||||
|
||||
macro_rules! error {($reason: expr) => {
|
||||
early_error(ErrorOutputType::default(),
|
||||
&format!(concat!("invalid `--cfg` argument: `{}` (", $reason, ")"), s));
|
||||
}}
|
||||
|
||||
match &mut parser.parse_meta_item() {
|
||||
Ok(meta_item) if parser.token == token::Eof => {
|
||||
if meta_item.path.segments.len() != 1 {
|
||||
error!("argument key must be an identifier");
|
||||
}
|
||||
match &meta_item.kind {
|
||||
MetaItemKind::List(..) => {
|
||||
error!(r#"expected `key` or `key="value"`"#);
|
||||
}
|
||||
MetaItemKind::NameValue(lit) if !lit.kind.is_str() => {
|
||||
error!("argument value must be a string");
|
||||
}
|
||||
MetaItemKind::NameValue(..) | MetaItemKind::Word => {
|
||||
let ident = meta_item.ident().expect("multi-segment cfg key");
|
||||
return (ident.name, meta_item.value_str());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(..) => {}
|
||||
Err(err) => err.cancel(),
|
||||
}
|
||||
|
||||
error!(r#"expected `key` or `key="value"`"#);
|
||||
}).collect::<ast::CrateConfig>();
|
||||
cfg.into_iter().map(|(a, b)| {
|
||||
(a.to_string(), b.map(|b| b.to_string()))
|
||||
}).collect()
|
||||
})
|
||||
}
|
||||
|
||||
/// The compiler configuration
|
||||
pub struct Config {
|
||||
/// Command line options
|
||||
|
|
|
@ -18,3 +18,6 @@ pub mod util;
|
|||
mod proc_macro_decls;
|
||||
|
||||
pub use interface::{run_compiler, Config};
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
|
|
@ -1,25 +1,24 @@
|
|||
use getopts;
|
||||
use crate::lint;
|
||||
use crate::middle::cstore;
|
||||
use crate::session::config::{
|
||||
build_configuration,
|
||||
build_session_options,
|
||||
to_crate_config,
|
||||
parse_cfgspecs,
|
||||
};
|
||||
use crate::session::config::{LtoCli, LinkerPluginLto, SwitchWithOptPath, ExternEntry};
|
||||
use crate::session::build_session;
|
||||
use crate::session::search_paths::SearchPath;
|
||||
extern crate getopts;
|
||||
|
||||
use crate::interface::parse_cfgspecs;
|
||||
|
||||
use rustc::lint;
|
||||
use rustc::middle::cstore;
|
||||
use rustc::session::config::{build_configuration, build_session_options, to_crate_config};
|
||||
use rustc::session::config::{LtoCli, LinkerPluginLto, SwitchWithOptPath, ExternEntry};
|
||||
use rustc::session::config::{Externs, OutputType, OutputTypes, SymbolManglingVersion};
|
||||
use rustc::session::config::{rustc_optgroups, Options, ErrorOutputType, Passes};
|
||||
use rustc::session::build_session;
|
||||
use rustc::session::search_paths::SearchPath;
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::iter::FromIterator;
|
||||
use std::path::PathBuf;
|
||||
use super::{Externs, OutputType, OutputTypes, SymbolManglingVersion};
|
||||
use rustc_target::spec::{MergeFunctions, PanicStrategy, RelroLevel};
|
||||
use syntax::symbol::sym;
|
||||
use syntax::edition::{Edition, DEFAULT_EDITION};
|
||||
use syntax;
|
||||
use super::Options;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_errors::{ColorConfig, emitter::HumanReadableErrorType, registry};
|
||||
|
||||
pub fn build_session_options_and_crate_config(
|
||||
matches: &getopts::Matches,
|
||||
|
@ -30,22 +29,23 @@ pub fn build_session_options_and_crate_config(
|
|||
)
|
||||
}
|
||||
|
||||
impl ExternEntry {
|
||||
fn new_public<S: Into<String>,
|
||||
I: IntoIterator<Item = Option<S>>>(locations: I) -> ExternEntry {
|
||||
let locations: BTreeSet<_> = locations.into_iter().map(|o| o.map(|s| s.into()))
|
||||
.collect();
|
||||
fn new_public_extern_entry<S, I>(locations: I) -> ExternEntry
|
||||
where
|
||||
S: Into<String>,
|
||||
I: IntoIterator<Item = Option<S>>,
|
||||
{
|
||||
let locations: BTreeSet<_> = locations.into_iter().map(|o| o.map(|s| s.into()))
|
||||
.collect();
|
||||
|
||||
ExternEntry {
|
||||
locations,
|
||||
is_private_dep: false
|
||||
}
|
||||
ExternEntry {
|
||||
locations,
|
||||
is_private_dep: false
|
||||
}
|
||||
}
|
||||
|
||||
fn optgroups() -> getopts::Options {
|
||||
let mut opts = getopts::Options::new();
|
||||
for group in super::rustc_optgroups() {
|
||||
for group in rustc_optgroups() {
|
||||
(group.apply)(&mut opts);
|
||||
}
|
||||
return opts;
|
||||
|
@ -63,7 +63,7 @@ fn test_switch_implies_cfg_test() {
|
|||
Ok(m) => m,
|
||||
Err(f) => panic!("test_switch_implies_cfg_test: {}", f),
|
||||
};
|
||||
let registry = errors::registry::Registry::new(&[]);
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
let cfg = build_configuration(&sess, to_crate_config(cfg));
|
||||
|
@ -81,7 +81,7 @@ fn test_switch_implies_cfg_test_unless_cfg_test() {
|
|||
Ok(m) => m,
|
||||
Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f),
|
||||
};
|
||||
let registry = errors::registry::Registry::new(&[]);
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
let cfg = build_configuration(&sess, to_crate_config(cfg));
|
||||
|
@ -95,7 +95,7 @@ fn test_switch_implies_cfg_test_unless_cfg_test() {
|
|||
fn test_can_print_warnings() {
|
||||
syntax::with_default_globals(|| {
|
||||
let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
|
||||
let registry = errors::registry::Registry::new(&[]);
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, _) = build_session_options_and_crate_config(&matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
assert!(!sess.diagnostic().can_emit_warnings());
|
||||
|
@ -105,7 +105,7 @@ fn test_can_print_warnings() {
|
|||
let matches = optgroups()
|
||||
.parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()])
|
||||
.unwrap();
|
||||
let registry = errors::registry::Registry::new(&[]);
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, _) = build_session_options_and_crate_config(&matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
assert!(sess.diagnostic().can_emit_warnings());
|
||||
|
@ -113,7 +113,7 @@ fn test_can_print_warnings() {
|
|||
|
||||
syntax::with_default_globals(|| {
|
||||
let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
|
||||
let registry = errors::registry::Registry::new(&[]);
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, _) = build_session_options_and_crate_config(&matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
assert!(sess.diagnostic().can_emit_warnings());
|
||||
|
@ -172,33 +172,33 @@ fn test_externs_tracking_hash_different_construction_order() {
|
|||
v1.externs = Externs::new(mk_map(vec![
|
||||
(
|
||||
String::from("a"),
|
||||
ExternEntry::new_public(vec![Some("b"), Some("c")])
|
||||
new_public_extern_entry(vec![Some("b"), Some("c")])
|
||||
),
|
||||
(
|
||||
String::from("d"),
|
||||
ExternEntry::new_public(vec![Some("e"), Some("f")])
|
||||
new_public_extern_entry(vec![Some("e"), Some("f")])
|
||||
),
|
||||
]));
|
||||
|
||||
v2.externs = Externs::new(mk_map(vec![
|
||||
(
|
||||
String::from("d"),
|
||||
ExternEntry::new_public(vec![Some("e"), Some("f")])
|
||||
new_public_extern_entry(vec![Some("e"), Some("f")])
|
||||
),
|
||||
(
|
||||
String::from("a"),
|
||||
ExternEntry::new_public(vec![Some("b"), Some("c")])
|
||||
new_public_extern_entry(vec![Some("b"), Some("c")])
|
||||
),
|
||||
]));
|
||||
|
||||
v3.externs = Externs::new(mk_map(vec![
|
||||
(
|
||||
String::from("a"),
|
||||
ExternEntry::new_public(vec![Some("b"), Some("c")])
|
||||
new_public_extern_entry(vec![Some("b"), Some("c")])
|
||||
),
|
||||
(
|
||||
String::from("d"),
|
||||
ExternEntry::new_public(vec![Some("f"), Some("e")])
|
||||
new_public_extern_entry(vec![Some("f"), Some("e")])
|
||||
),
|
||||
]));
|
||||
|
||||
|
@ -282,9 +282,9 @@ fn test_search_paths_tracking_hash_different_order() {
|
|||
let mut v3 = Options::default();
|
||||
let mut v4 = Options::default();
|
||||
|
||||
const JSON: super::ErrorOutputType = super::ErrorOutputType::Json {
|
||||
const JSON: ErrorOutputType = ErrorOutputType::Json {
|
||||
pretty: false,
|
||||
json_rendered: super::HumanReadableErrorType::Default(super::ColorConfig::Never),
|
||||
json_rendered: HumanReadableErrorType::Default(ColorConfig::Never),
|
||||
};
|
||||
|
||||
// Reference
|
||||
|
@ -455,7 +455,7 @@ fn test_codegen_options_tracking_hash() {
|
|||
opts.cg.codegen_units = Some(42);
|
||||
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
|
||||
|
||||
opts.cg.remark = super::Passes::Some(vec![String::from("pass1"), String::from("pass2")]);
|
||||
opts.cg.remark = Passes::Some(vec![String::from("pass1"), String::from("pass2")]);
|
||||
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
|
||||
|
||||
opts.cg.save_temps = true;
|
|
@ -980,35 +980,6 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnstableFeatures {
|
|||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
UNIONS_WITH_DROP_FIELDS,
|
||||
Warn,
|
||||
"use of unions that contain fields with possibly non-trivial drop code"
|
||||
}
|
||||
|
||||
declare_lint_pass!(
|
||||
/// Lint for unions that contain fields with possibly non-trivial destructors.
|
||||
UnionsWithDropFields => [UNIONS_WITH_DROP_FIELDS]
|
||||
);
|
||||
|
||||
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnionsWithDropFields {
|
||||
fn check_item(&mut self, ctx: &LateContext<'_, '_>, item: &hir::Item) {
|
||||
if let hir::ItemKind::Union(ref vdata, _) = item.kind {
|
||||
for field in vdata.fields() {
|
||||
let field_ty = ctx.tcx.type_of(
|
||||
ctx.tcx.hir().local_def_id(field.hir_id));
|
||||
if field_ty.needs_drop(ctx.tcx, ctx.param_env) {
|
||||
ctx.span_lint(UNIONS_WITH_DROP_FIELDS,
|
||||
field.span,
|
||||
"union contains a field with possibly non-trivial drop code, \
|
||||
drop code of union fields is ignored when dropping the union");
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
pub UNREACHABLE_PUB,
|
||||
Allow,
|
||||
|
@ -1288,7 +1259,6 @@ declare_lint_pass!(
|
|||
NO_MANGLE_GENERIC_ITEMS,
|
||||
MUTABLE_TRANSMUTES,
|
||||
UNSTABLE_FEATURES,
|
||||
UNIONS_WITH_DROP_FIELDS,
|
||||
UNREACHABLE_PUB,
|
||||
TYPE_ALIAS_BOUNDS,
|
||||
TRIVIAL_BOUNDS
|
||||
|
|
|
@ -164,9 +164,6 @@ macro_rules! late_lint_mod_passes {
|
|||
// Depends on referenced function signatures in expressions
|
||||
MutableTransmutes: MutableTransmutes,
|
||||
|
||||
// Depends on types of fields, checks if they implement Drop
|
||||
UnionsWithDropFields: UnionsWithDropFields,
|
||||
|
||||
TypeAliasBounds: TypeAliasBounds,
|
||||
|
||||
TrivialConstraints: TrivialConstraints,
|
||||
|
|
|
@ -738,10 +738,10 @@ impl<'a> CrateLoader<'a> {
|
|||
if !self.sess.crate_types.borrow().iter().all(|ct| {
|
||||
match *ct {
|
||||
// Link the runtime
|
||||
config::CrateType::Staticlib |
|
||||
config::CrateType::Executable => true,
|
||||
// This crate will be compiled with the required
|
||||
// instrumentation pass
|
||||
config::CrateType::Staticlib |
|
||||
config::CrateType::Rlib |
|
||||
config::CrateType::Dylib |
|
||||
config::CrateType::Cdylib =>
|
||||
|
|
|
@@ -198,12 +198,10 @@ impl Collector<'tcx> {
self.tcx.sess.err(&format!("renaming of the library `{}` was specified, \
however this crate contains no `#[link(...)]` \
attributes referencing this library.", name));
} else if renames.contains(name) {
} else if !renames.insert(name) {
self.tcx.sess.err(&format!("multiple renamings were \
specified for library `{}` .",
name));
} else {
renames.insert(name);
}
}
}

@@ -78,7 +78,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
.last()
.unwrap();

if self.uninitialized_error_reported.contains(&root_place) {
if !self.uninitialized_error_reported.insert(root_place) {
debug!(
"report_use_of_moved_or_uninitialized place: error about {:?} suppressed",
root_place

@@ -86,8 +86,6 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
return;
}

self.uninitialized_error_reported.insert(root_place);

let item_msg = match self.describe_place_with_options(used_place,
IncludingDowncast(true)) {
Some(name) => format!("`{}`", name),

@ -71,7 +71,7 @@ impl Index<OutlivesConstraintIndex> for OutlivesConstraintSet {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct OutlivesConstraint {
|
||||
// NB. The ordering here is not significant for correctness, but
|
||||
// it is for convenience. Before we dump the constraints in the
|
||||
|
|
|
@ -11,7 +11,7 @@ use syntax_pos::Span;
|
|||
/// indexed by the region `R0`.
|
||||
crate struct MemberConstraintSet<'tcx, R>
|
||||
where
|
||||
R: Copy + Hash + Eq,
|
||||
R: Copy + Eq,
|
||||
{
|
||||
/// Stores the first "member" constraint for a given `R0`. This is an
|
||||
/// index into the `constraints` vector below.
|
||||
|
@ -191,7 +191,7 @@ where
|
|||
|
||||
impl<'tcx, R> Index<NllMemberConstraintIndex> for MemberConstraintSet<'tcx, R>
|
||||
where
|
||||
R: Copy + Hash + Eq,
|
||||
R: Copy + Eq,
|
||||
{
|
||||
type Output = NllMemberConstraint<'tcx>;
|
||||
|
||||
|
|
|
@ -129,7 +129,7 @@ rustc_index::newtype_index! {
|
|||
|
||||
/// An individual element in a region value -- the value of a
|
||||
/// particular region variable consists of a set of these elements.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug)]
|
||||
crate enum RegionElement {
|
||||
/// A point in the control-flow graph.
|
||||
Location(Location),
|
||||
|
|
|
@ -146,7 +146,7 @@ struct UniversalRegionIndices<'tcx> {
|
|||
indices: FxHashMap<ty::Region<'tcx>, RegionVid>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum RegionClassification {
|
||||
/// A **global** region is one that can be named from
|
||||
/// anywhere. There is only one, `'static`.
|
||||
|
|
|
@ -189,8 +189,8 @@ use std::ops::RangeInclusive;
|
|||
use std::u128;
|
||||
use std::convert::TryInto;
|
||||
|
||||
pub fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pat<'tcx>) -> &'a Pat<'tcx> {
|
||||
cx.pattern_arena.alloc(LiteralExpander { tcx: cx.tcx }.fold_pattern(&pat))
|
||||
pub fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pat<'tcx>) -> Pat<'tcx> {
|
||||
LiteralExpander { tcx: cx.tcx }.fold_pattern(&pat)
|
||||
}
|
||||
|
||||
struct LiteralExpander<'tcx> {
|
||||
|
|
|
@ -154,7 +154,8 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
|
|||
self.tables
|
||||
);
|
||||
patcx.include_lint_checks();
|
||||
let pattern = expand_pattern(cx, patcx.lower_pattern(&pat));
|
||||
let pattern =
|
||||
cx.pattern_arena.alloc(expand_pattern(cx, patcx.lower_pattern(&pat))) as &_;
|
||||
if !patcx.errors.is_empty() {
|
||||
patcx.report_inlining_errors(pat.span);
|
||||
have_errors = true;
|
||||
|
@ -253,8 +254,9 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
|
|||
patcx.include_lint_checks();
|
||||
let pattern = patcx.lower_pattern(pat);
|
||||
let pattern_ty = pattern.ty;
|
||||
let pattern = expand_pattern(cx, pattern);
|
||||
let pats: Matrix<'_, '_> = vec![smallvec![
|
||||
expand_pattern(cx, pattern)
|
||||
&pattern
|
||||
]].into_iter().collect();
|
||||
|
||||
let witnesses = match check_not_useful(cx, pattern_ty, &pats, pat.hir_id) {
|
||||
|
|
|
@ -1214,7 +1214,7 @@ fn search_for_adt_without_structural_match<'tcx>(tcx: TyCtxt<'tcx>,
|
|||
|
||||
// tracks ADT's previously encountered during search, so that
|
||||
// we will not recur on them again.
|
||||
seen: FxHashSet<&'tcx AdtDef>,
|
||||
seen: FxHashSet<hir::def_id::DefId>,
|
||||
}
|
||||
|
||||
impl<'tcx> TypeVisitor<'tcx> for Search<'tcx> {
|
||||
|
@ -1254,14 +1254,12 @@ fn search_for_adt_without_structural_match<'tcx>(tcx: TyCtxt<'tcx>,
|
|||
return true // Halt visiting!
|
||||
}
|
||||
|
||||
if self.seen.contains(adt_def) {
|
||||
if !self.seen.insert(adt_def.did) {
|
||||
debug!("Search already seen adt_def: {:?}", adt_def);
|
||||
// let caller continue its search
|
||||
return false;
|
||||
}
|
||||
|
||||
self.seen.insert(adt_def);
|
||||
|
||||
// `#[structural_match]` does not care about the
|
||||
// instantiation of the generics in an ADT (it
|
||||
// instead looks directly at its fields outside
|
||||
|
|
|
@ -91,7 +91,7 @@ pub struct Frame<'mir, 'tcx, Tag=(), Extra=()> {
|
|||
pub extra: Extra,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
|
||||
#[derive(Clone, Eq, PartialEq)]
|
||||
pub enum StackPopCleanup {
|
||||
/// Jump to the next block in the caller, or cause UB if None (that's a function
|
||||
/// that may never return). Also store layout of return place so
|
||||
|
@ -113,7 +113,7 @@ pub struct LocalState<'tcx, Tag=(), Id=AllocId> {
|
|||
}
|
||||
|
||||
/// Current value of a local variable
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub enum LocalValue<Tag=(), Id=AllocId> {
|
||||
/// This local is not currently alive, and cannot be used at all.
|
||||
Dead,
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
//! short-circuiting the empty case!
|
||||
|
||||
use std::collections::VecDeque;
|
||||
use std::ptr;
|
||||
use std::{ptr, iter};
|
||||
use std::borrow::Cow;
|
||||
|
||||
use rustc::ty::{self, Instance, ParamEnv, query::TyCtxtAt};
|
||||
|
@ -22,7 +22,7 @@ use super::{
|
|||
Machine, AllocMap, MayLeak, ErrorHandled, CheckInAllocMsg,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
|
||||
#[derive(Debug, PartialEq, Copy, Clone)]
|
||||
pub enum MemoryKind<T> {
|
||||
/// Error if deallocated except during a stack pop
|
||||
Stack,
|
||||
|
@@ -785,6 +785,25 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
self.get(ptr.alloc_id)?.read_c_str(self, ptr)
}

/// Writes the given stream of bytes into memory.
///
/// Performs appropriate bounds checks.
pub fn write_bytes(
&mut self,
ptr: Scalar<M::PointerTag>,
src: impl IntoIterator<Item=u8, IntoIter: iter::ExactSizeIterator>,
) -> InterpResult<'tcx>
{
let src = src.into_iter();
let size = Size::from_bytes(src.len() as u64);
let ptr = match self.check_ptr_access(ptr, size, Align::from_bytes(1).unwrap())? {
Some(ptr) => ptr,
None => return Ok(()), // zero-sized access
};
let tcx = self.tcx.tcx;
self.get_mut(ptr.alloc_id)?.write_bytes(&tcx, ptr, src)
}

/// Expects the caller to have checked bounds and alignment.
pub fn copy(
&mut self,

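The new `write_bytes` helper takes `src: impl IntoIterator<Item=u8, IntoIter: iter::ExactSizeIterator>`, so the length is known before the iterator is consumed and the bounds check can happen up front. A rough standalone sketch of the same signature shape in stable syntax, writing into a plain `Vec<u8>` instead of interpreter memory (everything here is illustrative, not the interpreter's real types):

    // Accept any byte stream whose length is known up front, mirroring the
    // shape of the `write_bytes` signature above (stable-syntax spelling of
    // the `IntoIter: ExactSizeIterator` associated-type bound).
    fn write_bytes<I>(buf: &mut Vec<u8>, src: I) -> Result<(), String>
    where
        I: IntoIterator<Item = u8>,
        I::IntoIter: ExactSizeIterator,
    {
        let src = src.into_iter();
        let size = src.len(); // available before the iterator is consumed
        if size == 0 {
            return Ok(()); // zero-sized access: nothing to check or copy
        }
        buf.reserve(size);
        buf.extend(src);
        Ok(())
    }

    fn main() {
        let mut buf = Vec::new();
        write_bytes(&mut buf, b"hello".iter().copied()).unwrap();
        assert_eq!(buf, b"hello");
    }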
@ -26,7 +26,7 @@ pub use rustc::mir::interpret::ScalarMaybeUndef;
|
|||
/// operations and fat pointers. This idea was taken from rustc's codegen.
|
||||
/// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
|
||||
/// defined on `Immediate`, and do not have to work with a `Place`.
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Immediate<Tag=(), Id=AllocId> {
|
||||
Scalar(ScalarMaybeUndef<Tag, Id>),
|
||||
ScalarPair(ScalarMaybeUndef<Tag, Id>, ScalarMaybeUndef<Tag, Id>),
|
||||
|
@ -123,7 +123,7 @@ impl<'tcx, Tag> ::std::ops::Deref for ImmTy<'tcx, Tag> {
|
|||
/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
|
||||
/// or still in memory. The latter is an optimization, to delay reading that chunk of
|
||||
/// memory and to avoid having to store arbitrary-sized data here.
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Operand<Tag=(), Id=AllocId> {
|
||||
Immediate(Immediate<Tag, Id>),
|
||||
Indirect(MemPlace<Tag, Id>),
|
||||
|
@ -153,7 +153,7 @@ impl<Tag> Operand<Tag> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
pub struct OpTy<'tcx, Tag=()> {
|
||||
op: Operand<Tag>, // Keep this private, it helps enforce invariants
|
||||
pub layout: TyLayout<'tcx>,
|
||||
|
@ -589,8 +589,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
let ptr = self.tag_static_base_pointer(Pointer::new(id, offset));
|
||||
Operand::Indirect(MemPlace::from_ptr(ptr, layout.align.abi))
|
||||
},
|
||||
ConstValue::Scalar(x) =>
|
||||
Operand::Immediate(tag_scalar(x).into()),
|
||||
ConstValue::Scalar(x) => Operand::Immediate(tag_scalar(x).into()),
|
||||
ConstValue::Slice { data, start, end } => {
|
||||
// We rely on mutability being set correctly in `data` to prevent writes
|
||||
// where none should happen.
|
||||
|
@ -606,6 +605,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
}
|
||||
ConstValue::Param(..) |
|
||||
ConstValue::Infer(..) |
|
||||
ConstValue::Bound(..) |
|
||||
ConstValue::Placeholder(..) |
|
||||
ConstValue::Unevaluated(..) =>
|
||||
bug!("eval_const_to_op: Unexpected ConstValue {:?}", val),
|
||||
|
|
|
@ -72,13 +72,11 @@ fn check_fn_for_unconditional_recursion(
|
|||
let caller_substs = &InternalSubsts::identity_for_item(tcx, def_id)[..trait_substs_count];
|
||||
|
||||
while let Some(bb) = reachable_without_self_call_queue.pop() {
|
||||
if visited.contains(bb) {
|
||||
if !visited.insert(bb) {
|
||||
//already done
|
||||
continue;
|
||||
}
|
||||
|
||||
visited.insert(bb);
|
||||
|
||||
let block = &basic_blocks[bb];
|
||||
|
||||
if let Some(ref terminator) = block.terminator {
|
||||
|
|
|
@@ -199,7 +199,7 @@ use rustc_data_structures::sync::{MTRef, MTLock, ParallelIterator, par_iter};

use std::iter;

#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
#[derive(PartialEq)]
pub enum MonoItemCollectionMode {
Eager,
Lazy

Some files were not shown because too many files have changed in this diff.